diff --git a/compiler/grainformat/grainformat.re b/compiler/grainformat/grainformat.re index 6d26f0cf1a..dc27025ce5 100644 --- a/compiler/grainformat/grainformat.re +++ b/compiler/grainformat/grainformat.re @@ -28,7 +28,7 @@ let get_program_string = filename => { let compile_parsed = filename => { let filename = Filepath.to_string(filename); let program_str = get_program_string(filename); - switch (Format.parse_source(program_str)) { + switch (Fmt.parse_source(program_str)) { | Error(ParseError(exn)) => let bt = if (Printexc.backtrace_status()) { @@ -56,16 +56,9 @@ let format_code = ( ~eol, ~output=?, - ~original_source: array(string), + ~source: array(string), program: Parsetree.parsed_program, ) => { - let formatted_code = - Grain_formatting.Format.format_ast(~original_source, ~eol, program); - - let buf = Buffer.create(0); - Buffer.add_string(buf, formatted_code); - - let contents = Buffer.to_bytes(buf); switch (output) { | Some(outfile) => let outfile = Filepath.to_string(outfile); @@ -73,11 +66,18 @@ let format_code = // because `foo` doesn't exist so it tries to mkdir it and raises Fs_access.ensure_parent_directory_exists(outfile); let oc = Fs_access.open_file_for_writing(outfile); - output_bytes(oc, contents); + set_binary_mode_out(oc, true); + Grain_formatting.Fmt.format( + ~write=output_string(oc), + ~source, + ~eol, + program, + ); close_out(oc); | None => set_binary_mode_out(stdout, true); - print_bytes(contents); + Grain_formatting.Fmt.format(~write=print_string, ~source, ~eol, program); + flush(stdout); }; }; @@ -145,8 +145,8 @@ let enumerate_runs = opts => let grainformat = runs => { List.iter( ({input_path, output_path}) => { - let (program, original_source, eol) = compile_parsed(input_path); - try(format_code(~eol, ~output=?output_path, ~original_source, program)) { + let (program, source, eol) = compile_parsed(input_path); + try(format_code(~eol, ~output=?output_path, ~source, program)) { | exn => Stdlib.Format.eprintf("@[%s@]@.", Printexc.to_string(exn)); exit(2); diff --git a/compiler/src/diagnostics/commenttree.re b/compiler/src/diagnostics/commenttree.re new file mode 100644 index 0000000000..b5ae9b3730 --- /dev/null +++ b/compiler/src/diagnostics/commenttree.re @@ -0,0 +1,74 @@ +open Grain_parsing; + +// This structure isn't a tree at all, but we use a binary search algorithm to +// efficiently find comments, which is tree-like in spirit. + +type t = { + comments: array(Parsetree.comment), + line_map: Hashtbl.t(int, Parsetree.comment), +}; + +let empty: t = {comments: [||], line_map: Hashtbl.create(0)}; + +let loc = cmt => { + switch (cmt) { + | Parsetree.Doc({cmt_loc}) + | Block({cmt_loc}) + | Line({cmt_loc}) + | Shebang({cmt_loc}) => cmt_loc + }; +}; + +let from_comments = x => { + // The array allows us to do a binary search for comments within a range. + let comments = Array.of_list(x); + // This map stores the last comment on a line, allowing us to quickly check + // for formatter-ignore comments. 
+ let line_map = Hashtbl.create(Array.length(comments)); + List.iter( + comment => + Hashtbl.add(line_map, loc(comment).loc_start.pos_lnum, comment), + x, + ); + {comments, line_map}; +}; + +let rec find_start_index = (array, point, ans, left, right) => + if (left <= right) { + let middle = (left + right) / 2; + if (loc(array[middle]).loc_start.pos_cnum >= point) { + find_start_index(array, point, Some(middle), left, middle - 1); + } else { + find_start_index(array, point, ans, middle + 1, right); + }; + } else { + ans; + }; + +let rec collect_range = (array, start, stop) => + if (start == Array.length(array)) { + []; + } else { + let elem = array[start]; + if (loc(elem).loc_end.pos_cnum <= stop) { + [elem, ...collect_range(array, start + 1, stop)]; + } else { + []; + }; + }; + +let query = + ( + tree, + {Location.loc_start: {pos_cnum: start}, loc_end: {pos_cnum: finish}}, + ) => { + let array = tree.comments; + let start_index = + find_start_index(array, start, None, 0, Array.length(array) - 1); + switch (start_index) { + | None => [] + | Some(start_index) => collect_range(array, start_index, finish) + }; +}; + +let query_line = (tree, line) => Hashtbl.find_opt(tree.line_map, line); diff --git a/compiler/src/diagnostics/commenttree.rei b/compiler/src/diagnostics/commenttree.rei new file mode 100644 index 0000000000..5a4fe23185 --- /dev/null +++ b/compiler/src/diagnostics/commenttree.rei @@ -0,0 +1,10 @@ +open Grain_parsing; + +type t; + +let empty: t; + +let from_comments: list(Parsetree.comment) => t; + +let query: (t, Location.t) => list(Parsetree.comment); +let query_line: (t, int) => option(Parsetree.comment); diff --git a/compiler/src/formatting/comment_utils.re b/compiler/src/formatting/comment_utils.re deleted file mode 100644 index 2fddd42a6a..0000000000 --- a/compiler/src/formatting/comment_utils.re +++ /dev/null @@ -1,618 +0,0 @@ -open Grain_diagnostics; -open Grain_parsing; -module Doc = Res_doc; - -let rec get_comments_on_line = (line: int, comments: list(Parsetree.comment)) => - switch (comments) { - | [] => [] - | [c, ...remaining_comments] => - let c_loc: Location.t = Locations.get_comment_loc(c); - let (_, cmtline, cmtchar, _) = - Locations.get_raw_pos_info(c_loc.loc_start); - - if (cmtline == line) { - [c, ...get_comments_on_line(line, remaining_comments)]; - } else if (cmtline < line) { - get_comments_on_line(line, remaining_comments); - } else { - []; - }; // can stop early as there will be no more - }; - -let rec get_comments_before_location = - (~location: Location.t, comments: list(Parsetree.comment)) => { - let (_, stmt_start_line, stm_start_char, _) = - Locations.get_raw_pos_info(location.loc_start); - switch (comments) { - | [] => [] - | [cmt, ...remaining_comments] => - let c_loc: Location.t = Locations.get_comment_loc(cmt); - let (_, cmteline, cmtechar, _) = - Locations.get_raw_pos_info(c_loc.loc_end); - if (cmteline > stmt_start_line) { - []; // can stop now - } else if (cmteline < stmt_start_line) { - [cmt, ...get_comments_before_location(~location, remaining_comments)]; - } else if (cmtechar <= stm_start_char) { - [ - // ends on the same line as the stmt starts - cmt, - ...get_comments_before_location(~location, remaining_comments), - ]; - } else { - []; - }; - }; -}; - -let rec get_comments_inside_location = - (~location: Location.t, comments: list(Parsetree.comment)) => { - let (_, stmt_start_line, stm_start_char, _) = - Locations.get_raw_pos_info(location.loc_start); - let (_, stmt_end_line, stmt_end_char, _) = - 
Locations.get_raw_pos_info(location.loc_end); - - switch (comments) { - | [] => [] - | [cmt, ...remaining_comments] => - let c_loc: Location.t = Locations.get_comment_loc(cmt); - - let (_, cmtsline, cmtschar, _) = - Locations.get_raw_pos_info(c_loc.loc_start); - - let (_, cmteline, cmtechar, _) = - Locations.get_raw_pos_info(c_loc.loc_end); - if (cmtsline > stmt_end_line) { - []; // can stop now - } else if (cmteline < stmt_start_line) { - get_comments_inside_location(~location, remaining_comments); - } else if - // other cases were simple as we are on lines before or after. - // Now we need to check when the start line or end line match that we also take - // into account the start or end characte - (cmtsline > stmt_start_line - || cmtsline == stmt_start_line - && cmtschar >= stm_start_char) { - if (cmteline < stmt_end_line - || cmteline == stmt_end_line - && cmtechar <= stmt_end_char) { - [ - cmt, - ...get_comments_inside_location(~location, remaining_comments), - ]; - } else { - get_comments_inside_location(~location, remaining_comments); - }; - } else { - get_comments_inside_location(~location, remaining_comments); - }; - }; -}; - -let get_comments_between_locations = - (~loc1: Location.t, ~loc2: Location.t, comments: list(Parsetree.comment)) => { - let (_, stmt_end_line, stmt_end_char, _) = - Locations.get_raw_pos_info(loc1.loc_end); - let (_, stmt_start_line, stm_start_char, _) = - Locations.get_raw_pos_info(loc2.loc_start); - - // invert the request to look inside the location in the gap - - let start_loc: Lexing.position = { - pos_fname: "", - pos_lnum: stmt_end_line, - pos_bol: 0, - pos_cnum: stmt_end_char, - }; - let end_loc: Lexing.position = { - pos_fname: "", - pos_lnum: stmt_start_line, - pos_bol: 0, - pos_cnum: stm_start_char, - }; - - let location: Location.t = { - loc_start: start_loc, - loc_end: end_loc, - loc_ghost: false, - }; - get_comments_inside_location(~location, comments); -}; - -let get_comments_enclosed_and_before_location = - (~loc1: Location.t, ~loc2: Location.t, comments: list(Parsetree.comment)) => { - let (_, loc_start_line, loc_start_char, _) = - Locations.get_raw_pos_info(loc1.loc_start); - let (_, stmt_start_line, stm_start_char, _) = - Locations.get_raw_pos_info(loc2.loc_start); - - // invert the request to look inside the location in the gap - - let start_loc: Lexing.position = { - pos_fname: "", - pos_lnum: loc_start_line, - pos_bol: 0, - pos_cnum: loc_start_char, - }; - let end_loc: Lexing.position = { - pos_fname: "", - pos_lnum: stmt_start_line, - pos_bol: 0, - pos_cnum: stm_start_char, - }; - - let location: Location.t = { - loc_start: start_loc, - loc_end: end_loc, - loc_ghost: false, - }; - - get_comments_inside_location(~location, comments); -}; - -let get_comments_from_start_of_enclosing_location = - ( - ~enclosing_location: Location.t, - ~location: Location.t, - comments: list(Parsetree.comment), - ) => { - let (_, wrap_start_line, wrap_start_char, _) = - Locations.get_raw_pos_info(enclosing_location.loc_start); - let (_, loc_start_line, loc_start_char, _) = - Locations.get_raw_pos_info(location.loc_start); - - let start_loc: Lexing.position = { - pos_fname: "", - pos_lnum: wrap_start_line, - pos_bol: 0, - pos_cnum: wrap_start_char, - }; - let end_loc: Lexing.position = { - pos_fname: "", - pos_lnum: loc_start_line, - pos_bol: 0, - pos_cnum: loc_start_char, - }; - - let location: Location.t = { - loc_start: start_loc, - loc_end: end_loc, - loc_ghost: false, - }; - get_comments_inside_location(~location, comments); -}; - -let 
get_comments_between_locs = - ( - ~begin_loc: Location.t, - ~end_loc: Location.t, - comments: list(Parsetree.comment), - ) => { - let (_, stmt_end_line, stmt_end_char, _) = - Locations.get_raw_pos_info(begin_loc.loc_end); - let (_, stmt_start_line, stm_start_char, _) = - Locations.get_raw_pos_info(end_loc.loc_start); - - // invert the request to look inside the location in the gap - - let start_loc: Lexing.position = { - pos_fname: "", - pos_lnum: stmt_end_line, - pos_bol: 0, - pos_cnum: stmt_end_char, - }; - let end_loc: Lexing.position = { - pos_fname: "", - pos_lnum: stmt_start_line, - pos_bol: 0, - pos_cnum: stm_start_char, - }; - - let location: Location.t = { - loc_start: start_loc, - loc_end: end_loc, - loc_ghost: false, - }; - get_comments_inside_location(~location, comments); -}; - -let rec get_comments_on_line_end = - (~line: int, ~char: int, comments: list(Parsetree.comment)) => - switch (comments) { - | [] => [] - | [c, ...remaining_comments] => - let c_loc: Location.t = Locations.get_comment_loc(c); - let (_, cmtline, cmtchar, _) = - Locations.get_raw_pos_info(c_loc.loc_start); - - if (cmtline > line) { - []; // can stop early as there will be no more - } else if (cmtline == line && cmtchar >= char) { - [c, ...get_comments_on_line_end(~line, ~char, List.tl(comments))]; - } else { - get_comments_on_line_end(~line, ~char, List.tl(comments)); - }; - }; - -let rec get_comments_on_line_start = (~line: int, ~char: int, comments) => - switch (comments) { - | [] => [] - | [c, ...remaining_comments] => - let c_loc: Location.t = Locations.get_comment_loc(c); - let (_, cmtline, cmtchar, _) = - Locations.get_raw_pos_info(c_loc.loc_start); - - if (cmtline > line) { - []; // can stop early as there will be no more - } else if (cmtline == line && cmtchar <= char) { - [c, ...get_comments_on_line_start(~line, ~char, List.tl(comments))]; - } else { - get_comments_on_line_start(~line, ~char, List.tl(comments)); - }; - }; - -let get_comments_to_end_of_line = - (~location: Location.t, comments: list(Parsetree.comment)) => { - let (_, stmt_end_line, stmt_end_char, _) = - Locations.get_raw_pos_info(location.loc_end); - - get_comments_on_line_end( - ~line=stmt_end_line, - ~char=stmt_end_char, - comments, - ); -}; - -let comment_to_doc = (comment: Parsetree.comment) => { - let comment_string = Comments.get_comment_source(comment); - // We use cmt_source over cmt_content, which doesn't include the comment delimiters (// or /*) - // if we use cmt_source, it passes through the newline for line comments, which we don't want here - // we want our own line/hardline formatting blocks - - Doc.text(String.trim(comment_string)); -}; - -let get_after_brace_comments = - ( - ~first: option(Location.t)=?, - ~loc: Location.t, - comments: list(Parsetree.comment), - ) => { - let (_, startline, startc, _) = Locations.get_raw_pos_info(loc.loc_start); - - let cmts = get_comments_on_line(startline, comments); - switch (cmts) { - | [] => cmts - | [fst, ...rem] => - switch (first) { - | None => cmts - | Some(leading) => - let (_, firststartline, firststartc, _) = - Locations.get_raw_pos_info(leading.loc_start); - - List.filter( - cmt => { - let cmt_loc = Locations.get_comment_loc(cmt); - let (_, cmtendline, cmtendc, _) = - Locations.get_raw_pos_info(cmt_loc.loc_end); - cmtendline < firststartline - || cmtendline == firststartline - && cmtendc <= firststartc; - }, - cmts, - ); - } - }; -}; - -let rec comments_inner = - ( - ~prev: option(Parsetree.comment)=?, - comments: list(Parsetree.comment), - ) => { - switch (prev) { - | 
None => - switch (comments) { - | [] => [Doc.nil] - | [cmt, ...rem] => [ - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...comments_inner(~prev=cmt, rem), - ] - } - | Some(prev_cmt) => - switch (comments) { - | [] => [Doc.nil] - | [cmt, ...rem] => - let (_, prev_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(prev_cmt).loc_end, - ); - let (_, this_line, _, _) = - Locations.get_raw_pos_info(Locations.get_comment_loc(cmt).loc_start); - - switch (this_line - prev_line) { - | 0 => [ - Doc.space, - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...comments_inner(~prev=cmt, rem), - ] - - | 1 => [ - switch (prev_cmt) { - | Line(_) => Doc.nil - | _ => Doc.hardLine - }, - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...comments_inner(~prev=cmt, rem), - ] - | _ => [ - switch (prev_cmt) { - | Doc(_) - | Shebang(_) - | Line(_) => Doc.nil - | _ => Doc.hardLine - }, - Doc.hardLine, - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...comments_inner(~prev=cmt, rem), - ] - }; - } - }; -}; - -let inbetween_comments_to_docs = - (~offset: bool, comments: list(Parsetree.comment)) => - switch (comments) { - | [] => Doc.nil - | _remaining_comments => - if (offset) { - Doc.concat([Doc.space, Doc.concat(comments_inner(comments))]); - } else { - Doc.concat(comments_inner(comments)); - } - }; - -let rec get_comments_after_location = - (~location: Location.t, comments: list(Parsetree.comment)) => { - let (_, stmt_end_line, stmt_end_char, _) = - Locations.get_raw_pos_info(location.loc_end); - - switch (comments) { - | [] => [] - | [cmt, ...remaining_comments] => - let c_loc: Location.t = Locations.get_comment_loc(cmt); - - let (_, cmtsline, cmtschar, _) = - Locations.get_raw_pos_info(c_loc.loc_start); - - if (cmtsline > stmt_end_line - || cmtsline == stmt_end_line - && cmtschar > stmt_end_char) { - [cmt, ...get_comments_after_location(~location, remaining_comments)]; - } else { - get_comments_after_location(~location, remaining_comments); - }; - }; -}; - -let rec trailing_comments_inner = - ( - ~prev: option(Parsetree.comment)=?, - comments: list(Parsetree.comment), - ) => { - switch (prev) { - | None => - switch (comments) { - | [] => [] - | [cmt] => [comment_to_doc(cmt)] - | [cmt, ...rem] => [ - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...trailing_comments_inner(~prev=cmt, rem), - ] - } - | Some(prev_cmt) => - let (_, prev_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(prev_cmt).loc_end, - ); - - switch (comments) { - | [] => [] - | [cmt] => - let (_, this_line, _, _) = - Locations.get_raw_pos_info(Locations.get_comment_loc(cmt).loc_start); - - switch (this_line - prev_line) { - | 0 => [Doc.space, comment_to_doc(cmt)] - | 1 => [ - switch (prev_cmt) { - | Line(_) => Doc.nil - | _ => Doc.hardLine - }, - comment_to_doc(cmt), - ] - | _ => [Doc.hardLine, comment_to_doc(cmt)] - }; - | [cmt, ...rem] => - let (_, this_line, _, _) = - Locations.get_raw_pos_info(Locations.get_comment_loc(cmt).loc_start); - - switch (this_line - prev_line) { - | 0 => [ - Doc.space, - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | 
Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...trailing_comments_inner(~prev=cmt, rem), - ] - - | 1 => [ - switch (prev_cmt) { - | Line(_) => Doc.nil - | _ => Doc.hardLine - }, - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...trailing_comments_inner(~prev=cmt, rem), - ] - | _ => [ - Doc.hardLine, - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...trailing_comments_inner(~prev=cmt, rem), - ] - }; - }; - }; -}; - -let block_trailing_comments_docs = (comments: list(Parsetree.comment)) => - switch (comments) { - | [] => Doc.nil - | _remaining_comments => Doc.concat(trailing_comments_inner(comments)) - }; - -let single_line_of_comments = (comments: list(Parsetree.comment)) => - switch (comments) { - | [] => Doc.nil - | _ => - Doc.concat([ - Doc.space, - Doc.join( - ~sep=Doc.space, - List.map(c => {comment_to_doc(c)}, comments), - ), - ]) - }; - -let rec new_comments_inner = - ( - ~prev: option(Parsetree.comment)=?, - comments: list(Parsetree.comment), - ) => { - switch (prev) { - | None => - switch (comments) { - | [] => [] - | [cmt, ...rem] => [ - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...new_comments_inner(~prev=cmt, rem), - ] - } - | Some(prev_cmt) => - switch (comments) { - | [] => [] - | [cmt, ...rem] => - let (_, prev_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(prev_cmt).loc_end, - ); - let (_, this_line, _, _) = - Locations.get_raw_pos_info(Locations.get_comment_loc(cmt).loc_start); - - switch (this_line - prev_line) { - | 0 => [ - Doc.space, - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...new_comments_inner(~prev=cmt, rem), - ] - - | 1 => [ - switch (prev_cmt) { - | Line(_) - | Doc(_) => Doc.nil - | _ => Doc.hardLine - }, - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...new_comments_inner(~prev=cmt, rem), - ] - | _ => [ - switch (prev_cmt) { - | Doc(_) - | Shebang(_) - | Line(_) => Doc.nil - | _ => Doc.hardLine - }, - Doc.hardLine, - comment_to_doc(cmt), - switch (cmt) { - | Line(_) - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.nil - }, - ...new_comments_inner(~prev=cmt, rem), - ] - }; - } - }; -}; - -let new_comments_to_docs = (comments: list(Parsetree.comment)) => - switch (comments) { - | [] => Doc.nil - | _remaining_comments => Doc.concat(new_comments_inner(comments)) - }; diff --git a/compiler/src/formatting/debug.re b/compiler/src/formatting/debug.re deleted file mode 100644 index 6c9288664b..0000000000 --- a/compiler/src/formatting/debug.re +++ /dev/null @@ -1,112 +0,0 @@ -open Grain; -open Compile; -open Grain_parsing; -open Grain_utils; -open Grain_diagnostics; - -let print_loc_string = (msg: string, loc: Grain_parsing.Location.t) => { - let (file, line, startchar, _) = Locations.get_raw_pos_info(loc.loc_start); - let (_, endline, endchar, _) = Locations.get_raw_pos_info(loc.loc_end); - - if (startchar >= 0) { - if (line == endline) { - Printf.sprintf("%s %d:%d,%d\n", msg, line, startchar, endchar); - } else { - Printf.sprintf( - "%s %d:%d - %d:%d\n", - msg, - line, - startchar, - endline, - endchar, - ); - }; - } else { - Printf.sprintf("Empty loc? 
%s %d:%d,%d\n", msg, line, startchar, endchar); - }; -}; - -let print_loc = (msg: string, loc: Grain_parsing.Location.t) => { - let (file, line, startchar, _) = Locations.get_raw_pos_info(loc.loc_start); - let (_, endline, endchar, _) = Locations.get_raw_pos_info(loc.loc_end); - - if (startchar >= 0) { - if (line == endline) { - Printf.printf("%s %d:%d,%d\n", msg, line, startchar, endchar); - } else { - Printf.printf( - "%s %d:%d - %d:%d\n", - msg, - line, - startchar, - endline, - endchar, - ); - }; - }; -}; -let debug_expression = (expr: Parsetree.expression) => { - switch (expr.pexp_desc) { - | PExpConstant(x) => print_loc("PExpConstant", expr.pexp_loc) - | PExpId({txt: id}) => print_loc("PExpId", expr.pexp_loc) - | PExpLet(rec_flag, mut_flag, vbs) => print_loc("PExpLet", expr.pexp_loc) - | PExpTuple(expressions) => print_loc("PExpTuple", expr.pexp_loc) - | PExpArray(expressions) => print_loc("PExpArray", expr.pexp_loc) - | PExpArrayGet(expression1, expression2) => - print_loc("PExpArrayGet", expr.pexp_loc) - | PExpArraySet(expression1, expression2, expression3) => - print_loc("PExpArraySet", expr.pexp_loc) - | PExpRecord(base, record) => print_loc("PExpRecord", expr.pexp_loc) - | PExpRecordGet(expression, {txt, _}) => - print_loc("PExpRecordGet", expr.pexp_loc) - | PExpRecordSet(expression, {txt, _}, expression2) => - print_loc("PExpRecordSet", expr.pexp_loc) - | PExpMatch(expression, match_branches) => - print_loc("PExpMatch", expr.pexp_loc) - | PExpPrim0(prim0) => print_loc("PExpPrim0", expr.pexp_loc) - | PExpPrim1(prim1, expression) => print_loc("PExpPrim1", expr.pexp_loc) - | PExpPrim2(prim2, expression, expression1) => - print_loc("PExpPrim2", expr.pexp_loc) - | PExpPrimN(primn, expressions) => print_loc("PExpPrimN", expr.pexp_loc) - | PExpIf(condition, trueExpr, falseExpr) => - print_loc("PExpIf", expr.pexp_loc) - | PExpWhile(expression, expression1) => - print_loc("PExpWhile", expr.pexp_loc) - | PExpFor(optexpression1, optexpression2, optexpression3, expression4) => - print_loc("PExpFor", expr.pexp_loc) - | PExpContinue => print_loc("PExpContinue", expr.pexp_loc) - | PExpBreak => print_loc("PExpBreak", expr.pexp_loc) - | PExpReturn(expression) => print_loc("PExpReturn", expr.pexp_loc) - | PExpConstraint(expression, parsed_type) => - print_loc("PExpConstraint", expr.pexp_loc) - | PExpLambda(patterns, expression) => - print_loc("PExpLambda", expr.pexp_loc) - | PExpApp(func, expressions) => print_loc("PExpApp", expr.pexp_loc) - | PExpConstruct(func, expression) => - print_loc("PExpConstruct", expr.pexp_loc) - | PExpBlock(expressions) => print_loc("PExpBlock", expr.pexp_loc) - | PExpBoxAssign(expression, expression1) => - print_loc("PExpBoxAssign", expr.pexp_loc) - | PExpAssign(expression, expression1) => - print_loc("PExpAssign", expr.pexp_loc) - | PExpUse(module_, items) => print_loc("PExpUse", expr.pexp_loc) - }; -}; -let debug_pattern = (pat: Parsetree.pattern) => { - switch (pat.ppat_desc) { - | PPatAny => print_endline("PPatAny") - | PPatConstant(c) => print_endline("PPatAny") - | PPatVar({txt, _}) => print_endline("PPatVar " ++ txt) - | PPatTuple(patterns) => print_endline("PPatTuple") - | PPatArray(patterns) => print_endline("PPatArray") - | PPatRecord(patternlocs, closedflag) => print_endline("PPatRecord") - | PPatConstraint(pattern, parsed_type) => print_endline("PPatConstraint") - | PPatConstruct(location, patterns) => print_endline("PPatConstruct") - | PPatOr(pattern1, pattern2) => print_endline("PPatOr") - | PPatAlias(pattern, loc) => print_endline("PPatAlias") - }; -}; 
- -let print_comments = (comments: list(Grain_parsing.Parsetree.comment)) => { - List.map(Comments.print_comment, comments); -}; diff --git a/compiler/src/formatting/doc.re b/compiler/src/formatting/doc.re new file mode 100644 index 0000000000..5baea7d023 --- /dev/null +++ b/compiler/src/formatting/doc.re @@ -0,0 +1,464 @@ +/** + The Doc module implements a document IR and engine for pretty-printing code. + Concatenation of Doc.t nodes is O(1) and printing a document is O(n) to the + size of the document. + + The most important aspect of the engine are groups and how breaks interact + with them. By default, the engine will print a group by either breaking none + of the break hints in that group if the entire group would fit on that line + (known as Flat mode) or all of the break hints in that group if the group + would not fit if printed in Flat mode (known as Breaking mode). This covers + 95% of formatting use cases, and users should tend to reach for default + groups before considering one of the alternatives. For the remaining 5% of + use cases, groups can also be created in FitGroups mode or FitAll mode. In + FitGroups mode, the engine will attempt to print as many subgroups in Flat + mode as possible on each line, breaking only when necessary. In FitAll mode, + the engine will attempt to print as many subgroups in Breaking mode as + possible on each line. + + Hardlines should be avoided. Instead, emit break hints and allow the engine + to decide when breaks should be made. If hardlines must be used, consider + using the group's ~print_width parameter to manually specify how wide the + engine should consider the group. By default, a group is only considered as + wide as the content leading to the first hardline. + + That's most of what you need to know to effectively use this module! Further + details on each node are provided below for maintainers or curious consumers. + + IR nodes: + • Empty + Has no effect on the output of the printing engine. + • GroupBreaker + Causes the enclosing group to be printed in Breaking mode. + • String + Prints the string as-is. The `string` function is Utf8-aware. + • Blank + Prints the specified number of spaces. + • BreakHint + Tells the engine that a break can be made here. If the engine decides not + to print a break, it prints the supplied document instead. + • Hardline + Forces the engine to print a newline character. Width calculations for + the current line are truncated at the Hardline. If the `phantom` field is + set to `true`, instead the Hardline is calculated as a zero-width non- + breaking character (the newline is emitted in the output, but + calculations assume it's just not there). + • IfBroken + If the engine has broken the current group, prints the `breaking` + document and prints the `flat` document otherwise. Note that for FitAll + and FitGroups groups, the `flat` document would be printed if the + IfBroken node appears before the point at which the group is broken, as + the engine makes that decision while printing the group (unlike default + groups, where the engine makes this decision before printing the group). + • Indent + Introduces indentation equal to the number of spaces specified when the + enclosing group is broken. When newline characters are emitted, they are + followed by space characters equal to the amount of indentation that has + been applied by all groups, unless this would lead to trailing + whitespace. Note that if the enclosing group has not been broken, the + indentation will not apply. 
For example, in this document, + group(~kind=FitGroups, indent(2, + group(indent(2, string("foo") ++ break ++ string("bar"))) + )) + if the break hint is broken by the engine, `bar`'s indentation level will + only be two spaces, as the outer group could never be broken be broken by + the engine. + • Group + ~kind=Auto + The engine checks if the group would fit on the current line if printed + in Flat mode. If so, it prints the group in Flat mode and Breaking mode + otherwise. + ~kind=FitGroups + The engine begins printing the group. When it encounters a break hint, + it checks if the following node would fit on the current line. If that + node is a Group, its Flat mode width is used for the check. If the node + would not fit, a break is emitted. + ~kind=FitAll + The engine begins printing the group. When it encounters a break hint, + it checks if the following node would fit on the current line. If that + node is a Group, its Breaking mode width is used for the check. If the + node would not fit, a break is emitted. + • Concat + Prints the first document followed by the second document. Keeps track of + the combined width to allow the engine to make constant-time decisions + about line breaks. +*/ +type t = + | Empty + | GroupBreaker + | String({ + value: string, + width, + }) + | Blank({count: int}) + | BreakHint({ + doc: t, + flat_width: width, + }) + | Hardline({phantom: bool}) + | IfBroken({ + flat: t, + breaking: t, + flat_width: width, + breaking_width: width, + }) + | Indent({ + count: int, + doc: t, + has_group_breaker: bool, + flat_width: width, + breaking_width: width, + }) + | Group({ + group_type, + doc: t, + flat_width: width, + breaking_width: width, + }) + | Concat({ + left: t, + right: t, + has_group_breaker: bool, + flat_width: width, + breaking_width: width, + }) +and group_type = + | Auto + | FitGroups + | FitAll +and width = + | WithBreak(int) + | WithoutBreak(int); + +let breaking_width = doc => + switch (doc) { + | Empty + | GroupBreaker => WithoutBreak(0) + | String({width}) => width + | Indent({breaking_width}) + | Group({breaking_width}) + | Concat({breaking_width}) + | IfBroken({breaking_width}) => breaking_width + | Blank({count}) => WithoutBreak(count) + | BreakHint(_) + | Hardline({phantom: false}) => WithBreak(0) + | Hardline({phantom: true}) => WithoutBreak(0) + }; + +let flat_width = doc => + switch (doc) { + | Empty + | GroupBreaker => WithoutBreak(0) + | String({width}) => width + | Indent({flat_width}) + | Group({flat_width}) + | Concat({flat_width}) + | IfBroken({flat_width}) + | BreakHint({flat_width}) => flat_width + | Blank({count}) => WithoutBreak(count) + | Hardline({phantom: false}) => WithBreak(0) + | Hardline({phantom: true}) => WithoutBreak(0) + }; + +let has_group_breaker = doc => + switch (doc) { + | GroupBreaker => true + | Empty + | IfBroken(_) + | BreakHint(_) + | Blank(_) + | Hardline(_) + | Group(_) + | String(_) => false + | Concat({has_group_breaker}) + | Indent({has_group_breaker}) => has_group_breaker + }; + +let width_value = width => + switch (width) { + | WithBreak(w) + | WithoutBreak(w) => w + }; + +let group_breaker = GroupBreaker; +let string = s => + String({value: s, width: WithoutBreak(Utf8.countInString(s))}); +let blank = c => Blank({count: c}); +let break = doc => BreakHint({doc, flat_width: flat_width(doc)}); +let hardline = Hardline({phantom: false}); +let phantom_hardline = Hardline({phantom: true}); +let if_broken = (breaking, flat) => + IfBroken({ + flat, + breaking, + flat_width: flat_width(flat), + breaking_width: 
breaking_width(breaking), + }); +let indent = (c, doc) => + Indent({ + count: c, + doc, + has_group_breaker: has_group_breaker(doc), + flat_width: flat_width(doc), + breaking_width: breaking_width(doc), + }); +let group = (~print_width=?, ~kind=Auto, doc) => { + let (flat_width, breaking_width) = + switch (print_width) { + | Some(width) => (WithoutBreak(width), WithoutBreak(width)) + | None => (flat_width(doc), breaking_width(doc)) + }; + Group({group_type: kind, doc, flat_width, breaking_width}); +}; + +let concat = (left, right) => { + let add = (left, right) => { + switch (left, right) { + | (WithBreak(_), _) => left + | (WithoutBreak(l), WithoutBreak(r)) => WithoutBreak(l + r) + | (WithoutBreak(l), WithBreak(r)) => WithBreak(l + r) + }; + }; + + let has_group_breaker = + has_group_breaker(left) || has_group_breaker(right); + + let (flat_width, breaking_width) = + if (has_group_breaker) { + let breaking_width = add(breaking_width(left), breaking_width(right)); + (breaking_width, breaking_width); + } else { + ( + add(flat_width(left), flat_width(right)), + add(breaking_width(left), breaking_width(right)), + ); + }; + + Concat({left, right, has_group_breaker, flat_width, breaking_width}); +}; +let (++) = concat; + +let breakable_space = break(blank(1)); +let break = break(Empty); +let space = blank(1); +let empty = Empty; + +let comma = string(","); +let comma_breakable_space = comma ++ breakable_space; + +let concat_map = (~sep, ~lead, ~trail, ~f: (~final: bool, 'a) => t, l) => { + switch (l) { + | [] => empty + | [first, ..._] => + let rec concat_map = (acc, l) => { + switch (l) { + | [] => failwith("Impossible: empty list") + | [ultimate] => + // one element list + acc ++ f(~final=true, ultimate) ++ trail(ultimate) + | [penultimate, ultimate] => + acc + ++ f(~final=false, penultimate) + ++ sep(penultimate, ultimate) + ++ f(~final=true, ultimate) + ++ trail(ultimate) + | [elem, next, ...rest] => + concat_map( + acc ++ f(~final=false, elem) ++ sep(elem, next), + [next, ...rest], + ) + }; + }; + concat_map(lead(first), l); + }; +}; + +let parens = (~lead=?, ~trail=?, doc) => + group( + Option.fold(~none=string("("), ~some=lead => lead ++ string("("), lead) + ++ indent(2, break ++ doc) + ++ break + ++ Option.fold( + ~none=string(")"), + ~some=trail => string(")") ++ trail, + trail, + ), + ); +let braces = doc => + group( + string("{") + ++ indent(2, breakable_space ++ doc) + ++ breakable_space + ++ string("}"), + ); +let block_braces = (~lead, ~trail, doc) => + group( + ~print_width=2, + string("{") ++ indent(2, lead ++ doc) ++ trail ++ string("}"), + ); +let array_brackets = doc => + group(string("[>") ++ indent(2, break ++ doc) ++ break ++ string("]")); +let list_brackets = doc => + group(string("[") ++ indent(2, break ++ doc) ++ break ++ string("]")); +let angle_brackets = doc => + group(string("<") ++ indent(2, break ++ doc) ++ break ++ string(">")); + +let double_quotes = doc => string("\"") ++ doc ++ string("\""); + +let trailing_comma = if_broken(string(","), empty); + +module Engine = { + type mode = + | Flat + | Breaking + | FitFlat + | FitBreaking; + + type group = { + mode, + mutable global_indent: int, + mutable local_indent: int, + mutable broken: bool, + }; + + let print = (~write, ~eol, ~line_width, doc) => { + // The current column we're writing to + let column = ref(0); + // Queue for indentation to prevent lines with just spaces + let write_queue = ref(None); + // Continuation for Fit mode calculations that depend on the size of the next node + let k = ref(None); + + let 
eol = + switch (eol) { + | Grain_utils.Fs_access.CRLF => "\r\n" + | LF => "\n" + }; + + let flush_write_queue = () => { + switch (write_queue^) { + | Some(queued) => + write(queued); + write_queue := None; + | None => () + }; + }; + + let rec print = (~group, doc) => { + switch (k^) { + | Some(f) => + k := None; + f(doc); + | None => () + }; + + switch (doc) { + | Empty + | GroupBreaker => () + | String({value, width}) => + flush_write_queue(); + write(value); + column := column^ + width_value(width); + | Blank({count}) => + flush_write_queue(); + write(String.make(count, ' ')); + column := column^ + count; + | BreakHint({doc, flat_width: width}) => + let break = () => { + group.broken = true; + group.global_indent = group.global_indent + group.local_indent; + group.local_indent = 0; + write(eol); + write_queue := Some(String.make(group.global_indent, ' ')); + column := group.global_indent; + }; + switch (group.mode) { + | Flat => print(~group, doc) + | Breaking => break() + | FitFlat => + k := + Some( + next_doc => { + let next_width = width_value(flat_width(next_doc)); + let hint_width = width_value(width); + if (column^ + hint_width + next_width > line_width) { + break(); + } else { + print(~group, doc); + }; + }, + ) + | FitBreaking => + k := + Some( + next_doc => { + let next_width = width_value(breaking_width(next_doc)); + let hint_width = width_value(width); + if (column^ + hint_width + next_width > line_width) { + break(); + } else { + print(~group, doc); + }; + }, + ) + }; + | Hardline(_) => + group.broken = true; + group.global_indent = group.global_indent + group.local_indent; + group.local_indent = 0; + write(eol); + write_queue := Some(String.make(group.global_indent, ' ')); + column := group.global_indent; + | IfBroken({flat, breaking}) => + if (group.broken) { + print(~group, breaking); + } else { + print(~group, flat); + } + | Indent({count, doc}) => + let global_indent = group.global_indent; + let local_indent = group.local_indent; + group.local_indent = local_indent + count; + print(~group, doc); + group.global_indent = global_indent; + group.local_indent = local_indent; + | Group({doc, group_type, flat_width}) => + let width = width_value(flat_width); + let mode = + switch (group_type) { + | _ when has_group_breaker(doc) => Breaking + | Auto when column^ + width > line_width => Breaking + | Auto => Flat + | FitGroups => FitFlat + | FitAll => FitBreaking + }; + + let group = { + mode, + global_indent: group.global_indent, + local_indent: 0, + broken: false, + }; + print(~group, doc); + | Concat({left, right}) => + print(~group, left); + print(~group, right); + }; + }; + + let group = { + mode: Flat, + global_indent: 0, + local_indent: 0, + broken: false, + }; + print(~group, doc); + }; + + let to_string = (~eol, ~line_width, doc) => { + let b = Buffer.create(2048); + let write = Buffer.add_string(b); + print(~write, ~eol, ~line_width, doc); + Buffer.contents(b); + }; +}; diff --git a/compiler/src/formatting/doc.rei b/compiler/src/formatting/doc.rei new file mode 100644 index 0000000000..f23a90d87c --- /dev/null +++ b/compiler/src/formatting/doc.rei @@ -0,0 +1,54 @@ +type t; +type group_type = + | Auto + | FitGroups + | FitAll; + +let empty: t; +let group_breaker: t; +let string: string => t; +let blank: int => t; +let space: t; +let breakable_space: t; +let break: t; +let hardline: t; +let phantom_hardline: t; +let if_broken: (t, t) => t; +let indent: (int, t) => t; +let group: (~print_width: int=?, ~kind: group_type=?, t) => t; +let concat: (t, t) => t; +let (++): (t, t) 
=> t; + +let concat_map: + ( + ~sep: ('a, 'a) => t, + ~lead: 'a => t, + ~trail: 'a => t, + ~f: (~final: bool, 'a) => t, + list('a) + ) => + t; + +let comma: t; +let comma_breakable_space: t; +let trailing_comma: t; +let parens: (~lead: t=?, ~trail: t=?, t) => t; +let braces: t => t; +let block_braces: (~lead: t, ~trail: t, t) => t; +let array_brackets: t => t; +let list_brackets: t => t; +let angle_brackets: t => t; +let double_quotes: t => t; + +module Engine: { + let print: + ( + ~write: string => 'a, + ~eol: Grain_utils.Fs_access.eol, + ~line_width: int, + t + ) => + unit; + let to_string: + (~eol: Grain_utils.Fs_access.eol, ~line_width: int, t) => string; +}; diff --git a/compiler/src/formatting/fmt.re b/compiler/src/formatting/fmt.re new file mode 100644 index 0000000000..f775a8a7d1 --- /dev/null +++ b/compiler/src/formatting/fmt.re @@ -0,0 +1,3675 @@ +/** + This module implements a formatter for Grain code. For information about the + pretty-printing engine and specifics on its inner workings, see the Doc module. +*/ +open Grain; +open Compile; +open Grain_parsing; +open Grain_utils; +open Grain_diagnostics; +open Parsetree; +open Doc; + +exception FormatterError(string); + +type compilation_error = + | ParseError(exn) + | InvalidCompilationState; + +let parse_source = program_str => { + switch ( + { + let lines = String.split_on_char('\n', program_str); + let eol = Fs_access.determine_eol(List.nth_opt(lines, 0)); + let compile_state = + Compile.compile_string( + ~is_root_file=true, + ~hook=stop_after_parse, + ~name=?None, + program_str, + ); + + (compile_state, lines, eol); + } + ) { + | exception exn => Stdlib.Error(ParseError(exn)) + | ({cstate_desc: Parsed(parsed_program)}, lines, eol) => + Ok((parsed_program, Array.of_list(lines), eol)) + | _ => Error(InvalidCompilationState) + }; +}; + +type infix_grouping = + | None + | FormatterGrouping + | ParenGrouping; + +// As all (current) Grain infix operators have left-to-right associativity, +// operators with the same precedence do not need paren grouping on the left +// but do however need paren grouping on the right, as this indicates that the +// user grouped the operations in a particular manner. +// The only planned operator to have right-to-left associativity is +// exponentiation. When this is implemented, the logic is reversed. +type infix_side = + | Left + | Right; + +// This takes a location and makes the loc_end the same as loc_start. +// Its main purpose is to find comments between the start of an enclosing location and the first item inside. +let enclosing_start_location = loc => { + Location.{...loc, loc_end: loc.loc_start}; +}; + +// This takes a location and makes the loc_start the same as loc_end. +// Its main purpose is to find comments between the end of an enclosing location and the last item inside. 
+let enclosing_end_location = loc => { + Location.{...loc, loc_start: loc.loc_end}; +}; + +let is_same_op = (expr1, expr2) => + switch (expr1.pexp_desc, expr2.pexp_desc) { + | ( + PExpId({txt: Identifier.IdentName({txt: op1})}), + PExpId({txt: Identifier.IdentName({txt: op2})}), + ) => + op1 == op2 + | _ => false + }; + +let is_shift_or_concat_op = expr => + switch (expr.pexp_desc) { + | PExpId({txt: Identifier.IdentName({txt: op})}) => + if (String.length(op) > 1) { + switch (String.sub(op, 0, 2)) { + | "<<" + | ">>" + | "++" => true + | _ => false + }; + } else { + false; + } + | _ => false + }; + +let is_logic_op = expr => + switch (expr.pexp_desc) { + | PExpId({txt: Identifier.IdentName({txt: op})}) => + if (String.length(op) > 1) { + switch (String.sub(op, 0, 2)) { + | "<=" + | ">=" + | "==" + | "!=" + | "is" + | "&&" + | "||" => true + | _ => false + }; + } else { + false; + } + | _ => false + }; + +let is_math_op = expr => + if (is_logic_op(expr) || is_shift_or_concat_op(expr)) { + false; + } else { + switch (expr.pexp_desc) { + | PExpId({txt: Identifier.IdentName({txt: op})}) => + if (String.length(op) > 0) { + switch (op.[0]) { + | '*' + | '/' + | '%' + | '+' + | '-' + | '<' + | '>' + | '&' + | '^' + | '|' => true + | _ => false + }; + } else { + false; + } + | _ => false + }; + }; + +let op_precedence = startsWith => + switch (startsWith) { + | '*' + | '/' + | '%' => 120 + | '+' + | '-' => 110 + | '<' + | '>' => 90 + | '&' => 70 + | '^' => 60 + | '|' => 50 + | '_' => 10 + | _ => 9999 + }; + +let precedence = expr => { + switch (expr.pexp_desc) { + | PExpId({txt: Identifier.IdentName({txt: op})}) => + if (String.length(op) > 1) { + switch (String.sub(op, 0, 2)) { + | "++" => 110 + | "<<" + | ">>" => 100 + | "==" + | "!=" + | "is" => 80 + | "&&" => 40 + | "||" + | "??" => 30 + | _ => op_precedence(op.[0]) + }; + } else if (String.length(op) > 0) { + op_precedence(op.[0]); + } else { + 9999; + } + | _ => 9999 + }; +}; + +let infixop = op => { + switch (op.[0]) { + | '+' + | '-' + | '*' + | '/' + | '%' + | '=' + | '^' + | '<' + | '>' + | '&' + | '|' + | '?' => true + | _ when op == "is" => true + | _ when op == "isnt" => true + | _ when String.starts_with(~prefix="!=", op) => true + | _ + | exception _ => false + }; +}; + +let is_infix_op = expr => { + switch (expr.pexp_desc) { + | PExpId({txt: Identifier.IdentName({txt: op})}) => infixop(op) + | _ => false + }; +}; + +let prefixop = op => + switch (op.[0]) { + | '!' 
=> true + | _ + | exception _ => false + }; + +let is_prefix_op = expr => { + switch (expr.pexp_desc) { + | PExpId({txt: Identifier.IdentName({txt: op})}) => prefixop(op) + | _ => false + }; +}; + +let is_keyword_function = expr => { + switch (expr.pexp_desc) { + | PExpId({txt: Identifier.IdentName({txt: "assert" | "throw" | "fail"})}) => + true + | _ => false + }; +}; + +let needs_grouping = (~parent, ~side: infix_side, expr) => { + switch (expr.pexp_desc, side) { + | (PExpIf(_), _) => ParenGrouping + | (PExpApp(fn1, _), Left) + when is_infix_op(fn1) && precedence(fn1) < precedence(parent) => + ParenGrouping + | (PExpApp(fn1, _), Right) + when is_infix_op(fn1) && precedence(fn1) <= precedence(parent) => + ParenGrouping + | (PExpApp(fn1, _), _) => + if (is_infix_op(fn1)) { + if ((!is_math_op(parent) && !is_logic_op(parent)) + && !is_same_op(fn1, parent)) { + ParenGrouping; + } else if (precedence(fn1) == precedence(parent)) { + None; + } else { + FormatterGrouping; + }; + } else { + FormatterGrouping; + } + | (PExpConstant(PConstNumber(PConstNumberRational(_, _))), _) + when op_precedence('/') <= precedence(parent) => + ParenGrouping + | _ => FormatterGrouping + }; +}; + +let get_op_and_assignment = new_value => { + switch (new_value.pexp_desc) { + | PExpApp( + { + pexp_desc: + PExpId({ + txt: + Identifier.IdentName({ + txt: ("+" | "-" | "*" | "/" | "%") as op, + }), + }), + }, + [_, assignment], + ) => ( + op, + assignment, + ) + | _ => failwith("Precondition: Must be collapsible") + }; +}; + +let is_collapsible_record_assignment = (record, elem, new_value) => { + switch (record, elem, new_value) { + | ( + {pexp_desc: PExpId({txt: IdentName({txt: name})})}, + {txt: elem_name}, + { + pexp_desc: + PExpApp( + { + pexp_desc: + PExpId({ + txt: + Identifier.IdentName({txt: "+" | "-" | "*" | "/" | "%"}), + }), + }, + [ + { + paa_expr: { + pexp_desc: + PExpRecordGet( + { + pexp_desc: PExpId({txt: IdentName({txt: new_name})}), + }, + {txt: new_elem_name}, + ), + }, + }, + _, + ], + ), + }, + ) => + name == new_name && elem_name == new_elem_name + | _ => false + }; +}; + +let is_collapsible_assignment = (binding, new_value) => { + switch (binding, new_value) { + | ( + {pexp_desc: PExpId({txt: IdentName({txt: name})})}, + { + pexp_desc: + PExpApp( + { + pexp_desc: + PExpId({ + txt: + Identifier.IdentName({txt: "+" | "-" | "*" | "/" | "%"}), + }), + }, + [ + { + paa_expr: { + pexp_desc: PExpId({txt: IdentName({txt: new_name})}), + }, + }, + _, + ], + ), + }, + ) => + name == new_name + | _ => false + }; +}; + +let has_disable_formatting_comment = (~comment_tree, loc: Location.t) => { + switch (Commenttree.query_line(comment_tree, loc.loc_start.pos_lnum - 1)) { + | Some(Line({cmt_content: "formatter-ignore"})) => true + | _ => false + }; +}; + +type formatter = { + print_original_code: (formatter, Location.t) => Doc.t, + print_infix_prefix_op: (formatter, expression) => Doc.t, + print_constant: (formatter, ~loc: Location.t, constant) => Doc.t, + print_punnable_pattern: (formatter, (loc(Identifier.t), pattern)) => Doc.t, + print_lambda_argument: (formatter, lambda_argument) => Doc.t, + print_pattern: (formatter, pattern) => Doc.t, + print_ident_string: (formatter, string) => Doc.t, + print_identifier: (formatter, Identifier.t) => Doc.t, + print_punnable_expression: + (formatter, (loc(Identifier.t), expression)) => Doc.t, + print_grouped_access_expression: (formatter, expression) => Doc.t, + print_use_item: (formatter, use_item) => Doc.t, + print_match_branch: (formatter, match_branch) => Doc.t, + 
print_attribute: (formatter, attribute) => Doc.t, + print_application_argument: + (formatter, ~infix_wrap: t => t=?, application_argument) => Doc.t, + print_if: + ( + formatter, + ~force_blocks: bool=?, + ~loc: Location.t, + expression, + expression, + option(expression) + ) => + Doc.t, + print_expression: (formatter, ~infix_wrap: t => t=?, expression) => Doc.t, + print_value_binding: (formatter, value_binding) => Doc.t, + print_parsed_type_argument: (formatter, parsed_type_argument) => Doc.t, + print_type: (formatter, parsed_type) => Doc.t, + print_label_declaration: (formatter, label_declaration) => Doc.t, + print_constructor_arguments: (formatter, constructor_arguments) => Doc.t, + print_exception: (formatter, type_exception) => Doc.t, + print_constructor_declaration: (formatter, constructor_declaration) => Doc.t, + print_data_declaration: (formatter, data_declaration) => Doc.t, + print_primitive_description: (formatter, primitive_description) => Doc.t, + print_include_declaration: (formatter, include_declaration) => Doc.t, + print_module_declaration: (formatter, module_declaration) => Doc.t, + print_value_description: (formatter, value_description) => Doc.t, + print_provide_item: (formatter, provide_item) => Doc.t, + print_toplevel_stmt: (formatter, toplevel_stmt) => Doc.t, + print_comment_range: + ( + ~none: t=?, + ~lead: t=?, + ~trail: t=?, + ~allow_breaks: bool=?, + ~block_start: bool=?, + ~block_end: bool=?, + Location.t, + Location.t + ) => + Doc.t, + print_program: (formatter, parsed_program) => Doc.t, +}; + +let print_original_code = (~source, fmt, location: Location.t) => { + let (_, start_line, startc, _) = + Locations.get_raw_pos_info(location.loc_start); + let (_, end_line, endc, _) = Locations.get_raw_pos_info(location.loc_end); + + let (++) = Stdlib.(++); + + let str = + if (Array.length(source) > end_line - 1) { + if (start_line == end_line) { + String_utils.Utf8.sub(source[start_line - 1], startc, endc - startc); + } else { + let text = ref(""); + for (line in start_line - 1 to end_line - 1) { + if (line + 1 == start_line) { + text := + text^ + ++ String_utils.Utf8.string_after(source[line], startc) + ++ "\n"; + } else if (line + 1 == end_line) { + text := text^ ++ String_utils.Utf8.sub(source[line], 0, endc); + } else { + text := text^ ++ source[line] ++ "\n"; + }; + }; + text^; + }; + } else { + raise(FormatterError("Requested beyond end of original source")); + }; + + string(str); +}; + +let print_infix_prefix_op = (fmt, expr) => { + switch (expr.pexp_desc) { + | PExpId({txt: Identifier.IdentName({txt: op})}) => string(op) + | _ => failwith("Impossible: non- prefix or infix op") + }; +}; + +let print_constant = (fmt, ~loc, constant) => { + fmt.print_original_code(fmt, loc); +}; + +let print_punnable_pattern = + ( + fmt, + ({txt: ident, loc: ident_loc}, pat): ( + Location.loc(Identifier.t), + pattern, + ), + ) => { + switch (pat.ppat_desc) { + | PPatVar({txt: name}) when Identifier.string_of_ident(ident) == name => + // Don't forget the comments that could have been between a punnable name and value, e.g. 
+ // { foo: /* foo */ foo, } + fmt.print_comment_range(~trail=space, ident_loc, pat.ppat_loc) + ++ string(name) + | _ => + fmt.print_identifier(fmt, ident) + ++ string(":") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + ident_loc, + pat.ppat_loc, + ) + ++ fmt.print_pattern(fmt, pat) + }; +}; + +let print_lambda_argument = (fmt, arg) => { + fmt.print_pattern(fmt, arg.pla_pattern) + ++ ( + switch (arg.pla_default) { + | Some(expr) => + string("=") + ++ fmt.print_comment_range(arg.pla_pattern.ppat_loc, expr.pexp_loc) + ++ fmt.print_expression(fmt, expr) + | None => empty + } + ); +}; + +let print_pattern = (fmt, {ppat_desc, ppat_loc}) => { + switch (ppat_desc) { + | PPatAny => string("_") + | PPatVar({txt: name}) => fmt.print_ident_string(fmt, name) + | PPatAlias(pat, {txt: alias, loc: alias_loc}) => + fmt.print_pattern(fmt, pat) + ++ string(" as") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + pat.ppat_loc, + alias_loc, + ) + ++ string(alias) + | PPatOr(lhs, rhs) => + fmt.print_pattern(fmt, lhs) + ++ string(" |") + ++ fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + lhs.ppat_loc, + rhs.ppat_loc, + ) + ++ fmt.print_pattern(fmt, rhs) + | PPatConstruct({txt: ident, loc: ident_loc}, cstr_pat) => + fmt.print_identifier(fmt, ident) + ++ ( + switch (cstr_pat) { + | PPatConstrRecord([], closed_flag) => + braces( + ( + switch (closed_flag) { + | Open => string("_") + | Closed => empty + } + ) + ++ fmt.print_comment_range( + ident_loc, + enclosing_end_location(ppat_loc), + ), + ) + | PPatConstrRecord(pats, closed_flag) => + braces( + concat_map( + ~lead= + ((next_ident, _)) => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + ident_loc, + next_ident.loc, + ), + ~sep= + (({loc: prev_loc}, _), ({loc: next_loc}, _)) => { + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev_loc, + next_loc, + ) + }, + ~trail= + (({loc: prev_loc}, _)) => + fmt.print_comment_range( + ~lead=space, + ~block_end=true, + prev_loc, + enclosing_end_location(ppat_loc), + ), + ~f= + (~final, p) => + if (final) { + group(fmt.print_punnable_pattern(fmt, p)) + ++ ( + switch (closed_flag) { + | Open => comma_breakable_space ++ string("_") + | Closed => trailing_comma + } + ); + } else { + group(fmt.print_punnable_pattern(fmt, p) ++ comma); + }, + pats, + ), + ) + | PPatConstrSingleton => empty + | PPatConstrTuple(pats) => + parens( + concat_map( + ~lead= + ({ppat_loc: next}) => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + ident_loc, + next, + ), + ~sep= + ({ppat_loc: prev}, {ppat_loc: next}) => { + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev, + next, + ) + }, + ~trail= + ({ppat_loc: prev}) => + fmt.print_comment_range( + ~lead=space, + ~block_end=true, + prev, + enclosing_end_location(ppat_loc), + ), + ~f= + (~final, p) => + if (final) { + group(fmt.print_pattern(fmt, p)) ++ trailing_comma; + } else { + group(fmt.print_pattern(fmt, p) ++ comma); + }, + pats, + ), + ) + } + ) + | PPatConstraint(pat, typ) => + fmt.print_pattern(fmt, pat) + ++ string(":") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + pat.ppat_loc, + typ.ptyp_loc, + ) + ++ fmt.print_type(fmt, typ) + | PPatConstant(constant) => + fmt.print_constant(fmt, ~loc=ppat_loc, constant) + | PPatRecord(pats, closed_flag) => + braces( + concat_map( + ~lead= + ((next_ident, _)) => + fmt.print_comment_range( + 
~block_start=true, + ~trail=space, + enclosing_start_location(ppat_loc), + next_ident.loc, + ), + ~sep= + ((_, {ppat_loc: prev_loc}), ({loc: next_loc}, _)) => { + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev_loc, + next_loc, + ) + }, + ~trail= + ((_, {ppat_loc: prev_loc})) => + fmt.print_comment_range( + ~lead=space, + ~block_end=true, + prev_loc, + enclosing_end_location(ppat_loc), + ), + ~f= + (~final, p) => + if (final) { + group(fmt.print_punnable_pattern(fmt, p)) + ++ ( + switch (closed_flag) { + | Open when pats == [] => string("_") + | Open => comma_breakable_space ++ string("_") + | Closed => trailing_comma + } + ); + } else { + group(fmt.print_punnable_pattern(fmt, p) ++ comma); + }, + pats, + ), + ) + | PPatArray([]) => + array_brackets( + fmt.print_comment_range( + ~block_start=true, + ~block_end=true, + ~lead=space, + enclosing_start_location(ppat_loc), + enclosing_end_location(ppat_loc), + ), + ) + | PPatArray(pats) => + array_brackets( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~none=if_broken(empty, space), + ~lead=space, + ~trail=space, + enclosing_start_location(ppat_loc), + next.ppat_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.ppat_loc, + next.ppat_loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~lead=space, + ~block_end=true, + prev.ppat_loc, + enclosing_end_location(ppat_loc), + ), + ~f= + (~final, p) => + if (final) { + group(fmt.print_pattern(fmt, p)) ++ trailing_comma; + } else { + group(fmt.print_pattern(fmt, p) ++ comma); + }, + pats, + ), + ) + | PPatList([]) => + list_brackets( + fmt.print_comment_range( + ~block_start=true, + ~block_end=true, + enclosing_start_location(ppat_loc), + enclosing_end_location(ppat_loc), + ), + ) + | PPatList(pats) => + list_brackets( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(ppat_loc), + switch (next) { + | ListItem(pat) + | ListSpread(pat, _) => pat.ppat_loc + }, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + switch (prev) { + | ListItem(pat) + | ListSpread(pat, _) => pat.ppat_loc + }, + switch (next) { + | ListItem(pat) + | ListSpread(pat, _) => pat.ppat_loc + }, + ), + ~trail= + prev => + fmt.print_comment_range( + ~lead=space, + ~block_end=true, + switch (prev) { + | ListItem(pat) + | ListSpread(pat, _) => pat.ppat_loc + }, + enclosing_end_location(ppat_loc), + ), + ~f= + (~final, item) => { + switch (item) { + | ListItem(pat) when final => + group(fmt.print_pattern(fmt, pat)) ++ trailing_comma + | ListItem(pat) => group(fmt.print_pattern(fmt, pat) ++ comma) + | ListSpread(pat, _) when final => + group(string("...") ++ fmt.print_pattern(fmt, pat)) + | ListSpread(pat, _) => + group(string("...") ++ fmt.print_pattern(fmt, pat) ++ comma) + } + }, + pats, + ), + ) + | PPatTuple(pats) => + parens( + concat_map( + ~lead= + ({ppat_loc: next}) => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(ppat_loc), + next, + ), + ~sep= + ({ppat_loc: prev}, {ppat_loc: next}) => { + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev, + next, + ) + }, + ~trail= + ({ppat_loc: prev}) => + fmt.print_comment_range( + ~lead=space, + ~block_end=true, + prev, + enclosing_end_location(ppat_loc), + ), + ~f= + (~final, p) 
=> + if (final) { + group(fmt.print_pattern(fmt, p)) ++ trailing_comma; + } else { + group(fmt.print_pattern(fmt, p) ++ comma); + }, + pats, + ), + ) + }; +}; + +let print_ident_string = (fmt, ident) => + if (infixop(ident) || prefixop(ident)) { + parens(string(ident)); + } else { + string(ident); + }; + +let print_identifier = (fmt, ident) => { + fmt.print_ident_string(fmt, Identifier.string_of_ident(ident)); +}; + +let print_punnable_expression = (fmt, ({txt: ident, loc: ident_loc}, expr)) => { + switch (expr.pexp_desc) { + | PExpId({txt: name}) when Identifier.equal(ident, name) => + // Don't forget the comments that could have been between a punnable name and value, e.g. + // { foo: /* foo */ foo, } + fmt.print_comment_range(~trail=space, ident_loc, expr.pexp_loc) + ++ fmt.print_identifier(fmt, name) + | _ => + fmt.print_identifier(fmt, ident) + ++ string(":") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + ident_loc, + expr.pexp_loc, + ) + ++ fmt.print_expression(fmt, expr) + }; +}; + +let print_grouped_access_expression = (fmt, expr) => + switch (expr.pexp_desc) { + | PExpConstant(_) + | PExpConstruct(_) + | PExpTuple(_) + | PExpId(_) + | PExpArrayGet(_) + | PExpArraySet(_) + | PExpRecordGet(_) + | PExpRecordSet(_) + | PExpRecord(_) + | PExpBlock(_) + | PExpArray(_) + | PExpList(_) => fmt.print_expression(fmt, expr) + | PExpApp(func, _) when is_infix_op(func) => + parens(fmt.print_expression(fmt, expr)) + | PExpApp(_) => fmt.print_expression(fmt, expr) + | _ => parens(fmt.print_expression(fmt, expr)) + }; + +let print_use_item = (fmt, use_item) => { + switch (use_item) { + | PUseType({name, alias, loc}) => + string("type") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(loc), + name.loc, + ) + ++ fmt.print_identifier(fmt, name.txt) + ++ ( + switch (alias) { + | None => empty + | Some({txt: alias, loc: alias_loc}) => + string(" as") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + name.loc, + alias_loc, + ) + ++ fmt.print_identifier(fmt, alias) + } + ) + | PUseException({name, alias, loc}) => + string("exception") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(loc), + name.loc, + ) + ++ fmt.print_identifier(fmt, name.txt) + ++ ( + switch (alias) { + | None => empty + | Some({txt: alias, loc: alias_loc}) => + string(" as") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + name.loc, + alias_loc, + ) + ++ fmt.print_identifier(fmt, alias) + } + ) + | PUseModule({name, alias, loc}) => + string("module") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(loc), + name.loc, + ) + ++ fmt.print_identifier(fmt, name.txt) + ++ ( + switch (alias) { + | None => empty + | Some({txt: alias, loc: alias_loc}) => + string(" as") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + name.loc, + alias_loc, + ) + ++ fmt.print_identifier(fmt, alias) + } + ) + | PUseValue({name, alias}) => + fmt.print_identifier(fmt, name.txt) + ++ ( + switch (alias) { + | None => empty + | Some({txt: alias, loc: alias_loc}) => + string(" as") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + name.loc, + alias_loc, + ) + ++ fmt.print_identifier(fmt, alias) + } + ) + }; +}; + +let print_match_branch = (fmt, {pmb_pat, pmb_body, pmb_guard}) => { + let (guard, guard_loc) = + switch (pmb_guard) { + | None => (empty, pmb_pat.ppat_loc) 
+ | Some(guard) => ( + fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + pmb_pat.ppat_loc, + guard.pexp_loc, + ) + ++ string("when ") + ++ fmt.print_expression(fmt, guard), + guard.pexp_loc, + ) + }; + let space_type = + switch (pmb_body.pexp_desc) { + | PExpBlock(_) => space + | _ => breakable_space + }; + group(fmt.print_pattern(fmt, pmb_pat)) + ++ guard + ++ string(" =>") + ++ indent( + 2, + fmt.print_comment_range( + ~none=space_type, + ~lead=space, + ~trail=space_type, + guard_loc, + pmb_body.pexp_loc, + ) + ++ group(fmt.print_expression(fmt, pmb_body)), + ); +}; + +let print_attribute = (fmt, attr) => { + switch (attr) { + | Asttypes.{attr_name: {txt: attr_name}, attr_args: []} => + string("@") ++ string(attr_name) + | {attr_name: {txt: attr_name, loc: attr_name_loc}, attr_args, attr_loc} => + string("@") + ++ string(attr_name) + ++ parens( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + attr_name_loc, + next.loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.loc, + next.loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + prev.loc, + enclosing_end_location(attr_loc), + ), + ~f= + (~final, attr_arg) => + if (final) { + double_quotes(string(attr_arg.txt)) ++ trailing_comma; + } else { + double_quotes(string(attr_arg.txt)) ++ comma; + }, + attr_args, + ), + ) + }; +}; + +let print_application_argument = (fmt, ~infix_wrap=?, arg) => { + ( + switch (arg.paa_label) { + | Unlabeled => empty + | Labeled({txt: label, loc: label_loc}) + | Default({txt: label, loc: label_loc}) => + string(label) + ++ string("=") + ++ fmt.print_comment_range(label_loc, arg.paa_expr.pexp_loc) + } + ) + ++ fmt.print_expression(fmt, ~infix_wrap?, arg.paa_expr); +}; + +let print_if = + (fmt, ~force_blocks=false, ~loc, condition, true_branch, false_branch) => + if (force_blocks) { + let true_branch_doc = + switch (true_branch.pexp_desc) { + | PExpBlock(_) => fmt.print_expression(fmt, true_branch) + | PExpIf(_) => parens(fmt.print_expression(fmt, true_branch)) + | _ => + block_braces( + ~lead=hardline, + ~trail=hardline, + fmt.print_expression(fmt, true_branch), + ) + }; + let false_branch_doc = + switch (false_branch) { + | Some({pexp_desc: PExpBlock(_)} as false_branch) => + Some(fmt.print_expression(fmt, false_branch)) + | Some({ + pexp_desc: PExpIf(condition, true_branch, false_branch), + pexp_loc: loc, + }) => + Some( + fmt.print_if( + fmt, + ~loc, + ~force_blocks, + condition, + true_branch, + false_branch, + ), + ) + | Some(false_branch) => + Some( + block_braces( + ~lead=hardline, + ~trail=hardline, + fmt.print_expression(fmt, false_branch), + ), + ) + | None => None + }; + group( + string("if ") + ++ parens( + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(loc), + condition.pexp_loc, + ) + ++ fmt.print_expression(fmt, ~infix_wrap=Fun.id, condition) + ++ fmt.print_comment_range( + ~block_end=true, + ~lead=space, + condition.pexp_loc, + true_branch.pexp_loc, + ), + ) + ++ space + ++ true_branch_doc + ++ ( + switch (false_branch_doc) { + | Some(false_branch_doc) => + fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + true_branch.pexp_loc, + Option.get(false_branch).pexp_loc, + ) + ++ string("else ") + ++ false_branch_doc + | None => empty + } + ), + ); + } else { + switch (true_branch.pexp_desc, false_branch) { + | (PExpBlock(_), _) + | (_, 
Some({pexp_desc: PExpBlock(_) | PExpIf(_)})) => + fmt.print_if( + fmt, + ~loc, + ~force_blocks=true, + condition, + true_branch, + false_branch, + ) + | (_, None) => + let true_branch_doc = + switch (true_branch.pexp_desc) { + | PExpIf(_) => parens(fmt.print_expression(fmt, true_branch)) + | _ => fmt.print_expression(fmt, true_branch) + }; + group( + string("if ") + ++ parens( + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(loc), + condition.pexp_loc, + ) + ++ fmt.print_expression(fmt, ~infix_wrap=Fun.id, condition) + ++ fmt.print_comment_range( + ~block_end=true, + ~lead=space, + condition.pexp_loc, + true_branch.pexp_loc, + ), + ) + ++ indent(2, breakable_space ++ true_branch_doc), + ); + | (_, Some(false_branch)) => + let true_branch_doc = + switch (true_branch.pexp_desc) { + | PExpIf(_) => parens(fmt.print_expression(fmt, true_branch)) + | _ => fmt.print_expression(fmt, true_branch) + }; + group( + string("if ") + ++ parens( + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(loc), + condition.pexp_loc, + ) + ++ fmt.print_expression(fmt, ~infix_wrap=Fun.id, condition) + ++ fmt.print_comment_range( + ~block_end=true, + ~lead=space, + condition.pexp_loc, + true_branch.pexp_loc, + ), + ) + ++ indent(2, breakable_space ++ true_branch_doc) + ++ fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + true_branch.pexp_loc, + false_branch.pexp_loc, + ) + ++ string("else") + ++ indent( + 2, + breakable_space ++ fmt.print_expression(fmt, false_branch), + ), + ); + }; + }; + +let print_expression = + (~comment_tree, fmt, ~infix_wrap=d => group(indent(2, d)), expr) => { + group( + concat_map( + ~lead=_ => empty, + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + prev.Asttypes.attr_loc, + next.attr_loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + prev.Asttypes.attr_loc, + expr.pexp_core_loc, + ), + ~f=(~final, a) => fmt.print_attribute(fmt, a), + expr.pexp_attributes, + ), + ) + ++ ( + switch (expr.pexp_desc) { + | PExpId({txt: ident}) => fmt.print_identifier(fmt, ident) + | PExpConstant(constant) => + fmt.print_constant(fmt, ~loc=expr.pexp_loc, constant) + | PExpConstruct({txt: ident, loc: ident_loc}, cstr_expr) => + fmt.print_identifier(fmt, ident) + ++ ( + switch (cstr_expr) { + | PExpConstrSingleton => empty + | PExpConstrTuple(exprs) => + parens( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + ident_loc, + next.pexp_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.pexp_loc, + next.pexp_loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~lead=space, + ~block_end=true, + prev.pexp_loc, + enclosing_end_location(expr.pexp_loc), + ), + ~f= + (~final, e) => + if (final) { + group(fmt.print_expression(fmt, e)) ++ trailing_comma; + } else { + group(fmt.print_expression(fmt, e) ++ comma); + }, + exprs, + ), + ) + | PExpConstrRecord(exprs) => + braces( + concat_map( + ~lead= + ((next_ident, _)) => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + ident_loc, + next_ident.loc, + ), + ~sep= + ((_, prev), (next, _)) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.pexp_loc, + next.loc, + ), + ~trail= + ((_, prev)) => + fmt.print_comment_range( + ~lead=space, + 
~block_end=true, + prev.pexp_loc, + enclosing_end_location(expr.pexp_loc), + ), + ~f= + (~final, e) => + if (final) { + group(fmt.print_punnable_expression(fmt, e)); + } else { + group(fmt.print_punnable_expression(fmt, e) ++ comma); + }, + exprs, + ), + ) + } + ) + | PExpBlock(exprs) => + block_braces( + ~lead=empty, + ~trail=hardline, + concat_map( + ~lead= + first => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + enclosing_start_location(expr.pexp_loc), + first.pexp_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none= + switch ( + next.pexp_loc.loc_start.pos_lnum + - prev.pexp_loc.loc_end.pos_lnum + ) { + | 0 + | 1 => hardline + | _ => hardline ++ hardline + }, + ~lead=space, + ~trail=space, + prev.pexp_loc, + next.pexp_loc, + ), + ~trail= + last => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + last.pexp_loc, + enclosing_end_location(expr.pexp_loc), + ), + ~f= + (~final, e) => + if (has_disable_formatting_comment(~comment_tree, e.pexp_loc)) { + fmt.print_original_code(fmt, e.pexp_loc); + } else { + fmt.print_expression(fmt, e); + }, + exprs, + ), + ) + | PExpLet(rec_flag, mut_flag, vbs) => + string("let ") + ++ ( + switch (rec_flag) { + | Nonrecursive => empty + | Recursive => string("rec ") + } + ) + ++ ( + switch (mut_flag) { + | Immutable => empty + | Mutable => string("mut ") + } + ) + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~trail=space, + enclosing_start_location(expr.pexp_loc), + List.hd(vbs).pvb_loc, + ) + ++ group @@ + concat_map( + ~lead=_ => empty, + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + prev.pvb_loc, + next.pvb_loc, + ) + ++ string("and "), + ~trail=_ => empty, + ~f=(~final, vb) => fmt.print_value_binding(fmt, vb), + vbs, + ) + | PExpApp(fn, [arg]) when is_prefix_op(fn) => + fmt.print_infix_prefix_op(fmt, fn) + ++ fmt.print_comment_range(fn.pexp_loc, arg.paa_loc) + ++ ( + switch (needs_grouping(~parent=fn, ~side=Left, arg.paa_expr)) { + | ParenGrouping => parens(fmt.print_application_argument(fmt, arg)) + | FormatterGrouping => + group(fmt.print_application_argument(fmt, arg)) + | None => fmt.print_application_argument(fmt, arg) + } + ) + | PExpApp(fn, [lhs, rhs]) when is_infix_op(fn) => + // To ensure adequate grouping/breaking of subexpressions, chains of + // binops are included in a single Doc.group, with new groups inserted + // where necessary. By default, this group indents when breaking. This + // behavior is overridden by passing ~infix_wrap=Fun.id to + // print_expression. This is particularly useful for things like the + // condition of an `if` statement, where we don't need the additional + // indenting. 
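+ // Editor's note (illustrative sketch, not part of this change): in the
+ // comment-free case the expression below reduces to roughly
+ //   infix_wrap(lhs ++ space ++ op ++ breakable_space ++ rhs)
+ // so with the default wrap of `d => group(indent(2, d))` a long binop
+ // chain breaks after the operator and indents its continuation lines,
+ // while call sites that pass ~infix_wrap=Fun.id (the `if`/`while`
+ // conditions and the `match` scrutinee elsewhere in this file) print the
+ // chain without the extra indent because the surrounding parens already
+ // provide the grouping. This is a reading aid only; the actual output is
+ // decided by the engine's width calculations.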
+ infix_wrap @@ + ( + switch (needs_grouping(~parent=fn, ~side=Left, lhs.paa_expr)) { + | ParenGrouping => + parens( + fmt.print_application_argument(fmt, ~infix_wrap=Fun.id, lhs), + ) + | FormatterGrouping => + group( + indent( + 2, + fmt.print_application_argument(fmt, ~infix_wrap=Fun.id, lhs), + ), + ) + | None => fmt.print_application_argument(fmt, ~infix_wrap=Fun.id, lhs) + } + ) + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + ~allow_breaks=false, + lhs.paa_loc, + fn.pexp_loc, + ) + ++ fmt.print_infix_prefix_op(fmt, fn) + ++ fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + fn.pexp_loc, + rhs.paa_loc, + ) + ++ ( + switch (needs_grouping(~parent=fn, ~side=Right, rhs.paa_expr)) { + | ParenGrouping => + parens( + fmt.print_application_argument(fmt, ~infix_wrap=Fun.id, rhs), + ) + | FormatterGrouping => + group( + indent( + 2, + fmt.print_application_argument(fmt, ~infix_wrap=Fun.id, rhs), + ), + ) + | None => fmt.print_application_argument(fmt, ~infix_wrap=Fun.id, rhs) + } + ) + | PExpApp(fn, [rhs]) when is_keyword_function(fn) => + fmt.print_expression(fmt, fn) + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + fn.pexp_loc, + rhs.paa_loc, + ) + ++ fmt.print_expression(fmt, rhs.paa_expr) + | PExpApp(fn, exprs) => + group( + fmt.print_grouped_access_expression(fmt, fn) + ++ parens( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + fn.pexp_loc, + next.paa_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.paa_loc, + next.paa_loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + prev.paa_loc, + enclosing_end_location(expr.pexp_loc), + ), + ~f= + (~final, a) => + if (final) { + group(fmt.print_application_argument(fmt, a)); + } else { + group(fmt.print_application_argument(fmt, a) ++ comma); + }, + exprs, + ), + ), + ) + | PExpLambda( + [ + { + pla_label: Labeled({txt: label, loc: label_loc}), + pla_pattern: {ppat_desc: PPatVar({txt: var})}, + } as single_param, + ], + body, + ) + when label == var => + fmt.print_lambda_argument(fmt, single_param) + ++ string(" =>") + ++ fmt.print_comment_range(~lead=space, label_loc, body.pexp_loc) + ++ group( + switch (body.pexp_desc) { + | PExpBlock(_) => space ++ fmt.print_expression(fmt, body) + | _ => + indent(2, breakable_space ++ fmt.print_expression(fmt, body)) + }, + ) + | PExpLambda(params, body) => + parens( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(expr.pexp_loc), + next.pla_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.pla_loc, + next.pla_loc, + ), + ~trail= + last => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + last.pla_loc, + body.pexp_loc, + ), + ~f= + (~final, a) => + if (final) { + group(fmt.print_lambda_argument(fmt, a)) ++ trailing_comma; + } else { + group(fmt.print_lambda_argument(fmt, a) ++ comma); + }, + params, + ), + ) + ++ string(" =>") + ++ group( + switch (body.pexp_desc) { + | PExpBlock(_) => space ++ fmt.print_expression(fmt, body) + | _ => + indent(2, breakable_space ++ fmt.print_expression(fmt, body)) + }, + ) + | PExpContinue => string("continue") + | PExpBreak => string("break") + | PExpTuple(exprs) => + parens( + concat_map( + ~lead= + next => + 
fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(expr.pexp_loc), + next.pexp_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.pexp_loc, + next.pexp_loc, + ), + ~trail= + last => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + last.pexp_loc, + enclosing_end_location(expr.pexp_loc), + ), + ~f= + (~final, e) => + if (final) { + group(fmt.print_expression(fmt, e)) ++ trailing_comma; + } else { + group(fmt.print_expression(fmt, e) ++ comma); + }, + exprs, + ), + ) + | PExpArray([]) => + array_brackets( + fmt.print_comment_range( + ~block_start=true, + ~block_end=true, + ~lead=space, + enclosing_start_location(expr.pexp_loc), + enclosing_end_location(expr.pexp_loc), + ), + ) + | PExpArray(exprs) => + array_brackets( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~none=if_broken(empty, space), + ~lead=space, + ~trail=space, + enclosing_start_location(expr.pexp_loc), + next.pexp_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.pexp_loc, + next.pexp_loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + prev.pexp_loc, + enclosing_end_location(expr.pexp_loc), + ), + ~f= + (~final, e) => + if (final) { + group(fmt.print_expression(fmt, e)) ++ trailing_comma; + } else { + group(fmt.print_expression(fmt, e) ++ comma); + }, + exprs, + ), + ) + | PExpList([]) => + list_brackets( + fmt.print_comment_range( + ~block_start=true, + ~block_end=true, + enclosing_start_location(expr.pexp_loc), + enclosing_end_location(expr.pexp_loc), + ), + ) + | PExpList(items) => + list_brackets( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(expr.pexp_loc), + switch (next) { + | ListItem(expr) + | ListSpread(expr, _) => expr.pexp_loc + }, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + switch (prev) { + | ListItem(expr) + | ListSpread(expr, _) => expr.pexp_loc + }, + switch (next) { + | ListItem(expr) + | ListSpread(expr, _) => expr.pexp_loc + }, + ), + ~trail= + prev => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + switch (prev) { + | ListItem(expr) + | ListSpread(expr, _) => expr.pexp_loc + }, + enclosing_end_location(expr.pexp_loc), + ), + ~f= + (~final, item) => { + switch (item) { + | ListItem(expr) when final => + group(fmt.print_expression(fmt, expr)) ++ trailing_comma + | ListItem(expr) => + group(fmt.print_expression(fmt, expr) ++ comma) + | ListSpread(expr, _) when final => + group(string("...") ++ fmt.print_expression(fmt, expr)) + | ListSpread(expr, _) => + group( + string("...") ++ fmt.print_expression(fmt, expr) ++ comma, + ) + } + }, + items, + ), + ) + | PExpArrayGet(arr, elem) => + fmt.print_grouped_access_expression(fmt, arr) + ++ fmt.print_comment_range(arr.pexp_loc, elem.pexp_loc) + ++ list_brackets(fmt.print_expression(fmt, ~infix_wrap=Fun.id, elem)) + | PExpArraySet(arr, elem, new_value) => + fmt.print_grouped_access_expression(fmt, arr) + ++ fmt.print_comment_range(arr.pexp_loc, elem.pexp_loc) + ++ list_brackets(fmt.print_expression(fmt, ~infix_wrap=Fun.id, elem)) + ++ string(" =") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + elem.pexp_loc, + new_value.pexp_loc, + ) + ++ fmt.print_expression(fmt, 
new_value) + | PExpRecord(base, labels) => + braces( + concat_map( + ~lead= + ((next_ident, _)) => + switch (base) { + | None => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(expr.pexp_loc), + next_ident.loc, + ) + | Some(base_expr) => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(expr.pexp_loc), + base_expr.pexp_loc, + ) + ++ string("...") + ++ fmt.print_expression(fmt, base_expr) + ++ comma + ++ fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + base_expr.pexp_loc, + next_ident.loc, + ) + }, + ~sep= + ((_, {pexp_loc: prev_loc}), ({loc: next_loc}, _)) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev_loc, + next_loc, + ), + ~trail= + ((_, {pexp_loc: prev_loc})) => + fmt.print_comment_range( + ~lead=space, + ~block_end=true, + prev_loc, + enclosing_end_location(expr.pexp_loc), + ), + ~f= + (~final, e) => + if (final) { + group(fmt.print_punnable_expression(fmt, e)) + ++ ( + if (Option.is_none(base) && List.length(labels) == 1) { + comma; + } else { + trailing_comma; + } + ); + } else { + group(fmt.print_punnable_expression(fmt, e) ++ comma); + }, + labels, + ), + ) + | PExpRecordGet(record, elem) => + fmt.print_grouped_access_expression(fmt, record) + ++ string(".") + ++ fmt.print_comment_range(record.pexp_loc, elem.loc) + ++ fmt.print_identifier(fmt, elem.txt) + | PExpRecordSet(record, elem, new_value) + when is_collapsible_record_assignment(record, elem, new_value) => + let (op, assignment) = get_op_and_assignment(new_value); + + fmt.print_grouped_access_expression(fmt, record) + ++ string(".") + ++ fmt.print_comment_range(record.pexp_loc, elem.loc) + ++ fmt.print_identifier(fmt, elem.txt) + ++ space + ++ string(op) + ++ string("=") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + elem.loc, + // TODO(#1977): There appears to be a bug with the parser that the location of + // paa_loc is further to the left than the underlying expression, so + // here we just use the location of the expression directly. + assignment.paa_expr.pexp_loc, + ) + ++ fmt.print_application_argument(fmt, assignment); + | PExpRecordSet(record, elem, new_value) => + fmt.print_grouped_access_expression(fmt, record) + ++ string(".") + ++ fmt.print_comment_range(record.pexp_loc, elem.loc) + ++ fmt.print_identifier(fmt, elem.txt) + ++ string(" =") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + elem.loc, + new_value.pexp_loc, + ) + ++ fmt.print_expression(fmt, new_value) + | PExpPrim0(_) => failwith("Impossible: PExpPrim0 in parsetree") + | PExpPrim1(_) => failwith("Impossible: PExpPrim1 in parsetree") + | PExpPrim2(_) => failwith("Impossible: PExpPrim2 in parsetree") + | PExpPrimN(_) => failwith("Impossible: PExpPrimN in parsetree") + | PExpAssign(binding, new_value) + when is_collapsible_assignment(binding, new_value) => + let (op, assignment) = get_op_and_assignment(new_value); + + fmt.print_expression(fmt, binding) + ++ space + ++ string(op) + ++ string("=") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + binding.pexp_loc, + // TODO(#1977): There appears to be a bug with the parser that the location of + // paa_loc is further to the left than the underlying expression, so + // here we just use the location of the expression directly. 
+ assignment.paa_expr.pexp_loc, + ) + ++ fmt.print_application_argument(fmt, assignment); + | PExpAssign(binding, new_value) => + fmt.print_expression(fmt, binding) + ++ string(" =") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + binding.pexp_loc, + new_value.pexp_loc, + ) + ++ fmt.print_expression(fmt, new_value) + | PExpBoxAssign(binding, new_value) => + fmt.print_expression(fmt, binding) + ++ string(" :=") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + binding.pexp_loc, + new_value.pexp_loc, + ) + ++ fmt.print_expression(fmt, new_value) + | PExpReturn(return_expr) => + string("return") + ++ group( + switch (return_expr) { + | None => empty + | Some(return_expr) => + fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(expr.pexp_loc), + return_expr.pexp_loc, + ) + ++ fmt.print_expression(fmt, return_expr) + }, + ) + | PExpUse(ident, use_items) => + string("from") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(expr.pexp_loc), + ident.loc, + ) + ++ fmt.print_identifier(fmt, ident.txt) + ++ string(" use ") + ++ ( + switch (use_items) { + | PUseAll => + fmt.print_comment_range( + ~allow_breaks=false, + ~trail=space, + ident.loc, + enclosing_end_location(expr.pexp_loc), + ) + ++ string("*") + | PUseItems(items) => + braces( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + ident.loc, + switch (next) { + | PUseType({loc}) + | PUseException({loc}) + | PUseModule({loc}) + | PUseValue({loc}) => loc + }, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + switch (prev) { + | PUseType({loc}) + | PUseException({loc}) + | PUseModule({loc}) + | PUseValue({loc}) => loc + }, + switch (next) { + | PUseType({loc}) + | PUseException({loc}) + | PUseModule({loc}) + | PUseValue({loc}) => loc + }, + ), + ~trail= + prev => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + switch (prev) { + | PUseType({loc}) + | PUseException({loc}) + | PUseModule({loc}) + | PUseValue({loc}) => loc + }, + enclosing_end_location(expr.pexp_loc), + ), + ~f= + (~final, u) => + if (final) { + group(fmt.print_use_item(fmt, u)) ++ trailing_comma; + } else { + group(fmt.print_use_item(fmt, u) ++ comma); + }, + items, + ), + ) + } + ) + | PExpIf(cond, true_branch, false_branch) => + fmt.print_if(fmt, ~loc=expr.pexp_loc, cond, true_branch, false_branch) + | PExpWhile(cond, body) => + string("while ") + ++ parens( + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(expr.pexp_loc), + cond.pexp_loc, + ) + ++ fmt.print_expression(fmt, ~infix_wrap=Fun.id, cond) + ++ fmt.print_comment_range( + ~block_end=true, + ~lead=space, + cond.pexp_loc, + body.pexp_loc, + ), + ) + ++ space + ++ fmt.print_expression(fmt, body) + | PExpFor(init, cond, inc, body) => + let start_location = enclosing_start_location(expr.pexp_loc); + let (cond_start_loc, cond_block_start) = + switch (init) { + | None => (start_location, true) + | Some(init) => (init.pexp_loc, false) + }; + let (inc_start_loc, inc_block_start) = + switch (cond) { + | None => (cond_start_loc, cond_block_start) + | Some(cond) => (cond.pexp_loc, false) + }; + parens( + ~lead=string("for "), + ( + switch (init) { + | None => empty + | Some(init) => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + 
enclosing_start_location(expr.pexp_loc), + init.pexp_loc, + ) + ++ fmt.print_expression(fmt, init) + } + ) + ++ string(";") + ++ ( + switch (cond) { + | None => break + | Some(cond) => + fmt.print_comment_range( + ~block_start=cond_block_start, + ~none=breakable_space, + ~lead=if (cond_block_start) {empty} else {space}, + ~trail=breakable_space, + cond_start_loc, + cond.pexp_loc, + ) + ++ fmt.print_expression(fmt, cond) + } + ) + ++ string(";") + ++ ( + switch (inc) { + | None => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + inc_start_loc, + body.pexp_loc, + ) + | Some(inc) => + fmt.print_comment_range( + ~block_start=inc_block_start, + ~none=breakable_space, + ~lead=if (inc_block_start) {empty} else {space}, + ~trail=breakable_space, + inc_start_loc, + inc.pexp_loc, + ) + ++ fmt.print_expression(fmt, inc) + ++ fmt.print_comment_range( + ~block_end=true, + ~lead=space, + inc.pexp_loc, + body.pexp_loc, + ) + } + ), + ~trail=space ++ fmt.print_expression(fmt, body), + ); + | PExpMatch(value, {txt: branches, loc: branches_loc}) => + string("match ") + ++ parens( + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(expr.pexp_loc), + value.pexp_loc, + ) + ++ fmt.print_expression(fmt, ~infix_wrap=Fun.id, value) + ++ fmt.print_comment_range( + ~block_end=true, + ~lead=space, + value.pexp_loc, + branches_loc, + ), + ) + ++ space + ++ block_braces( + ~lead=empty, + ~trail=hardline, + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + enclosing_start_location(branches_loc), + next.pmb_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + prev.pmb_loc, + next.pmb_loc, + ), + ~trail= + last => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + last.pmb_loc, + enclosing_end_location(expr.pexp_loc), + ), + ~f= + (~final, b) => + group(fmt.print_match_branch(fmt, b) ++ comma), + branches, + ), + ) + | PExpConstraint(expr, typ) => + fmt.print_expression(fmt, expr) + ++ string(":") + ++ group( + indent( + 2, + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + expr.pexp_loc, + typ.ptyp_loc, + ) + ++ fmt.print_type(fmt, typ), + ), + ) + } + ); +}; + +let print_value_binding = (fmt, {pvb_pat, pvb_expr}) => { + group( + ~kind=FitAll, + fmt.print_pattern(fmt, pvb_pat) + ++ string(" =") + ++ indent( + 2, + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + pvb_pat.ppat_loc, + pvb_expr.pexp_loc, + ) + ++ fmt.print_expression(fmt, pvb_expr), + ), + ); +}; + +let print_parsed_type_argument = (fmt, arg) => { + ( + switch (arg.ptyp_arg_label) { + | Unlabeled => empty + | Labeled({txt: label, loc: label_loc}) + | Default({txt: label, loc: label_loc}) => + string(label) + ++ string(":") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + label_loc, + arg.ptyp_arg_type.ptyp_loc, + ) + } + ) + ++ fmt.print_type(fmt, arg.ptyp_arg_type); +}; + +let print_type = (fmt, {ptyp_desc, ptyp_loc}) => { + switch (ptyp_desc) { + | PTyAny => string("_") + | PTyVar(name) => string(name) + | PTyConstr({txt: ident, loc: ident_loc}, params) => + let name = Identifier.string_of_ident(ident); + string(name) + ++ ( + switch (params) { + | [] => empty + | typs => + angle_brackets( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + ident_loc, + next.ptyp_loc, + ), + ~sep= + (prev, next) => + 
fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.ptyp_loc, + next.ptyp_loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + prev.ptyp_loc, + enclosing_end_location(ptyp_loc), + ), + ~f= + (~final, t) => + if (final) { + group(fmt.print_type(fmt, t)); + } else { + group(fmt.print_type(fmt, t) ++ comma); + }, + typs, + ), + ) + } + ); + | PTyTuple(typs) => + parens( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(ptyp_loc), + next.ptyp_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.ptyp_loc, + next.ptyp_loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + prev.ptyp_loc, + enclosing_end_location(ptyp_loc), + ), + ~f= + (~final, t) => + if (final) { + group(fmt.print_type(fmt, t)); + } else { + group(fmt.print_type(fmt, t) ++ comma); + }, + typs, + ), + ) + | PTyArrow([{ptyp_arg_label: Unlabeled} as param], return) => + fmt.print_parsed_type_argument(fmt, param) + ++ string(" =>") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + param.ptyp_arg_loc, + return.ptyp_loc, + ) + ++ fmt.print_type(fmt, return) + | PTyArrow(params, return) => + parens( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(ptyp_loc), + next.ptyp_arg_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.ptyp_arg_loc, + next.ptyp_arg_loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + prev.ptyp_arg_loc, + return.ptyp_loc, + ), + ~f= + (~final, a) => + if (final) { + group(fmt.print_parsed_type_argument(fmt, a)) ++ trailing_comma; + } else { + group(fmt.print_parsed_type_argument(fmt, a) ++ comma); + }, + params, + ), + ) + ++ string(" => ") + ++ fmt.print_type(fmt, return) + | PTyPoly(_) => failwith("Impossible: PTyPoly in the parsetree") + }; +}; + +let print_label_declaration = + (fmt, {pld_name, pld_type, pld_mutable, pld_loc}) => { + ( + switch (pld_mutable) { + | Mutable => string("mut ") + | Immutable => empty + } + ) + ++ fmt.print_identifier(fmt, pld_name.txt) + ++ string(":") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(pld_loc), + pld_type.ptyp_loc, + ) + ++ fmt.print_type(fmt, pld_type); +}; +let print_constructor_arguments = (fmt, args) => { + switch (args) { + | PConstrTuple({txt: typs, loc: typs_loc}) => + parens( + concat_map( + ~lead= + first => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(typs_loc), + first.ptyp_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.ptyp_loc, + next.ptyp_loc, + ), + ~trail= + last => + fmt.print_comment_range( + ~lead=breakable_space, + last.ptyp_loc, + enclosing_end_location(typs_loc), + ), + ~f= + (~final, t) => + if (final) { + group(fmt.print_type(fmt, t)); + } else { + group(fmt.print_type(fmt, t) ++ comma); + }, + typs, + ), + ) + | PConstrRecord({txt: labels, loc: labels_loc}) => + braces( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(labels_loc), + next.pld_loc, + ), + ~sep= + (prev, next) => + 
fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.pld_loc, + next.pld_loc, + ), + ~trail= + last => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + last.pld_loc, + enclosing_end_location(labels_loc), + ), + ~f= + (~final, ld) => + if (final) { + group(fmt.print_label_declaration(fmt, ld)) + ++ ( + switch (labels) { + | [_single_element] => comma + | _ => trailing_comma + } + ); + } else { + group(fmt.print_label_declaration(fmt, ld) ++ comma); + }, + labels, + ), + ) + | PConstrSingleton => empty + }; +}; + +let print_exception = (fmt, {ptyexn_constructor, ptyexn_loc}) => { + string("exception") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(ptyexn_loc), + ptyexn_constructor.pext_name.loc, + ) + ++ string(ptyexn_constructor.pext_name.txt) + ++ ( + switch (ptyexn_constructor.pext_kind) { + | PExtDecl((PConstrTuple({loc}) | PConstrRecord({loc})) as args) => + fmt.print_comment_range(ptyexn_constructor.pext_name.loc, loc) + ++ fmt.print_constructor_arguments(fmt, args) + | PExtDecl(PConstrSingleton) + | PExtRebind(_) => empty + } + ); +}; + +let print_constructor_declaration = (fmt, {pcd_name, pcd_args}) => { + string(pcd_name.txt) + ++ ( + switch (pcd_args) { + | PConstrTuple({loc}) + | PConstrRecord({loc}) => fmt.print_comment_range(pcd_name.loc, loc) + | PConstrSingleton => empty + } + ) + ++ fmt.print_constructor_arguments(fmt, pcd_args); +}; + +let print_data_declaration = (fmt, decl) => { + switch (decl) { + | { + pdata_name, + pdata_params, + pdata_manifest, + pdata_kind: PDataAbstract, + pdata_rec, + pdata_loc, + } => + string("type ") + ++ ( + switch (pdata_rec) { + | Recursive => string("rec ") + | Nonrecursive => empty + } + ) + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~trail=space, + enclosing_start_location(pdata_loc), + pdata_name.loc, + ) + ++ string(pdata_name.txt) + ++ ( + switch (pdata_params) { + | [] => empty + | typs => + angle_brackets( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + pdata_name.loc, + next.ptyp_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.ptyp_loc, + next.ptyp_loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + prev.ptyp_loc, + switch (pdata_manifest) { + | None => enclosing_end_location(pdata_loc) + | Some(typ) => typ.ptyp_loc + }, + ), + ~f= + (~final, t) => + if (final) { + group(fmt.print_type(fmt, t)); + } else { + group(fmt.print_type(fmt, t) ++ comma); + }, + pdata_params, + ), + ) + } + ) + ++ ( + switch (pdata_manifest) { + | None => empty + | Some(typ) => + group( + ~kind=FitAll, + string(" =") + ++ indent( + 2, + ( + switch (pdata_params) { + | [] => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + pdata_name.loc, + typ.ptyp_loc, + ) + | _ => breakable_space + } + ) + ++ fmt.print_type(fmt, typ), + ), + ) + } + ) + | { + pdata_name, + pdata_params, + pdata_kind: PDataVariant(cstr_decls), + pdata_rec, + pdata_loc, + } => + string("enum ") + ++ ( + switch (pdata_rec) { + | Recursive => string("rec ") + | Nonrecursive => empty + } + ) + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~trail=space, + enclosing_start_location(pdata_loc), + pdata_name.loc, + ) + ++ string(pdata_name.txt) + ++ ( + switch (pdata_params) { + | [] => empty + | typs => + angle_brackets( + 
concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + pdata_name.loc, + next.ptyp_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.ptyp_loc, + next.ptyp_loc, + ), + ~trail=_ => empty, + ~f= + (~final, t) => + if (final) { + group(fmt.print_type(fmt, t)); + } else { + group(fmt.print_type(fmt, t) ++ comma); + }, + pdata_params, + ), + ) + } + ) + ++ space + ++ block_braces( + ~lead=empty, + ~trail=hardline, + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + List.fold_left( + (_, param) => param.ptyp_loc, + pdata_name.loc, + pdata_params, + ), + next.pcd_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + prev.pcd_loc, + next.pcd_loc, + ), + ~trail= + last => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + last.pcd_loc, + enclosing_end_location(pdata_loc), + ), + ~f= + (~final, cd) => + group(fmt.print_constructor_declaration(fmt, cd) ++ comma), + cstr_decls, + ), + ) + | { + pdata_name, + pdata_params, + pdata_kind: PDataRecord(labels), + pdata_rec, + pdata_loc, + } => + string("record ") + ++ ( + switch (pdata_rec) { + | Recursive => string("rec ") + | Nonrecursive => empty + } + ) + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~trail=space, + enclosing_start_location(pdata_loc), + pdata_name.loc, + ) + ++ string(pdata_name.txt) + ++ ( + switch (pdata_params) { + | [] => empty + | typs => + angle_brackets( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + pdata_name.loc, + next.ptyp_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + prev.ptyp_loc, + next.ptyp_loc, + ), + ~trail=_ => empty, + ~f= + (~final, t) => + if (final) { + group(fmt.print_type(fmt, t)); + } else { + group(fmt.print_type(fmt, t) ++ comma); + }, + pdata_params, + ), + ) + } + ) + ++ space + ++ block_braces( + ~lead=empty, + ~trail=hardline, + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + List.fold_left( + (_, param) => param.ptyp_loc, + pdata_name.loc, + pdata_params, + ), + next.pld_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + prev.pld_loc, + next.pld_loc, + ), + ~trail= + last => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + last.pld_loc, + enclosing_end_location(pdata_loc), + ), + ~f= + (~final, l) => + group(fmt.print_label_declaration(fmt, l) ++ comma), + labels, + ), + ) + }; +}; + +let print_primitive_description = (fmt, {pprim_ident, pprim_name, pprim_loc}) => { + string("primitive") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(pprim_loc), + pprim_ident.loc, + ) + ++ fmt.print_ident_string(fmt, pprim_ident.txt) + ++ string(" =") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + pprim_ident.loc, + pprim_name.loc, + ) + ++ double_quotes(string(pprim_name.txt)); +}; + +let print_include_declaration = (fmt, {pinc_path, pinc_alias, pinc_loc}) => { + string("include") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(pinc_loc), + pinc_path.loc, + ) + ++ 
double_quotes(string(pinc_path.txt)) + ++ ( + switch (pinc_alias) { + | None => empty + | Some({txt: alias, loc: alias_loc}) => + string(" as") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + pinc_path.loc, + alias_loc, + ) + ++ string(alias) + } + ); +}; + +let print_module_declaration = + (~comment_tree, fmt, {pmod_name, pmod_stmts, pmod_loc}) => { + string("module") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(pmod_loc), + pmod_name.loc, + ) + ++ string(pmod_name.txt) + ++ space + ++ block_braces( + ~lead=empty, + ~trail=hardline, + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + pmod_name.loc, + next.ptop_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none= + switch ( + next.ptop_loc.loc_start.pos_lnum + - prev.ptop_loc.loc_end.pos_lnum + ) { + | 0 + | 1 => hardline + | _ => hardline ++ hardline + }, + ~lead=space, + ~trail=hardline, + prev.ptop_loc, + next.ptop_loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~lead=space, + prev.ptop_loc, + enclosing_end_location(pmod_loc), + ), + ~f= + (~final, s) => + if (has_disable_formatting_comment(~comment_tree, s.ptop_loc)) { + fmt.print_original_code(fmt, s.ptop_loc); + } else { + fmt.print_toplevel_stmt(fmt, s); + }, + pmod_stmts, + ), + ); +}; + +let print_value_description = + (fmt, {pval_mod, pval_name, pval_name_alias, pval_type, pval_loc}) => { + group @@ + string(pval_name.txt) + ++ string(":") + ++ indent( + 2, + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + pval_name.loc, + pval_type.ptyp_loc, + ) + ++ fmt.print_type(fmt, pval_type) + ++ ( + switch (pval_name_alias) { + | None => empty + | Some(alias) => + string(" as") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + pval_type.ptyp_loc, + alias.loc, + ) + ++ string(alias.txt) + } + ) + ++ string(" from") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + Option.fold( + ~none=pval_type.ptyp_loc, + ~some=alias => alias.loc, + pval_name_alias, + ), + enclosing_end_location(pval_loc), + ) + ++ double_quotes(string(pval_mod.txt)), + ); +}; + +let print_provide_item = (fmt, provide_item) => { + switch (provide_item) { + | PProvideType({name, alias, loc}) => + string("type") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(loc), + name.loc, + ) + ++ fmt.print_identifier(fmt, name.txt) + ++ ( + switch (alias) { + | None => empty + | Some(alias) => + string(" as") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + name.loc, + enclosing_end_location(loc), + ) + ++ fmt.print_identifier(fmt, alias.txt) + } + ) + | PProvideException({name, alias, loc}) => + string("exception") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(loc), + name.loc, + ) + ++ fmt.print_identifier(fmt, name.txt) + ++ ( + switch (alias) { + | None => empty + | Some(alias) => + string(" as") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + name.loc, + enclosing_end_location(loc), + ) + ++ fmt.print_identifier(fmt, alias.txt) + } + ) + | PProvideModule({name, alias, loc}) => + string("module") + ++ 
fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(loc), + name.loc, + ) + ++ fmt.print_identifier(fmt, name.txt) + ++ ( + switch (alias) { + | None => empty + | Some(alias) => + string(" as") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + name.loc, + enclosing_end_location(loc), + ) + ++ fmt.print_identifier(fmt, alias.txt) + } + ) + | PProvideValue({name, alias, loc}) => + fmt.print_identifier(fmt, name.txt) + ++ ( + switch (alias) { + | None => empty + | Some(alias) => + string(" as") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + name.loc, + enclosing_end_location(loc), + ) + ++ fmt.print_identifier(fmt, alias.txt) + } + ) + }; +}; + +let print_toplevel_stmt = (fmt, stmt) => { + group( + concat_map( + ~lead=_ => empty, + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + prev.Asttypes.attr_loc, + next.attr_loc, + ), + ~trail= + prev => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + prev.Asttypes.attr_loc, + stmt.ptop_core_loc, + ), + ~f=(~final, a) => fmt.print_attribute(fmt, a), + stmt.ptop_attributes, + ), + ) + ++ group( + switch (stmt.ptop_desc) { + | PTopExpr(expr) => fmt.print_expression(fmt, expr) + | PTopException(provide_flag, ex) => + ( + switch (provide_flag) { + | Asttypes.NotProvided => empty + | Asttypes.Abstract => + string("abstract") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(stmt.ptop_core_loc), + ex.ptyexn_loc, + ) + | Asttypes.Provided => + string("provide") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(stmt.ptop_core_loc), + ex.ptyexn_loc, + ) + } + ) + ++ fmt.print_exception(fmt, ex) + | PTopData(datas) => + group @@ + concat_map( + ~lead=_ => empty, + ~sep= + ((_, _, prev), (_, _, next)) => { + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + prev, + next, + ) + ++ string("and") + ++ space + }, + ~trail=_ => empty, + ~f= + (~final, (provide_flag, decl, decl_loc)) => + group( + ( + switch (provide_flag) { + | Asttypes.NotProvided => empty + | Asttypes.Abstract => + string("abstract") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(decl_loc), + decl.pdata_loc, + ) + | Asttypes.Provided => + string("provide") + ++ fmt.print_comment_range( + ~none=space, + ~lead=space, + ~trail=space, + enclosing_start_location(decl_loc), + decl.pdata_loc, + ) + } + ) + ++ fmt.print_data_declaration(fmt, decl), + ), + datas, + ) + | PTopLet(provide_flag, rec_flag, mut_flag, vbs) => + group @@ + ( + switch (provide_flag) { + | NotProvided => empty + | Abstract => string("abstract ") + | Provided => string("provide ") + } + ) + ++ string("let ") + ++ ( + switch (rec_flag) { + | Nonrecursive => empty + | Recursive => string("rec ") + } + ) + ++ ( + switch (mut_flag) { + | Immutable => empty + | Mutable => string("mut ") + } + ) + ++ concat_map( + ~lead= + next => + fmt.print_comment_range( + ~allow_breaks=false, + ~trail=space, + enclosing_start_location(stmt.ptop_core_loc), + next.pvb_loc, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + prev.pvb_loc, + next.pvb_loc, + ) + ++ string("and") + ++ space, + ~trail=_ => 
empty, + ~f=(~final, vb) => group(fmt.print_value_binding(fmt, vb)), + vbs, + ) + | PTopPrimitive(provide_flag, prim_desc) => + ( + switch (provide_flag) { + | NotProvided => empty + | Abstract => string("abstract ") + | Provided => string("provide ") + } + ) + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~trail=space, + enclosing_start_location(stmt.ptop_core_loc), + prim_desc.pprim_loc, + ) + ++ fmt.print_primitive_description(fmt, prim_desc) + | PTopInclude(include_decl) => + fmt.print_include_declaration(fmt, include_decl) + | PTopForeign(provide_flag, value_desc) => + ( + switch (provide_flag) { + | NotProvided => empty + | Abstract => string("abstract ") + | Provided => string("provide ") + } + ) + ++ string("foreign wasm ") + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~trail=space, + enclosing_start_location(stmt.ptop_core_loc), + value_desc.pval_loc, + ) + ++ fmt.print_value_description(fmt, value_desc) + | PTopModule(provide_flag, module_decl) => + ( + switch (provide_flag) { + | NotProvided => empty + | Abstract => string("abstract ") + | Provided => string("provide ") + } + ) + ++ fmt.print_comment_range( + ~allow_breaks=false, + ~trail=space, + enclosing_start_location(stmt.ptop_core_loc), + module_decl.pmod_loc, + ) + ++ fmt.print_module_declaration(fmt, module_decl) + | PTopProvide(provide_items) => + string("provide ") + ++ braces( + concat_map( + ~lead= + next => + fmt.print_comment_range( + ~block_start=true, + ~trail=space, + enclosing_start_location(stmt.ptop_core_loc), + switch (next) { + | PProvideType({loc}) + | PProvideException({loc}) + | PProvideModule({loc}) + | PProvideValue({loc}) => loc + }, + ), + ~sep= + (prev, next) => + fmt.print_comment_range( + ~none=breakable_space, + ~lead=space, + ~trail=breakable_space, + switch (prev) { + | PProvideType({loc}) + | PProvideException({loc}) + | PProvideModule({loc}) + | PProvideValue({loc}) => loc + }, + switch (next) { + | PProvideType({loc}) + | PProvideException({loc}) + | PProvideModule({loc}) + | PProvideValue({loc}) => loc + }, + ), + ~trail= + prev => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + switch (prev) { + | PProvideType({loc}) + | PProvideException({loc}) + | PProvideModule({loc}) + | PProvideValue({loc}) => loc + }, + enclosing_end_location(stmt.ptop_core_loc), + ), + ~f= + (~final, p) => + if (final) { + group(fmt.print_provide_item(fmt, p)) ++ trailing_comma; + } else { + group(fmt.print_provide_item(fmt, p) ++ comma); + }, + provide_items, + ), + ) + }, + ); +}; + +let print_comment_range = + ( + ~comment_tree, + ~none=empty, + ~lead=empty, + ~trail=empty, + ~allow_breaks=true, + ~block_start=false, + ~block_end=false, + prev: Location.t, + next: Location.t, + ) => { + // This function prints all comments between two given locations, while + // preserving line breaks and structure. It includes a number of optional + // parameters to assist in this goal: + // ~none: + // Printed when no comments are in this range + // ~lead: + // Printed before any comments. This parameter is ignored if there are + // no comments or the first comment is not on the same line as the + // start location. + // ~trail: + // Printed after any comments. This parameter is ignored if there are + // no comments or the last comment is not on the same line as the end + // location. + // ~allow_breaks: + // A flag indicating if we can add additional line breaks, e.g. no line + // breaks are allowed between most keywords, such as `let` and `mut`. 
+ // ~block_start: + // A flag indicating these comments are to be printed at the start of + // curly braces, brackets, parens, or carets, that when broken would + // introduce a newline which would interfere with this function's + // whitespace calculations. Setting this parameter to `true` will + // prevent extra whitespace from appearing at the start of that block. + // ~block_end: + // A flag indicating these comments are to be printed at the end of + // curly braces, brackets, parens, or carets, that when broken would + // introduce a newline which would interfere with this function's + // whitespace calculations. Setting this parameter to `true` will + // prevent extra whitespace from appearing at the end of that block. + // Useful tips and tricks: + // • Use the ~none, ~lead, and ~trail parameters to handle whitespace + // between entities instead of trying to guess the output of this + // function. I.e. prefer `print_comment_range(~trail=space, ...)` to + // `print_comment_range(...) ++ space`, as this function will handle + // a trailing line comment for you. + // • Generally, your ~none and ~trail parameters should match. + // • The ~lead parameter should almost never contain a break, as this + // could prevent a comment from returning to the line it was originally + // on. + // Happy formatting! + + let between_loc: Location.t = { + loc_start: prev.loc_end, + loc_end: next.loc_start, + loc_ghost: true, + }; + + let comments = Commenttree.query(comment_tree, between_loc); + switch (comments) { + | [] => none // bail out quickly for the most common case + | _ => + let loc = cmt => { + switch (cmt) { + | Doc({cmt_loc}) + | Block({cmt_loc}) + | Line({cmt_loc}) + | Shebang({cmt_loc}) => cmt_loc + }; + }; + + let print_comment = cmt => { + switch (cmt) { + | Doc({cmt_source}) + | Block({cmt_source}) => string(cmt_source) + | Line({cmt_source}) + | Shebang({cmt_source}) => + // When a line comment appears anywhere, we force the surrounding + // group to break. Note that the hardline that must follow a line + // comment is not included here, but instead included later to + // account for constructs (like blocks) that may include their own + // newline character. + group_breaker ++ string(String.trim(cmt_source)) + }; + }; + + // We use phantom hardlines here to prevent comment line breaks from + // disrupting the formatting engine's width calculations. This means that + // the engine guarantees we get a break for line comments, but it + // considers the following code as just one really long line: + // func(foo, // comment + // bar, baz, // comment + // qux // comment + // ) + // This gives us a balance between comments completely changing the + // formatting of an expression versus the engine just being "mindful" + // about the comments. 
+ + switch (comments) { + | [] => empty + | comments => + concat_map( + ~lead= + next => + switch (loc(next).loc_start.pos_lnum - prev.loc_end.pos_lnum) { + | _ when block_start => lead + | 0 => lead + | 1 when allow_breaks => phantom_hardline + | _ when allow_breaks => phantom_hardline ++ phantom_hardline + | _ => lead + }, + ~sep= + (prev, next) => { + let line_delta = + loc(next).loc_start.pos_lnum - loc(prev).loc_end.pos_lnum; + if (allow_breaks) { + switch (prev) { + | Block(_) when line_delta == 0 => breakable_space + | _ when line_delta <= 1 => phantom_hardline + | _ => phantom_hardline ++ phantom_hardline + }; + } else { + switch (prev) { + | Line(_) + | Shebang(_) => phantom_hardline // required for line comments no matter what + | _ => space + }; + }; + }, + ~trail= + prev => { + let line_delta = + next.loc_start.pos_lnum - loc(prev).loc_end.pos_lnum; + switch (prev) { + // prevent double spacing of comments at the end of a block + | Line(_) + | Shebang(_) when block_end => if_broken(empty, phantom_hardline) + | _ when block_end => empty + | Line(_) + | Shebang(_) when line_delta <= 1 => phantom_hardline + | Line(_) + | Shebang(_) when line_delta > 1 => + phantom_hardline ++ phantom_hardline + | Doc(_) when allow_breaks => phantom_hardline + | _ when allow_breaks && line_delta == 1 => phantom_hardline + | _ when allow_breaks && line_delta > 1 => + phantom_hardline ++ phantom_hardline + | _ => trail + }; + }, + ~f=(~final, c) => print_comment(c), + comments, + ) + }; + }; +}; + +let print_program = (~comment_tree, fmt, parsed_program) => { + let toplevel = + switch (parsed_program.statements) { + | [] => + fmt.print_comment_range( + ~none=hardline, + ~lead=space, + ~trail=hardline, + parsed_program.module_name.loc, + enclosing_end_location(parsed_program.prog_loc), + ) + | _ => + concat_map( + ~lead= + first => + fmt.print_comment_range( + ~none=hardline ++ hardline, + ~lead=space, + ~trail=hardline ++ hardline, + parsed_program.module_name.loc, + first.ptop_loc, + ), + ~sep= + (prev, next) => { + fmt.print_comment_range( + ~none= + switch ( + next.ptop_loc.loc_start.pos_lnum + - prev.ptop_loc.loc_end.pos_lnum + ) { + | 0 + | 1 => hardline + | _ => hardline ++ hardline + }, + ~lead=space, + ~trail=space, + prev.ptop_loc, + next.ptop_loc, + ) + }, + ~trail= + last => + fmt.print_comment_range( + ~block_end=true, + ~lead=space, + last.ptop_loc, + enclosing_end_location(parsed_program.prog_loc), + ) + ++ hardline, + ~f= + (~final, s) => + if (has_disable_formatting_comment(~comment_tree, s.ptop_loc)) { + fmt.print_original_code(fmt, s.ptop_loc); + } else { + fmt.print_toplevel_stmt(fmt, s); + }, + parsed_program.statements, + ) + }; + + group @@ + fmt.print_comment_range( + enclosing_start_location(parsed_program.prog_loc), + parsed_program.module_name.loc, + ) + ++ string("module ") + ++ string(parsed_program.module_name.txt) + ++ toplevel; +}; + +// The default_formatter cannot look up original source code or comments. +// You must override `print_original_code`, `print_expression`, `print_program`, +// `print_module_declaration`, and `print_comment_range` functions with +// those functions applied to your source code and comments. 
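+ // Editor's note (hypothetical sketch, not part of this change): because
+ // every printer receives the `fmt` record and recurses through it, a
+ // downstream tool can reuse `default_formatter` and swap out a single
+ // printer with the same record-update idiom that `format` uses below,
+ // e.g.
+ //   let my_formatter = {
+ //     ...default_formatter,
+ //     print_attribute: (fmt, attr) =>
+ //       string("/* attr */ ") ++ print_attribute(fmt, attr),
+ //   };
+ // `my_formatter` and the prefix string are invented for illustration;
+ // only the record-update pattern itself comes from this file.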
+let default_formatter: formatter = { + // Default printer cannot look up original code + print_original_code: print_original_code(~source=[||]), + print_infix_prefix_op, + print_constant, + print_punnable_pattern, + print_lambda_argument, + print_pattern, + print_ident_string, + print_identifier, + print_punnable_expression, + print_grouped_access_expression, + print_use_item, + print_match_branch, + print_attribute, + print_application_argument, + print_if, + // Default printer cannot look up comments + print_expression: print_expression(~comment_tree=Commenttree.empty), + print_value_binding, + print_parsed_type_argument, + print_type, + print_label_declaration, + print_constructor_arguments, + print_exception, + print_constructor_declaration, + print_data_declaration, + print_primitive_description, + print_include_declaration, + // Default printer cannot look up comments + print_module_declaration: + print_module_declaration(~comment_tree=Commenttree.empty), + print_value_description, + print_provide_item, + print_toplevel_stmt, + // Default printer cannot look up comments + print_comment_range: print_comment_range(~comment_tree=Commenttree.empty), + // Default printer cannot look up comments + print_program: print_program(~comment_tree=Commenttree.empty), +}; + +let format = (~write, ~source, ~eol, parsed_program) => { + let comment_tree = Commenttree.from_comments(parsed_program.comments); + let formatter = { + ...default_formatter, + print_original_code: print_original_code(~source), + print_expression: print_expression(~comment_tree), + print_module_declaration: print_module_declaration(~comment_tree), + print_comment_range: print_comment_range(~comment_tree), + print_program: print_program(~comment_tree), + }; + Engine.print( + ~write, + ~eol, + ~line_width=80, + formatter.print_program(formatter, parsed_program), + ); +}; + +let format_to_string = (~source, ~eol, parsed_program) => { + let comment_tree = Commenttree.from_comments(parsed_program.comments); + let formatter = { + ...default_formatter, + print_original_code: print_original_code(~source), + print_expression: print_expression(~comment_tree), + print_module_declaration: print_module_declaration(~comment_tree), + print_comment_range: print_comment_range(~comment_tree), + print_program: print_program(~comment_tree), + }; + Engine.to_string( + ~eol, + ~line_width=80, + formatter.print_program(formatter, parsed_program), + ); +}; diff --git a/compiler/src/formatting/format.re b/compiler/src/formatting/format.re deleted file mode 100644 index 7f966e44ea..0000000000 --- a/compiler/src/formatting/format.re +++ /dev/null @@ -1,5304 +0,0 @@ -open Grain; -open Compile; -open Grain_parsing; -open Grain_utils; -open Grain_diagnostics; - -module Doc = Res_doc; - -type iterator_item_type = - | IteratedListPattern - | IteratedRecordPattern - | IteratedRecord - | IteratedTypeConstructor - | IteratedPatterns - | IteratedArgs - | IteratedMatchItem - | IteratedDataDeclarations - | IteratedRecordLabels - | IteratedTupleExpression - | IteratedArrayExpression - | IteratedTypeItems - | IteratedTupleConstructor - | IteratedEnum - | IteratedRecordData - | IteratedValueBindings; -type expression_parent_type = - | InfixExpression - | GenericExpression - | AccessExpression; - -let exception_primitives = [|"throw", "fail", "assert"|]; - -let is_shift_or_concat_op = fn => - if (String.length(fn) > 1) { - switch (String.sub(fn, 0, 2)) { - | "<<" - | ">>" - | "++" - | "||" => true - | _ => false - }; - } else { - false; - }; - -let is_logic_op = fn => 
- if (String.length(fn) > 1) { - switch (String.sub(fn, 0, 2)) { - | "<=" - | ">=" - | "==" - | "!=" - | "is" - | "isnt" - | "&&" - | "||" => true - | _ => false - }; - } else { - false; - }; -let is_math_op = fn => - if (is_logic_op(fn) || is_shift_or_concat_op(fn)) { - false; - } else if (String.length(fn) > 0) { - switch (fn.[0]) { - | '*' - | '/' - | '%' - | '+' - | '-' - | '<' - | '>' - | '&' - | '^' - | '|' => true - | _ => false - }; - } else { - false; - }; - -let op_precedence = fn => { - let op_precedence = fn => - switch (fn) { - | '*' - | '/' - | '%' => 120 - | '+' - | '-' => 110 - | '<' - | '>' => 90 - | '&' => 70 - | '^' => 60 - | '|' => 50 - | '_' => 10 - | _ => 9999 - }; - if (String.length(fn) > 1) { - switch (String.sub(fn, 0, 2)) { - | "++" => 110 - | "<<" - | ">>" => 100 - | "==" - | "!=" - | "is" => 80 - | "&&" => 40 - | "||" - | "??" => 30 - | _ => op_precedence(fn.[0]) - }; - } else if (String.length(fn) > 0) { - op_precedence(fn.[0]); - } else { - 9999; - }; -}; -let list_cons = "[...]"; - -exception IllegalParse(string); -exception FormatterError(string); - -type compilation_error = - | ParseError(exn) - | InvalidCompilationState; - -type sugared_list_item = - | Regular(Parsetree.expression) - | Spread(Parsetree.expression); - -type record_item = - | Field((Location.loc(Identifier.t), Parsetree.expression)) - | RecordSpread(Parsetree.expression); - -type sugared_pattern_item = - | RegularPattern(Parsetree.pattern) - | SpreadPattern(Parsetree.pattern); - -let get_original_code = (location: Location.t, source: array(string)) => { - let (_, start_line, startc, _) = - Locations.get_raw_pos_info(location.loc_start); - let (_, end_line, endc, _) = Locations.get_raw_pos_info(location.loc_end); - - if (Array.length(source) > end_line - 1) { - if (start_line == end_line) { - String_utils.Utf8.sub(source[start_line - 1], startc, endc - startc); - } else { - let text = ref(""); - for (line in start_line - 1 to end_line - 1) { - if (line + 1 == start_line) { - text := - text^ - ++ String_utils.Utf8.string_after(source[line], startc) - ++ "\n"; - } else if (line + 1 == end_line) { - text := text^ ++ String_utils.Utf8.sub(source[line], 0, endc); - } else { - text := text^ ++ source[line] ++ "\n"; - }; - }; - text^; - }; - } else { - raise(FormatterError("Requested beyond end of original source")); - }; -}; - -// Be AWARE! This is only to be called when you know the comments list is not empty. 
-// Moved here in case we want to change the implementation in future -let get_last_item_in_list = comments => - List.nth(comments, List.length(comments) - 1); - -let is_disable_formatting_comment = (comment: Parsetree.comment) => { - switch (comment) { - | Line(cmt) => - if (cmt.cmt_content == "formatter-ignore") { - true; - } else { - false; - } - | _ => false - }; -}; - -let print_attributes = attributes => - switch (attributes) { - | [] => Doc.nil - | _ => - Doc.concat([ - Doc.join( - ~sep=Doc.space, - List.map( - ((a: Location.loc(string), args: list(Location.loc(string)))) => { - switch (args) { - | [] => Doc.concat([Doc.text("@"), Doc.text(a.txt)]) - | _ => - Doc.concat([ - Doc.text("@"), - Doc.text(a.txt), - Doc.text("("), - Doc.join( - ~sep=Doc.concat([Doc.comma, Doc.space]), - List.map( - (b: Location.loc(string)) => - Doc.concat([ - Doc.text("\""), - Doc.text(b.txt), - Doc.text("\""), - ]), - args, - ), - ), - Doc.text(")"), - ]) - } - }, - attributes, - ), - ), - Doc.hardLine, - ]) - }; -let force_break_if_line_comment = - (~separator, comments: list(Parsetree.comment)) => { - switch (comments) { - | [] => separator - | _ => - let last_comment = get_last_item_in_list(comments); - - switch (last_comment) { - | Line(_) => Doc.hardLine - | _ => separator - }; - }; -}; - -let break_parent_if_line_comment = - (~separator, comments: list(Parsetree.comment)) => { - switch (comments) { - | [] => separator - | _ => - let last_comment = get_last_item_in_list(comments); - - switch (last_comment) { - | Line(_) => Doc.breakParent - | _ => separator - }; - }; -}; - -let item_separator = (~this_line: int, ~line_above: int, break_separator) => - if (this_line - line_above > 1) { - Doc.concat([break_separator, Doc.hardLine]); - } else { - break_separator; - }; - -let comment_separator = - (~this_line: int, ~line_above: int, comment: Parsetree.comment) => - if (this_line - line_above > 1) { - switch (comment) { - | Line(_) => Doc.hardLine - | Shebang(_) => Doc.hardLine - | Doc(_) => Doc.hardLine - | _ => Doc.concat([Doc.hardLine, Doc.hardLine]) - }; - } else { - switch (comment) { - | Line(_) - | Shebang(_) => Doc.nil - | Doc(_) => Doc.softLine - | Block(_) => Doc.hardLine - }; - }; - -let add_parens = (doc: Doc.t) => - Doc.concat([ - Doc.lparen, - Doc.indent(Doc.concat([Doc.softLine, doc])), - Doc.softLine, - Doc.rparen, - ]); - -let infixop = (op: string) => { - switch (op.[0]) { - | '+' - | '-' - | '*' - | '/' - | '%' - | '=' - | '^' - | '<' - | '>' - | '&' - | '|' - | '?' => true - | _ when op == "is" => true - | _ when op == "isnt" => true - | _ when String.starts_with(~prefix="!=", op) => true - | _ => false - | exception _ => false - }; -}; - -let prefixop = (op: string) => { - switch (op.[0]) { - | '!' 
=> true - | _ => false - | exception _ => false - }; -}; - -let no_attribute = _ => Doc.nil; - -let remove_used_comments = - ( - ~remove_comments: list(Parsetree.comment), - all_comments: list(Parsetree.comment), - ) => { - List.filter(c => !List.mem(c, remove_comments), all_comments); -}; - -type prev_item_t = - | PreviousItem(Location.t) - | Block(Location.t) - | TopOfFile; - -let before_comments_break_line = - (~previous: prev_item_t, ~this_line, comments) => - switch (previous) { - | TopOfFile => Doc.nil - | Block(block) => - let (_, bracket_line, _, _) = - Locations.get_raw_pos_info(block.loc_start); - switch (comments) { - | [] => Doc.nil - | [first_comment, ...rem] => - let (_, first_comment_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(first_comment).loc_start, - ); - if (first_comment_line - bracket_line > 1) { - Doc.line; - } else { - Doc.nil; - }; - }; - - | PreviousItem(prev) => - let (_, prev_stmt_line, _, _) = Locations.get_raw_pos_info(prev.loc_end); - switch (comments) { - | [] => item_separator(~this_line, ~line_above=prev_stmt_line, Doc.line) - - | [first_comment, ...rem] => - let (_, first_comment_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(first_comment).loc_start, - ); - - if (first_comment_line == prev_stmt_line) { - Doc.space; - } else { - item_separator( - ~this_line=first_comment_line, - ~line_above=prev_stmt_line, - Doc.line, - ); - }; - }; - }; - -let handle_after_comments_break = (~this_line, leading_comments) => - switch (leading_comments) { - | [] => Doc.nil - | in_comments => - let last_comment = get_last_item_in_list(in_comments); - let (_, last_comment_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(last_comment).loc_end, - ); - - if (last_comment_line == this_line) { - Doc.line; - } else { - comment_separator( - ~this_line, - ~line_above=last_comment_line, - last_comment, - ); - }; - }; - -let before_comments_break = (~previous: prev_item_t, ~this_line, comments) => - switch (previous) { - | TopOfFile => Doc.nil - | Block(block) => - let (_, bracket_line, _, _) = - Locations.get_raw_pos_info(block.loc_start); - switch (comments) { - | [] => Doc.nil - | [first_comment, ...rem] => - let (_, first_comment_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(first_comment).loc_start, - ); - if (first_comment_line - bracket_line > 1) { - Doc.hardLine; - } else { - Doc.nil; - }; - }; - - | PreviousItem(prev) => - let (_, prev_stmt_line, _, _) = Locations.get_raw_pos_info(prev.loc_end); - switch (comments) { - | [] => - item_separator(~this_line, ~line_above=prev_stmt_line, Doc.hardLine) - - | [first_comment, ...rem] => - let (_, first_comment_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(first_comment).loc_start, - ); - - if (first_comment_line == prev_stmt_line) { - Doc.space; - } else { - item_separator( - ~this_line=first_comment_line, - ~line_above=prev_stmt_line, - Doc.hardLine, - ); - }; - }; - }; - -let rec block_item_iterator_line = - ( - ~previous: prev_item_t, - ~get_loc: 'a => Location.t, - ~print_item: (~comments: list(Parsetree.comment), 'a) => Doc.t, - ~comments: list(Parsetree.comment), - ~original_source, - ~separator: option(Doc.t)=?, - items: list('a), - ) => { - switch (items) { - | [] => Doc.nil - | [item, ...remainder] => - let leading_comments = - switch (previous) { - | Block(prev_node) => - Comment_utils.get_comments_before_location( - ~location=get_loc(item), - comments, - ) - | PreviousItem(prev_node) => - 
Comment_utils.get_comments_between_locations( - ~loc1=prev_node, - ~loc2=get_loc(item), - comments, - ) - | TopOfFile => - Comment_utils.get_comments_before_location( - ~location=get_loc(item), - comments, - ) - }; - - let leading_comment_docs = - Comment_utils.new_comments_to_docs(leading_comments); - - let this_loc = get_loc(item); - let (_, this_line, this_char, _) = - Locations.get_raw_pos_info(this_loc.loc_start); - - let after_comments_break = - handle_after_comments_break(~this_line, leading_comments); - - let bcb = - before_comments_break_line(~previous, ~this_line, leading_comments); - - let block_top_spacing = - switch (previous) { - | Block(block_loc) => - switch (leading_comments) { - | [] => - let (_, block_line, _, _) = - Locations.get_raw_pos_info(block_loc.loc_start); - if (this_line - block_line > 1) { - Doc.hardLine; - } else { - Doc.nil; - }; - | _ => Doc.nil - } - | _ => Doc.nil - }; - - let item_comments = - Comment_utils.get_comments_inside_location( - ~location=get_loc(item), - comments, - ); - let comments_without_leading = - remove_used_comments(~remove_comments=leading_comments, comments); - - switch (remainder) { - | [] => - let trailing_comments = - remove_used_comments( - ~remove_comments=item_comments, - comments_without_leading, - ); - - let trailing_comment_docs = - Comment_utils.block_trailing_comments_docs(trailing_comments); - - let (_, last_stmt_line, _, _) = - Locations.get_raw_pos_info(get_loc(item).loc_end); - let trailing_comment_separator = - switch (trailing_comments) { - | [] => Doc.nil - - | [first_comment, ...rem] => - let (_, first_comment_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(first_comment).loc_start, - ); - - if (first_comment_line == last_stmt_line) { - Doc.space; - } else { - Doc.line; - }; - }; - - let this_item = - Doc.concat([ - bcb, - leading_comment_docs, - after_comments_break, - print_item(~comments=item_comments, item), - Doc.ifBreaks(Option.value(~default=Doc.nil, separator), Doc.nil), - trailing_comment_separator, - trailing_comment_docs, - ]); - - this_item; - - | _more => - let this_item = - Doc.concat([ - bcb, - block_top_spacing, - leading_comment_docs, - after_comments_break, - print_item(~comments=item_comments, item), - Option.value(~default=Doc.nil, separator), - ]); - - let comments_without_item_comments = - remove_used_comments( - ~remove_comments=item_comments, - comments_without_leading, - ); - - Doc.concat([ - this_item, - block_item_iterator_line( - ~previous=PreviousItem(get_loc(item)), - ~get_loc, - ~print_item, - ~comments=comments_without_item_comments, - ~original_source, - ~separator?, - remainder, - ), - ]); - }; - }; -}; - -let rec block_item_iterator = - ( - ~previous: prev_item_t, - ~get_loc: 'a => Location.t, - ~print_item: (~comments: list(Parsetree.comment), 'a) => Doc.t, - ~comments: list(Parsetree.comment), - ~print_attribute: 'a => Doc.t, - ~original_source, - items: list('a), - ) => { - switch (items) { - | [] => Doc.nil - | [item, ...remainder] => - let attribute_text = print_attribute(item); - let leading_comments = - switch (previous) { - | Block(prev_node) => - Comment_utils.get_comments_before_location( - ~location=get_loc(item), - comments, - ) - | PreviousItem(prev_node) => - Comment_utils.get_comments_between_locations( - ~loc1=prev_node, - ~loc2=get_loc(item), - comments, - ) - | TopOfFile => - Comment_utils.get_comments_before_location( - ~location=get_loc(item), - comments, - ) - }; - let leading_comment_docs = - 
Comment_utils.new_comments_to_docs(leading_comments); - - let this_loc = get_loc(item); - let (_, this_line, this_char, _) = - Locations.get_raw_pos_info(this_loc.loc_start); - - let after_comments_break = - handle_after_comments_break(~this_line, leading_comments); - - let bcb = before_comments_break(~previous, ~this_line, leading_comments); - - let block_top_spacing = - switch (previous) { - | Block(block_loc) => - switch (leading_comments) { - | [] => - let (_, block_line, _, _) = - Locations.get_raw_pos_info(block_loc.loc_start); - if (this_line - block_line > 1) { - Doc.hardLine; - } else { - Doc.nil; - }; - | _ => Doc.nil - } - | _ => Doc.nil - }; - - let disable_formatting = - switch (leading_comments) { - | [] => false - | cmts => - let last_comment = get_last_item_in_list(cmts); - - is_disable_formatting_comment(last_comment); - }; - - if (disable_formatting) { - let original_code = get_original_code(get_loc(item), original_source); - - let orig_doc = - Doc.concat([ - before_comments_break(~previous, ~this_line, leading_comments), - leading_comment_docs, - Doc.group(Doc.text(original_code)), - ]); - - let included_comments = - Comment_utils.get_comments_inside_location( - ~location=get_loc(item), - comments, - ); - - let cleaned_comments = - remove_used_comments(~remove_comments=included_comments, comments); - - switch (items) { - | [last_item] => - let block_trailing_comments = - Comment_utils.get_comments_after_location( - ~location=get_loc(last_item), - cleaned_comments, - ); - - switch (block_trailing_comments) { - | [] => orig_doc - | _ => - let block_trailing_comment_docs = - Comment_utils.block_trailing_comments_docs( - block_trailing_comments, - ); - - Doc.concat([ - orig_doc, - before_comments_break( - ~previous=PreviousItem(get_loc(last_item)), - ~this_line, - block_trailing_comments, - ), - block_trailing_comment_docs, - ]); - }; - | _ => - let item_comments = - Comment_utils.get_comments_inside_location( - ~location=get_loc(item), - comments, - ); - let comments_without_leading = - remove_used_comments(~remove_comments=leading_comments, comments); - let comments_without_item_comments = - remove_used_comments( - ~remove_comments=item_comments, - comments_without_leading, - ); - Doc.concat([ - orig_doc, - block_item_iterator( - ~previous=PreviousItem(get_loc(item)), - ~get_loc, - ~print_item, - ~comments=comments_without_item_comments, - ~print_attribute, - ~original_source, - remainder, - ), - ]); - }; - } else { - // regular formatting - - let item_comments = - Comment_utils.get_comments_inside_location( - ~location=get_loc(item), - comments, - ); - - let comments_without_leading = - remove_used_comments(~remove_comments=leading_comments, comments); - - switch (remainder) { - | [] => - let trailing_comments = - remove_used_comments( - ~remove_comments=item_comments, - comments_without_leading, - ); - - let trailing_comment_docs = - Comment_utils.block_trailing_comments_docs(trailing_comments); - - let (_, last_stmt_line, _, _) = - Locations.get_raw_pos_info(get_loc(item).loc_end); - let trailing_comment_separator = - switch (trailing_comments) { - | [] => Doc.nil - - | [first_comment, ...rem] => - let (_, first_comment_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(first_comment).loc_start, - ); - - if (first_comment_line == last_stmt_line) { - Doc.space; - } else { - item_separator( - ~this_line=first_comment_line, - ~line_above=last_stmt_line, - Doc.hardLine, - ); - }; - }; - - let this_item = - Doc.concat([ - bcb, - block_top_spacing, - 
leading_comment_docs, - after_comments_break, - attribute_text, - print_item(~comments=item_comments, item), - trailing_comment_separator, - trailing_comment_docs, - ]); - - this_item; - - | _more => - let this_item = - Doc.concat([ - bcb, - block_top_spacing, - leading_comment_docs, - after_comments_break, - attribute_text, - print_item(~comments=item_comments, item), - ]); - - let comments_without_item_comments = - remove_used_comments( - ~remove_comments=item_comments, - comments_without_leading, - ); - - Doc.concat([ - this_item, - block_item_iterator( - ~previous=PreviousItem(get_loc(item)), - ~get_loc, - ~print_item, - ~comments=comments_without_item_comments, - ~print_attribute, - ~original_source, - remainder, - ), - ]); - }; - }; - }; -}; - -let print_trailing_comments = (~separator, ~itemloc: Location.t, comments) => { - let next_comment = ref(None); - - let items = - List.fold_right( - (comment: Parsetree.comment, acc) => { - let (_, this_comment_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(comment).loc_start, - ); - - switch (next_comment^) { - | None => - let (_, code_line, _, _) = - Locations.get_raw_pos_info(itemloc.loc_end); - if (this_comment_line > code_line) { - [Doc.hardLine, Comment_utils.comment_to_doc(comment), ...acc]; - } else { - [Comment_utils.comment_to_doc(comment), ...acc]; - }; - | Some(next) => - let (_, next_comment_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(next).loc_end, - ); - - next_comment := Some(comment); - - if (this_comment_line <= next_comment_line) { - [Doc.hardLine, Comment_utils.comment_to_doc(comment), ...acc]; - } else { - [Comment_utils.comment_to_doc(comment), ...acc]; - }; - }; - }, - comments, - [], - ); - - switch (items) { - | [] => Doc.nil - | items => Doc.concat([Doc.space, ...items]) - }; -}; - -let mix_comments_and_separator = - (~item_location: Location.t, ~separator, comments) => { - let separated = ref(false); - - let next_comment = ref(None); - - let force_break_for_comment = (comment, acc) => - switch ((comment: Parsetree.comment)) { - | Line(_) => - separated := true; - [ - separator, - Doc.space, - Comment_utils.comment_to_doc(comment), - Doc.breakParent, // forces the lines to break, and so make this line comment force a new line - ...acc, - ]; - - | _ => [Doc.space, Comment_utils.comment_to_doc(comment), ...acc] - }; - - let items = - List.fold_right( - (comment: Parsetree.comment, acc) => { - let (_, this_comment_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(comment).loc_start, - ); - - switch (next_comment^) { - | None => - let (_, code_line, _, _) = - Locations.get_raw_pos_info(item_location.loc_end); - - if (this_comment_line > code_line) { - [Doc.hardLine, Comment_utils.comment_to_doc(comment), ...acc]; - } else { - force_break_for_comment(comment, acc); - }; - - | Some(next) => - let (_, next_comment_line, _, _) = - Locations.get_raw_pos_info( - Locations.get_comment_loc(next).loc_end, - ); - - next_comment := Some(comment); - - if (this_comment_line < next_comment_line) { - [Doc.hardLine, Comment_utils.comment_to_doc(comment), ...acc]; - } else { - force_break_for_comment(comment, acc); - }; - }; - }, - comments, - [], - ); - - if (separated^) { - Doc.concat(items); - } else { - Doc.ifBreaks( - Doc.concat([separator, Doc.concat(items)]), - Doc.concat([Doc.concat(items), separator]), - ); - }; -}; - -let rec item_iterator = - ( - ~previous: option(Location.t)=?, - ~get_loc: 'a => Location.t, - ~print_item: (~comments: 
list(Parsetree.comment), 'a) => Doc.t, - ~comments: list(Parsetree.comment), - ~followed_by_arrow: option(bool)=?, - ~iterated_item: iterator_item_type, - items: list('a), - ) => { - let trailing_separator = - switch (iterated_item) { - | IteratedListPattern - | IteratedDataDeclarations - | IteratedMatchItem - | IteratedRecordPattern - | IteratedTupleExpression - | IteratedArrayExpression - | IteratedRecord - | IteratedRecordData - | IteratedRecordLabels => true - | IteratedTypeItems - | IteratedEnum - | IteratedTypeConstructor - | IteratedArgs - | IteratedPatterns // we don't apply separators here as we may also need to apply type annotatins - | IteratedTupleConstructor - | IteratedValueBindings => false - }; - - let separator = Doc.comma; - - switch (items) { - | [] => [] - | [first_item, ...rest] => - // special case for the first item, we look for leading comments - // for all others, we look at the comments that come after them as that has the - // impact on where to place comments and separators - - let leading_comments = - Comment_utils.get_comments_before_location( - ~location=get_loc(first_item), - comments, - ); - let leading_comments_docs = - switch (leading_comments) { - | [] => Doc.nil - | _ => - Doc.group( - Doc.concat([ - Comment_utils.new_comments_to_docs(leading_comments), - Doc.ifBreaks(Doc.nil, Doc.space), - ]), - ) - }; - - let number_items = List.length(items); - - List.mapi( - (index, item) => { - let itemdoc = - print_item( - ~comments= - Comment_utils.get_comments_inside_location( - ~location=get_loc(item), - comments, - ), - item, - ); - - let trailing_comments = - if (index == number_items - 1) { - Comment_utils.get_comments_after_location( - ~location=get_loc(item), - comments, - ); - } else { - let next_item = List.nth(items, index + 1); - Comment_utils.get_comments_between_locations( - ~loc1=get_loc(item), - ~loc2=get_loc(next_item), - comments, - ); - }; - - let cmts = - if (index == number_items - 1) { - // last item - Doc.concat([ - Doc.ifBreaks( - if (trailing_separator) {separator} else {Doc.nil}, - Doc.nil, - ), - print_trailing_comments( - ~separator, - ~itemloc=get_loc(item), - trailing_comments, - ), - ]); - } else { - mix_comments_and_separator( - ~item_location=get_loc(item), - ~separator, - trailing_comments, - ); - }; - - // we only have leading comments for the first item - if (index == 0) { - Doc.concat([leading_comments_docs, itemdoc, cmts]); - } else { - Doc.concat([itemdoc, cmts]); - }; - }, - items, - ); - }; -}; - -let rec resugar_list_patterns = - ( - ~bracket_line, - ~original_source: array(string), - ~comments: list(Parsetree.comment), - ~next_loc: Location.t, - patterns: list(Parsetree.pattern), - ) => { - let processed_list = resugar_pattern_list_inner(patterns); - - let get_loc = (pattern: sugared_pattern_item) => { - switch (pattern) { - | RegularPattern(p) - | SpreadPattern(p) => p.ppat_loc - }; - }; - - let print_item = (~comments, pattern: sugared_pattern_item) => { - switch (pattern) { - | RegularPattern(e) => - Doc.group(print_pattern(~original_source, ~comments, ~next_loc, e)) - | SpreadPattern(e) => - Doc.group( - Doc.concat([ - Doc.text("..."), - print_pattern(~original_source, ~comments, ~next_loc, e), - ]), - ) - }; - }; - - let items = - item_iterator( - ~get_loc, - ~print_item, - ~comments, - ~iterated_item=IteratedListPattern, - processed_list, - ); - let printed_patterns = Doc.join(~sep=Doc.line, items); - let printed_patterns_after_bracket = - Doc.concat([Doc.softLine, printed_patterns]); - - Doc.group( - Doc.concat([ - 
Doc.lbracket, - Doc.indent(printed_patterns_after_bracket), - Doc.softLine, - Doc.rbracket, - ]), - ); -} - -and resugar_pattern_list_inner = (patterns: list(Parsetree.pattern)) => { - switch (patterns) { - | [arg1, arg2, ..._] => - switch (arg2.ppat_desc) { - | PPatConstruct(innercstr, PPatConstrTuple(innerpatterns)) => - let cstr = - switch (innercstr.txt) { - | IdentName({txt: name}) => name - | _ => "" - }; - - if (cstr == "[]") { - [RegularPattern(arg1)]; - } else if (cstr == list_cons) { - let inner = resugar_pattern_list_inner(innerpatterns); - [RegularPattern(arg1), ...inner]; - } else { - [RegularPattern(arg1), SpreadPattern(arg2)]; - }; - - | _ => [RegularPattern(arg1), SpreadPattern(arg2)] - } - | _ => - raise(IllegalParse("List pattern cons should always have two patterns")) - }; -} - -and resugar_list = - ( - ~original_source: array(string), - ~comments: list(Parsetree.comment), - expressions: list(Parsetree.expression), - ) => { - let processed_list = resugar_list_inner(expressions); - - let last_item_was_spread = ref(false); - - let list_length = List.length(processed_list); - - let items = - List.mapi( - (index, item) => - switch (item) { - | Regular(e) => - last_item_was_spread := false; - - // Do we have any comments on this line? - // If so, we break the whole list - - // we might have a list list [1, 2 // comment - // 3] - // so need to use the comment after the last item - // [1, - // 2, //comment - // 3] - - let end_line_comments = - if (index < list_length - 2) { - let next_item = List.nth(processed_list, index + 1); - Comment_utils.get_comments_between_locations( - ~loc1=e.pexp_loc, - ~loc2= - switch (next_item) { - | Regular(e) - | Spread(e) => e.pexp_loc - }, - comments, - ); - } else { - let (_, item_line, item_char, _) = - Locations.get_raw_pos_info(e.pexp_loc.loc_end); - Comment_utils.get_comments_on_line_end( - ~line=item_line, - ~char=item_char, - comments, - ); - }; - - ( - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments= - Comment_utils.get_comments_inside_location( - ~location=e.pexp_loc, - comments, - ), - e, - ), - end_line_comments, - ); - - | Spread(e) => - last_item_was_spread := true; - - ( - Doc.concat([ - Doc.text("..."), - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments= - Comment_utils.get_comments_inside_location( - ~location=e.pexp_loc, - comments, - ), - e, - ), - ]), - [], - ); - }, - processed_list, - ); - - // We have to compose this list by hand because of the complexity of if a list item - // is followed by a comment, the comma must come before the comment. 
- // It also impacts how we force a new line for a line ending comment at the end of a list - // without introducing an extra blank line when bringing the indentation back in again - - let last_line_breaks_for_comments = ref(false); - let items_length = List.length(items); - let list_items = - List.mapi( - (i, (item, item_comments)) => { - let final_item = items_length - 1 == i; - - let comment_doc = - switch (item_comments) { - | [] => - last_line_breaks_for_comments := false; - if (final_item) { - Doc.nil; - } else { - Doc.concat([Doc.comma, Doc.line]); - }; - | _ => - let trailing_comments = - List.map( - (cmt: Parsetree.comment) => - Doc.concat([Doc.space, Comment_utils.comment_to_doc(cmt)]), - item_comments, - ); - - last_line_breaks_for_comments := true; - Doc.concat([ - Doc.comma, - Doc.concat(trailing_comments), - if (final_item) {Doc.nil} else {Doc.hardLine}, - ]); - }; - - Doc.concat([Doc.group(item), comment_doc]); - }, - items, - ); - - Doc.group( - Doc.concat([ - Doc.lbracket, - Doc.indent( - Doc.concat([ - Doc.softLine, - Doc.concat(list_items), - if (last_item_was_spread^ || last_line_breaks_for_comments^) { - Doc.nil; - } else { - Doc.ifBreaks(Doc.comma, Doc.nil); - }, - ]), - ), - if (last_line_breaks_for_comments^) { - Doc.hardLine; - } else { - Doc.softLine; - }, - Doc.rbracket, - ]), - ); -} - -and resugar_list_inner = (expressions: list(Parsetree.expression)) => - switch (expressions) { - | [arg1, arg2] => - switch (arg2.pexp_desc) { - | PExpConstruct( - {txt: IdentName({txt: "[...]"})}, - PExpConstrTuple(innerexpressions), - ) => - let inner = resugar_list_inner(innerexpressions); - List.append([Regular(arg1)], inner); - | PExpConstruct({txt: IdentName({txt: "[]"})}, PExpConstrTuple(_)) => [ - Regular(arg1), - ] - | _ => [Regular(arg1), Spread(arg2)] - } - | _ => - // Grain syntax makes it impossible to construct a list cons without - // two arguments, but we'll check just to make sure - raise(IllegalParse("List cons should always have two expressions")) - } - -and check_for_pattern_pun = (pat: Parsetree.pattern) => - switch (pat.ppat_desc) { - | PPatVar({txt, _}) => Doc.text(txt) - | _ => Doc.nil - } - -and print_record_pattern = - ( - ~patternlocs: list((Location.loc(Identifier.t), Parsetree.pattern)), - ~closedflag: Asttypes.closed_flag, - ~original_source: array(string), - ~comments: list(Parsetree.comment), - ~next_loc: Location.t, - patloc: Location.t, - ) => { - let close = - switch (closedflag) { - | Open when patternlocs == [] => Doc.text("_") - | Open => Doc.concat([Doc.text(","), Doc.space, Doc.text("_")]) - | Closed => Doc.nil - }; - - let get_loc = - (patternloc: (Location.loc(Identifier.t), Parsetree.pattern)) => { - let (_, pat) = patternloc; - pat.ppat_loc; - }; - - let print_item = - ( - ~comments, - patternloc: (Location.loc(Identifier.t), Parsetree.pattern), - ) => { - let (loc, pat) = patternloc; - let printed_ident: Doc.t = print_ident(loc.txt); - - let printed_pat = - print_pattern(~original_source, ~comments, ~next_loc, pat); - - let punned_pat = check_for_pattern_pun(pat); - - let pun = - switch (printed_ident, punned_pat: Doc.t) { - | (Text(i), Text(e)) => i == e - | _ => false - }; - - if (pun) { - printed_ident; - } else { - Doc.concat([printed_ident, Doc.text(":"), Doc.space, printed_pat]); - }; - }; - - let after_brace_comments = - Comment_utils.get_after_brace_comments(~loc=patloc, comments); - let cleaned_comments = - remove_used_comments(~remove_comments=after_brace_comments, comments); - - let items = - item_iterator( - ~get_loc, - 
~print_item, - ~comments=cleaned_comments, - ~iterated_item=IteratedRecordPattern, - patternlocs, - ); - let printed_fields = Doc.join(~sep=Doc.line, items); - - let printed_fields_after_brace = - Doc.concat([ - force_break_if_line_comment(~separator=Doc.line, after_brace_comments), - printed_fields, - ]); - - Doc.concat([ - Doc.lbrace, - Comment_utils.single_line_of_comments(after_brace_comments), - Doc.indent(Doc.concat([printed_fields_after_brace, close])), - Doc.line, - Doc.rbrace, - ]); -} - -and print_pattern = - ( - ~original_source: array(string), - ~comments: list(Parsetree.comment), - ~next_loc: Location.t, - pat: Parsetree.pattern, - ) => { - let printed_pattern: (Doc.t, bool) = - switch (pat.ppat_desc) { - | PPatAny => (Doc.text("_"), false) - | PPatConstant(c) => ( - print_constant(~original_source, ~loc=pat.ppat_loc, c), - false, - ) - | PPatVar({txt, _}) => - if (infixop(txt) || prefixop(txt)) { - (Doc.concat([Doc.lparen, Doc.text(txt), Doc.rparen]), false); - } else { - (Doc.text(txt), false); - } - | PPatTuple(patterns) => ( - Doc.group( - print_patterns(~next_loc, ~comments, ~original_source, patterns), - ), - true, - ) - | PPatArray(patterns) => ( - Doc.group( - Doc.concat([ - Doc.lbracket, - Doc.text(">"), - Doc.space, - print_patterns(~next_loc, ~comments, ~original_source, patterns), - Doc.rbracket, - ]), - ), - false, - ) - | PPatRecord(patternlocs, closedflag) => ( - print_record_pattern( - ~patternlocs, - ~closedflag, - ~original_source, - ~comments, - ~next_loc, - pat.ppat_loc, - ), - false, - ) - | PPatConstraint(pattern, parsed_type) => ( - Doc.concat([ - print_patterns(~next_loc, ~comments, ~original_source, [pattern]), - Doc.text(":"), - Doc.space, - print_type(~original_source, ~comments, parsed_type), - ]), - false, - ) - | PPatConstruct(location, PPatConstrTuple(patterns)) => - let func = - switch (location.txt) { - | IdentName({txt: name}) => name - | _ => "" - }; - if (func == list_cons) { - let (_, bracket_line, _, _) = - Locations.get_raw_pos_info(pat.ppat_loc.loc_start); - - ( - resugar_list_patterns( - ~bracket_line, - ~original_source, - ~comments, - ~next_loc, - patterns, - ), - false, - ); - } else { - ( - Doc.concat([ - print_ident(location.txt), - switch (patterns) { - | [] => Doc.nil - | _patterns => - add_parens( - print_patterns( - ~next_loc, - ~comments, - ~original_source, - patterns, - ), - ) - }, - ]), - false, - ); - }; - | PPatConstruct(location, PPatConstrSingleton) => ( - Doc.concat([print_ident(location.txt)]), - false, - ) - | PPatConstruct(location, PPatConstrRecord(patternlocs, closedflag)) => ( - Doc.concat([ - print_ident(location.txt), - print_record_pattern( - ~patternlocs, - ~closedflag, - ~original_source, - ~comments, - ~next_loc, - pat.ppat_loc, - ), - ]), - false, - ) - | PPatOr(pattern1, pattern2) => ( - Doc.group( - Doc.concat([ - Doc.group( - print_pattern(~original_source, ~comments, ~next_loc, pattern1), - ), - Doc.space, - Doc.text("|"), - Doc.line, - Doc.group( - print_pattern(~original_source, ~comments, ~next_loc, pattern2), - ), - ]), - ), - false, - ) - - | PPatAlias(pattern, loc) => ( - Doc.group( - Doc.concat([ - print_pattern(~original_source, ~comments, ~next_loc, pattern), - Doc.space, - Doc.text("as"), - Doc.space, - Doc.text(loc.txt), - ]), - ), - false, - ) - }; - - let (pattern, parens) = printed_pattern; - - let with_leading = [pattern]; - - let after_parens_comments = - Comment_utils.get_comments_to_end_of_line( - ~location=pat.ppat_loc, - comments, - ); - let after_parens_comments_docs = - 
Comment_utils.inbetween_comments_to_docs( - ~offset=true, - after_parens_comments, - ); - - let with_trailing = - if (after_parens_comments_docs == Doc.nil) { - with_leading; - } else { - List.append(with_leading, [after_parens_comments_docs]); - }; - - let clean_pattern = - switch (with_trailing) { - | [fst] => fst - | _ => Doc.concat(with_trailing) - }; - - if (parens) { - Doc.concat([ - Doc.lparen, - Doc.indent(Doc.concat([Doc.softLine, clean_pattern])), - Doc.ifBreaks(Doc.comma, Doc.nil), - Doc.softLine, - Doc.rparen, - ]); - } else { - clean_pattern; - }; -} - -and print_constant = - ( - ~original_source: array(string), - ~loc: Location.t, - c: Parsetree.constant, - ) => { - // we get the original code here to ensure it's well formatted and retains the - // approach of the original code, e.g. char format, number format - Doc.text( - get_original_code(loc, original_source), - ); -} - -and print_ident = (ident: Identifier.t) => { - switch (ident) { - | IdentName({txt: name}) => - if (infixop(name) || prefixop(name)) { - Doc.concat([Doc.lparen, Doc.text(name), Doc.rparen]); - } else { - Doc.text(name); - } - | IdentExternal(externalIdent, {txt: second}) => - Doc.concat([ - print_ident(externalIdent), - Doc.text("."), - Doc.text(second), - ]) - }; -} - -and print_record = - ( - ~base: option(Parsetree.expression), - ~fields: list((Location.loc(Identifier.t), Parsetree.expression)), - ~original_source: array(string), - ~comments: list(Parsetree.comment), - recloc: Location.t, - ) => { - let get_loc = item => { - switch (item) { - | Field((_, expr)) => expr.pexp_loc - | RecordSpread(base) => base.pexp_loc - }; - }; - - let print_item = (~comments, item) => { - switch (item) { - | Field(field) => - let (locidentifier, expr) = field; - let ident = locidentifier.txt; - let printed_ident = print_ident(ident); - let printed_expr = - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expr, - ); - let punned_expr = check_for_pun(expr); - - let pun = - switch (printed_ident, punned_expr: Doc.t) { - | (Text(i), Text(e)) => i == e - | _ => false - }; - - if (!pun) { - Doc.group( - Doc.concat([ - printed_ident, - Doc.text(":"), - Doc.space, - printed_expr, - ]), - ); - } else { - Doc.group(printed_ident); - }; - | RecordSpread(base) => - Doc.concat([ - Doc.text("..."), - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - base, - ), - ]) - }; - }; - - let items = - Option.to_list(Option.map(x => RecordSpread(x), base)) - @ List.map(x => Field(x), fields); - - let after_brace_comments = - switch (items) { - | [item, ..._] => - let loc = - switch (item) { - | Field((ident, _)) => ident.loc - | RecordSpread(exp) => exp.pexp_loc - }; - - Comment_utils.get_after_brace_comments( - ~loc=recloc, - ~first=loc, - comments, - ); - - | _ => Comment_utils.get_after_brace_comments(~loc=recloc, comments) // let s = {} is not legal syntax, but we can use all the comments - }; - - let cleaned_comments = - remove_used_comments(~remove_comments=after_brace_comments, comments); - - let items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~iterated_item=IteratedRecord, - items, - ); - let printed_fields = Doc.join(~sep=Doc.line, items); - - let printed_fields_after_brace = - Doc.concat([ - force_break_if_line_comment(~separator=Doc.line, after_brace_comments), - printed_fields, - ]); - - Doc.concat([ - Doc.lbrace, - Comment_utils.single_line_of_comments(after_brace_comments), - Doc.indent( - Doc.concat([ - 
printed_fields_after_brace, - Doc.ifBreaks( - Doc.nil, - switch (items) { - | [_one] => - // TODO: not needed once we annotate with :: - Doc.comma // append a comma as single argument record look like block {data:val} - | _ => Doc.nil - }, - ), - ]), - ), - Doc.line, - Doc.rbrace, - ]); -} - -and print_type = - ( - ~original_source: array(string), - ~comments: list(Parsetree.comment), - p: Parsetree.parsed_type, - ) => { - switch (p.ptyp_desc) { - | PTyAny => Doc.text("_") - | PTyVar(name) => Doc.text(name) - | PTyArrow(types, parsed_type) => - Doc.concat([ - Doc.group( - switch (types) { - | [] => Doc.concat([Doc.lparen, Doc.rparen]) - | [{ptyp_arg_label: Unlabeled, ptyp_arg_type: t}] => - print_type(~original_source, ~comments, t) - | _types => - Doc.concat([ - Doc.lparen, - Doc.indent( - Doc.concat([ - Doc.softLine, - Doc.join( - ~sep=Doc.concat([Doc.comma, Doc.line]), - List.map( - ({Parsetree.ptyp_arg_label: label, ptyp_arg_type: t}) => { - let label = - switch (label) { - | Asttypes.Unlabeled => Doc.nil - | Labeled(name) => - Doc.concat([ - Doc.text(name.txt), - Doc.text(":"), - Doc.space, - ]) - | Default(name) => - Doc.concat([ - Doc.question, - Doc.text(name.txt), - Doc.text(":"), - Doc.space, - ]) - }; - Doc.concat([ - label, - print_type(~original_source, ~comments, t), - ]); - }, - types, - ), - ), - ]), - ), - Doc.ifBreaks(Doc.comma, Doc.nil), - Doc.softLine, - Doc.rparen, - ]) - }, - ), - Doc.space, - Doc.text("=>"), - Doc.space, - print_type(~original_source, ~comments, parsed_type), - ]) - - | PTyTuple(parsed_types) => - Doc.concat([ - Doc.lparen, - Doc.indent( - Doc.concat([ - Doc.softLine, - Doc.join( - ~sep=Doc.concat([Doc.comma, Doc.line]), - List.map( - t => print_type(~original_source, ~comments, t), - parsed_types, - ), - ), - ]), - ), - Doc.softLine, - Doc.rparen, - ]) - - | PTyConstr(locidentifier, parsedtypes) => - let ident = locidentifier.txt; - switch (parsedtypes) { - | [] => print_ident(ident) - | [first, ...rem] => - let get_loc = (t: Parsetree.parsed_type) => { - t.ptyp_loc; - }; - let print_item = (~comments, t: Parsetree.parsed_type) => { - print_type(~original_source, ~comments, t); - }; - - let after_angle_comments = - Comment_utils.get_after_brace_comments( - ~loc=get_loc(first), - comments, - ); - let cleaned_comments = - remove_used_comments(~remove_comments=after_angle_comments, comments); - - let type_items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~iterated_item=IteratedTypeConstructor, - parsedtypes, - ); - let printed_types = Doc.join(~sep=Doc.line, type_items); - let printed_types_after_angle = - Doc.concat([ - force_break_if_line_comment( - ~separator=Doc.softLine, - after_angle_comments, - ), - printed_types, - ]); - - Doc.group( - Doc.concat([ - print_ident(ident), - Doc.text("<"), - Doc.indent(Doc.group(printed_types_after_angle)), - Doc.softLine, - Doc.text(">"), - ]), - ); - }; - - | PTyPoly(locationstrings, parsed_type) => - let original_code = get_original_code(p.ptyp_loc, original_source); - Doc.text(original_code); - }; -} - -and print_application = - ( - ~expression_parent: expression_parent_type, - ~expressions: list(Parsetree.application_argument), - ~original_source: array(string), - ~comments: list(Parsetree.comment), - func: Parsetree.expression, - ) => { - let function_name = get_function_name(func); - - switch (expressions) { - | [first, second] when infixop(function_name) => - print_infix_application( - ~expression_parent, - ~expressions, - ~original_source, - ~comments, - func, - ) - | _ 
=> - print_other_application( - ~expression_parent, - ~expressions, - ~original_source, - ~comments, - func, - ) - }; -} - -and print_infix_application = - ( - ~expression_parent: expression_parent_type, - ~expressions: list(Parsetree.application_argument), - ~original_source: array(string), - ~comments: list(Parsetree.comment), - func: Parsetree.expression, - ) => { - let function_name = get_function_name(func); - - switch (expressions) { - | [first, second] => - let next_comments = - Comment_utils.get_comments_between_locations( - ~loc1=first.paa_loc, - ~loc2=second.paa_loc, - comments, - ); - - let (_, line, _, _) = Locations.get_raw_pos_info(first.paa_loc.loc_end); - - let line_comments = - Comment_utils.get_comments_on_line(line, next_comments); - - let after_comments = - remove_used_comments(~remove_comments=line_comments, next_comments); - - let after_comments_docs = - Comment_utils.block_trailing_comments_docs(after_comments); - - let line_comment_docs = - Comment_utils.single_line_of_comments(line_comments); - - let left_is_if = - switch (first.paa_expr.pexp_desc) { - | PExpIf(_) => true - | _ => false - }; - - let right_is_if = - switch (second.paa_expr.pexp_desc) { - | PExpIf(_) => true - | _ => false - }; - - let parent_prec = op_precedence(function_name); - let left_is_leaf = - switch (first.paa_expr.pexp_desc) { - | PExpApp(fn, expr) => - let child_name = get_function_name(fn); - let this_prec = op_precedence(child_name); - - this_prec < parent_prec || child_name != function_name; - | _ => true - }; - - let right_is_leaf = - switch (second.paa_expr.pexp_desc) { - | PExpApp(fn, expr) => - let child_name = get_function_name(fn); - let this_prec = op_precedence(child_name); - - this_prec < parent_prec || child_name != function_name; - | _ => true - }; - - let left_grouping_required = - switch (first.paa_expr.pexp_desc) { - | PExpApp(fn1, _) => - op_precedence(get_function_name(fn1)) < parent_prec - | PExpConstant(PConstNumber(PConstNumberRational(_, _))) => - op_precedence("/") < parent_prec - | _ => false - }; - - let right_grouping_required = - // the equality check is needed for the value on the right - // as we process from the left by default when the same prededence - switch (second.paa_expr.pexp_desc) { - | PExpApp(fn1, _) => - op_precedence(get_function_name(fn1)) <= parent_prec - | PExpConstant(PConstNumber(PConstNumberRational(_, _))) => - op_precedence("/") <= parent_prec - | _ => false - }; - - // Put parens around different operators for clarity, except - // math and logic operations where precedence is well-known - let left_is_different_op = - switch (first.paa_expr.pexp_desc) { - | PExpApp(fn1, _) => - let fn = get_function_name(fn1); - if (infixop(fn)) { - (!is_math_op(function_name) && !is_logic_op(function_name)) - && fn != function_name; - } else { - false; - }; - | _ => false - }; - - let left_needs_parens = - left_is_if || left_grouping_required || left_is_different_op; - let right_needs_parens = right_is_if || right_grouping_required; - - let lhs = - if (left_needs_parens) { - let l1 = - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - first.paa_expr, - ); - Doc.concat([ - Doc.lparen, - if (Doc.willBreak(l1)) { - Doc.indent(l1); - } else { - l1; - }, - Doc.rparen, - ]); - } else { - print_expression( - ~expression_parent, - ~original_source, - ~comments, - first.paa_expr, - ); - }; - - let rhs_expr = - if (right_needs_parens) { - Doc.concat([ - Doc.lparen, - Doc.concat([ - print_expression( - 
~expression_parent=GenericExpression, - ~original_source, - ~comments, - second.paa_expr, - ), - ]), - Doc.rparen, - ]); - } else { - Doc.concat([ - print_expression( - ~expression_parent, - ~original_source, - ~comments, - second.paa_expr, - ), - ]); - }; - - Doc.concat([ - if (left_is_leaf) { - Doc.group(lhs); - } else { - lhs; - }, - Doc.space, - Doc.text(function_name), - line_comment_docs, - if (after_comments_docs == Doc.nil) { - Doc.nil; - } else { - force_break_if_line_comment(~separator=Doc.nil, line_comments); - }, - after_comments_docs, - switch (expression_parent) { - | InfixExpression => - Doc.concat([ - Doc.line, - if (right_is_leaf) { - Doc.group(rhs_expr); - } else { - rhs_expr; - }, - ]) - | GenericExpression - | AccessExpression => - Doc.indent( - Doc.concat([ - Doc.line, - if (right_is_leaf) { - Doc.group(rhs_expr); - } else { - rhs_expr; - }, - ]), - ) - }, - ]); - - | _ => raise(IllegalParse("Formatter error, wrong number of args ")) - }; -} - -and print_arg_lambda = - (~comments, ~original_source, lambda: Parsetree.application_argument) => { - switch (lambda.paa_expr.pexp_desc) { - | PExpLambda(patterns, expression) => - let comments_in_expression = - Comment_utils.get_comments_inside_location( - ~location=expression.pexp_loc, - comments, - ); - - let raw_args = - print_lambda_arguments( - ~next_loc=expression.pexp_loc, - ~comments, - ~original_source, - ~followed_by_arrow=true, - patterns, - ); - - let label = - switch (lambda.paa_label) { - | Unlabeled => Doc.nil - | Labeled(name) - | Default(name) => Doc.concat([Doc.text(name.txt), Doc.equal]) - }; - - let args = - Doc.concat([ - label, - Doc.group( - switch (patterns) { - | [ - { - pla_label: Labeled(name), - pla_pattern: {ppat_desc: PPatVar(var)}, - }, - ] - when name.txt == var.txt => raw_args - | _patterns => - Doc.concat([ - Doc.lparen, - Doc.indent(Doc.concat([Doc.softLine, raw_args])), - Doc.softLine, - Doc.rparen, - ]) - }, - ), - ]); - - Doc.group( - switch (expression.pexp_desc) { - | PExpBlock(block_expressions) => - let body = - switch (block_expressions) { - | [] => - // Not legal syntax so we shouldn't ever hit it, but we'll handle - // it just in case. 
- Doc.group(Doc.concat([Doc.lbrace, Doc.rbrace])) - | _ => - let get_loc = (expr: Parsetree.expression) => { - expr.pexp_loc; - }; - let print_item = (~comments, expr: Parsetree.expression) => { - Doc.group( - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expr, - ), - ); - }; - let after_brace_comments = - Comment_utils.get_after_brace_comments( - ~loc=expression.pexp_loc, - comments_in_expression, - ); - let cleaned_comments = - remove_used_comments( - ~remove_comments=after_brace_comments, - comments_in_expression, - ); - - let print_attribute = (expr: Parsetree.expression) => - print_attributes(expr.pexp_attributes); - let printed_expressions = - block_item_iterator( - ~previous=Block(expression.pexp_loc), - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~print_attribute, - ~original_source, - block_expressions, - ); - let start_after_brace = - Doc.concat([Doc.hardLine, printed_expressions]); - - Doc.concat([ - Doc.lbrace, - Comment_utils.single_line_of_comments(after_brace_comments), - Doc.indent(start_after_brace), - Doc.hardLine, - Doc.rbrace, - ]); - }; - Doc.concat([args, Doc.space, Doc.text("=>"), Doc.space, body]); - - | _ => - let body = - Doc.group( - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_expression, - expression, - ), - ); - if (Doc.willBreak(body)) { - Doc.concat([ - args, - Doc.space, - Doc.text("=>"), - Doc.concat([Doc.space, body]), - ]); - } else { - Doc.concat([ - args, - Doc.space, - Doc.text("=>"), - Doc.indent(Doc.concat([Doc.line, body])), - ]); - }; - }, - ); - - | _ => raise(IllegalParse("Called on a non-lambda")) - }; -} - -and print_arg = - (~original_source, ~comments, arg: Parsetree.application_argument) => { - switch (arg.paa_expr.pexp_desc) { - | PExpLambda(patterns, expression) => - print_arg_lambda(~comments, ~original_source, arg) - | _ => - Doc.group( - print_application_argument( - ~expression_parent=InfixExpression, - ~original_source, - ~comments, - arg, - ), - ) - }; -} - -and print_args_with_comments = - ( - ~comments: list(Parsetree.comment), - ~original_source, - args: list(Parsetree.application_argument), - ) => { - let get_loc = (e: Parsetree.application_argument) => e.paa_loc; - let print_item = (~comments, e: Parsetree.application_argument) => { - Doc.group( - print_application_argument( - ~expression_parent=InfixExpression, - ~original_source, - ~comments, - e, - ), - ); - }; - - let args = - item_iterator( - ~get_loc, - ~print_item, - ~comments, - ~followed_by_arrow=false, - ~iterated_item=IteratedArgs, - args, - ); - - Doc.join(~sep=Doc.line, args); -} - -and print_arguments_with_callback_in_first_position = - (~original_source, ~comments, args: list(Parsetree.application_argument)) => { - switch (args) { - | [] => Doc.nil - | [callback] => - // we handle the special case of just one callback here as we call this if the first arg is a callback - - print_arg_lambda(~comments, ~original_source, callback) - - | [callback, expr] => - let printed_callback = - print_arg_lambda(~comments, ~original_source, callback); - - let printed_arg = print_arg(~comments, ~original_source, expr); - - Doc.ifBreaks( - Doc.concat([printed_callback, Doc.comma, Doc.space, printed_arg]), - Doc.concat([ - Doc.indent( - Doc.concat([ - Doc.softLine, - printed_callback, - Doc.comma, - Doc.line, - printed_arg, - ]), - ), - Doc.softLine, - ]), - ); - | [callback, ...remainder] => - let printed_callback = - print_arg_lambda(~comments, ~original_source, 
callback); - - let printed_args = - print_args_with_comments(~comments, ~original_source, remainder); - - Doc.concat([ - printed_callback, - Doc.comma, - Doc.group( - Doc.concat([ - Doc.line, - printed_args, - Doc.ifBreaks(Doc.line, Doc.nil), - ]), - ), - ]); - }; -} - -and print_arguments_with_callback_in_last_position = - (~original_source, ~comments, args: list(Parsetree.application_argument)) => - switch (args) { - | [] => Doc.nil - | [expr, callback] => - let printed_callback = - print_arg_lambda(~comments, ~original_source, callback); - let printed_first_arg = print_arg(~comments, ~original_source, expr); - - Doc.concat([printed_first_arg, Doc.comma, Doc.space, printed_callback]); - - | _ => - let last_expression = get_last_item_in_list(args); - let printed_callback = - print_arg_lambda(~comments, ~original_source, last_expression); - - let remainderArr = - Array.sub(Array.of_list(args), 0, List.length(args) - 1); - - let printed_args = - print_args_with_comments( - ~comments, - ~original_source, - Array.to_list(remainderArr), - ); - - Doc.concat([ - Doc.indent( - Doc.concat([ - Doc.softLine, - printed_args, - Doc.comma, - Doc.line, - printed_callback, - ]), - ), - Doc.softLine, - ]); - } - -and print_other_application = - ( - ~expression_parent: expression_parent_type, - ~expressions: list(Parsetree.application_argument), - ~original_source: array(string), - ~comments: list(Parsetree.comment), - func: Parsetree.expression, - ) => { - let function_name = get_function_name(func); - - switch (expressions) { - | [first] when prefixop(function_name) => - switch (first.paa_expr.pexp_desc) { - | PExpApp(fn, _) => - let inner_fn = get_function_name(fn); - if (infixop(inner_fn)) { - Doc.concat([ - Doc.text(function_name), - Doc.lparen, - Doc.group( - print_application_argument( - ~expression_parent, - ~original_source, - ~comments, - first, - ), - ), - Doc.rparen, - ]); - } else { - Doc.concat([ - Doc.text(function_name), - Doc.group( - print_application_argument( - ~expression_parent, - ~original_source, - ~comments, - first, - ), - ), - ]); - }; - - | _ => - Doc.concat([ - Doc.text(function_name), - Doc.group( - print_application_argument( - ~expression_parent, - ~original_source, - ~comments, - first, - ), - ), - ]) - } - - | [first, second] when infixop(function_name) => - print_infix_application( - ~expression_parent, - ~expressions, - ~original_source, - ~comments, - func, - ) - | _ when infixop(function_name) => - raise(IllegalParse("Formatter error, wrong number of args ")) - | [first_expr, ..._] - when Array.exists(fn => function_name == fn, exception_primitives) => - Doc.concat([ - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - func, - ), - Doc.space, - print_application_argument( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - first_expr, - ), - ]) - | [first_expr, ..._] => - // standard function application - // look out for special cases of callbacks in first or last position - - let first_arg_is_callback = - switch (first_expr.paa_expr.pexp_desc) { - | PExpLambda(_) => true - | _ => false - }; - - let last_arg_is_callback = - switch (expressions) { - | [] => false - | _ => - let last_expression = get_last_item_in_list(expressions); - - switch (last_expression.paa_expr.pexp_desc) { - | PExpLambda(_) => true - | _ => false - }; - }; - - if (first_arg_is_callback) { - let printed_args = - print_arguments_with_callback_in_first_position( - ~original_source, - ~comments, - expressions, - ); - Doc.concat([ - 
print_expression( - ~expression_parent=AccessExpression, - ~original_source, - ~comments, - func, - ), - Doc.lparen, - printed_args, - Doc.rparen, - ]); - } else if (last_arg_is_callback) { - let printed_args = - print_arguments_with_callback_in_last_position( - ~original_source, - ~comments, - expressions, - ); - Doc.concat([ - print_expression( - ~expression_parent=AccessExpression, - ~original_source, - ~comments, - func, - ), - Doc.lparen, - printed_args, - Doc.rparen, - ]); - } else { - let printed_args = - print_args_with_comments(~comments, ~original_source, expressions); - - Doc.group( - Doc.concat([ - print_expression( - ~expression_parent=AccessExpression, - ~original_source, - ~comments, - func, - ), - Doc.lparen, - Doc.indent(Doc.concat([Doc.softLine, printed_args])), - Doc.softLine, - Doc.rparen, - ]), - ); - }; - - | [] => - Doc.group( - Doc.concat([ - print_expression( - ~expression_parent=AccessExpression, - ~original_source, - ~comments, - func, - ), - Doc.lparen, - Doc.softLine, - Doc.rparen, - ]), - ) - }; -} - -and get_function_name = (expr: Parsetree.expression) => { - switch (expr.pexp_desc) { - | PExpConstant(x) => - switch (x) { - | PConstString(str) => str - | _ => "" - } - - | PExpId({txt: id}) => - switch (id) { - | IdentName(name) => name.txt - | _ => "" - } - | _ => "" - }; -} - -and check_for_pun = (expr: Parsetree.expression) => - switch (expr.pexp_desc) { - | PExpId({txt: id}) => print_ident(id) - | _ => Doc.nil - } - -and print_patterns = - ( - ~next_loc: Location.t, - ~comments: list(Parsetree.comment), - ~original_source: array(string), - ~followed_by_arrow: option(bool)=?, - patterns: list(Parsetree.pattern), - ) => { - let get_loc = (p: Parsetree.pattern) => p.ppat_loc; - let print_item = (~comments, p: Parsetree.pattern) => { - print_pattern(~original_source, ~comments, ~next_loc, p); - }; - - let comments_in_scope = - Comment_utils.get_comments_before_location(~location=next_loc, comments); - - switch (patterns) { - | [] => Doc.nil - | _ => - let items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=comments_in_scope, - ~followed_by_arrow?, - ~iterated_item=IteratedPatterns, - patterns, - ); - Doc.join(~sep=Doc.line, items); - }; -} - -and print_lambda_arguments = - ( - ~next_loc: Location.t, - ~comments: list(Parsetree.comment), - ~original_source: array(string), - ~followed_by_arrow: option(bool)=?, - arguments: list(Parsetree.lambda_argument), - ) => { - let get_loc = (l: Parsetree.lambda_argument) => l.pla_loc; - let print_item = - ( - ~comments, - {pla_pattern: pattern, pla_default: default, pla_loc}: Parsetree.lambda_argument, - ) => { - let pattern_doc = - print_pattern(~original_source, ~comments, ~next_loc, pattern); - let default_doc = - switch (default) { - | None => Doc.nil - | Some(expr) => - Doc.concat([ - Doc.equal, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expr, - ), - ]) - }; - - Doc.concat([pattern_doc, default_doc]); - }; - - let comments_in_scope = - Comment_utils.get_comments_before_location(~location=next_loc, comments); - - switch (arguments) { - | [] => Doc.nil - | _ => - let items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=comments_in_scope, - ~followed_by_arrow?, - ~iterated_item=IteratedPatterns, - arguments, - ); - Doc.join(~sep=Doc.line, items); - }; -} - -and print_application_argument = - ( - ~comments: list(Parsetree.comment), - ~expression_parent: expression_parent_type, - ~original_source: array(string), - argument: 
Parsetree.application_argument, - ) => { - let expr_doc = - print_expression( - ~expression_parent, - ~original_source, - ~comments, - argument.paa_expr, - ); - switch (argument.paa_label, argument.paa_expr.pexp_desc) { - | (Asttypes.Unlabeled, _) => expr_doc - | (Labeled(name) | Default(name), _) => - Doc.concat([Doc.text(name.txt), Doc.equal, expr_doc]) - }; -} - -and print_application_arguments = - ( - ~next_loc: Location.t, - ~comments: list(Parsetree.comment), - ~expression_parent: expression_parent_type, - ~original_source: array(string), - ~followed_by_arrow: option(bool)=?, - arguments: list(Parsetree.application_argument), - ) => { - let get_loc = (l: Parsetree.application_argument) => l.paa_loc; - let print_item = (~comments, argument: Parsetree.application_argument) => { - print_application_argument( - ~comments, - ~expression_parent, - ~original_source, - argument, - ); - }; - - let comments_in_scope = - Comment_utils.get_comments_before_location(~location=next_loc, comments); - - switch (arguments) { - | [] => Doc.nil - | _ => - let items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=comments_in_scope, - ~followed_by_arrow?, - ~iterated_item=IteratedPatterns, - arguments, - ); - Doc.join(~sep=Doc.line, items); - }; -} - -and paren_wrap_patterns = - ( - ~wrapper: Location.t, - ~next_loc: Location.t, - ~comments: list(Parsetree.comment), - ~original_source: array(string), - ~followed_by_arrow: bool, - patterns: list(Parsetree.lambda_argument), - ) => { - let args = - print_lambda_arguments( - ~next_loc, - ~comments, - ~original_source, - ~followed_by_arrow, - patterns, - ); - - switch (patterns) { - | [] => Doc.concat([Doc.lparen, args, Doc.rparen]) - | [{pla_label: Labeled(name), pla_pattern: {ppat_desc: PPatVar(var)}}] - when name.txt == var.txt => args - | _patterns => - let trail_sep = Doc.ifBreaks(Doc.comma, Doc.nil); - - Doc.group( - Doc.indent( - Doc.concat([ - Doc.softLine, - Doc.lparen, - Doc.indent(Doc.concat([Doc.softLine, args, trail_sep])), - Doc.softLine, - Doc.rparen, - ]), - ), - ); - }; -} -and print_expression_inner = - ( - ~expression_parent: expression_parent_type, - ~original_source: array(string), - ~comments: list(Parsetree.comment), - expr: Parsetree.expression, - ) => { - let expression_doc = - switch (expr.pexp_desc) { - | PExpConstant(x) => - print_constant(~original_source, ~loc=expr.pexp_loc, x) - | PExpId({txt: id}) => print_ident(id) - | PExpLet(rec_flag, mut_flag, vbs) => - print_value_bind( - ~provide_flag=Asttypes.NotProvided, - ~rec_flag, - ~mut_flag, - ~original_source, - ~comments, - vbs, - ) - | PExpTuple(expressions) => - let get_loc = (e: Parsetree.expression) => { - e.pexp_loc; - }; - let print_item = (~comments, e: Parsetree.expression) => { - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - e, - ); - }; - - let after_paren_comments = - Comment_utils.get_after_brace_comments(~loc=expr.pexp_loc, comments); - let cleaned_comments = - remove_used_comments(~remove_comments=after_paren_comments, comments); - let expr_items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~iterated_item=IteratedTupleExpression, - expressions, - ); - - let printed_expr_items = Doc.join(~sep=Doc.line, expr_items); - let printed_expr_items_after_paren = - Doc.concat([Doc.softLine, printed_expr_items]); - Doc.group( - Doc.concat([ - Doc.lparen, - Comment_utils.single_line_of_comments(after_paren_comments), - Doc.indent(printed_expr_items_after_paren), - Doc.ifBreaks( - Doc.nil, - 
switch (expressions) { - | [_one] => Doc.comma - | _ => Doc.nil - }, - ), - Doc.softLine, - Doc.rparen, - ]), - ); - - | PExpArray(expressions) => - let get_loc = (e: Parsetree.expression) => { - e.pexp_loc; - }; - let print_item = (~comments, e: Parsetree.expression) => { - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - e, - ); - }; - - let after_bracket_comments = - Comment_utils.get_after_brace_comments(~loc=expr.pexp_loc, comments); - let cleaned_comments = - remove_used_comments( - ~remove_comments=after_bracket_comments, - comments, - ); - let items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~iterated_item=IteratedArrayExpression, - expressions, - ); - Doc.group( - switch (expressions) { - | [] => Doc.text("[>]") - | _ => - Doc.concat([ - Doc.lbracket, - Doc.text("> "), - Comment_utils.single_line_of_comments(after_bracket_comments), - Doc.indent( - Doc.concat([ - force_break_if_line_comment( - ~separator=Doc.softLine, - after_bracket_comments, - ), - Doc.join(~sep=Doc.line, items), - ]), - ), - Doc.softLine, - Doc.rbracket, - ]) - }, - ); - | PExpArrayGet(expression1, expression2) => - Doc.concat([ - print_expression( - ~expression_parent=AccessExpression, - ~original_source, - ~comments, - expression1, - ), - Doc.lbracket, - Doc.group( - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression2, - ), - ), - Doc.rbracket, - ]) - | PExpArraySet(expression1, expression2, expression3) => - Doc.group( - Doc.concat([ - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression1, - ), - Doc.lbracket, - Doc.group( - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression2, - ), - ), - Doc.rbracket, - Doc.space, - Doc.text("="), - Doc.indent( - Doc.concat([ - Doc.space, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression3, - ), - ]), - ), - ]), - ) - - | PExpRecord(base, record) => - print_record( - ~base, - ~fields=record, - ~original_source, - ~comments, - expr.pexp_loc, - ) - | PExpRecordGet(expression, {txt, _}) => - Doc.concat([ - print_expression( - ~expression_parent=AccessExpression, - ~original_source, - ~comments, - expression, - ), - Doc.dot, - print_ident(txt), - ]) - | PExpRecordSet(expression, {txt, _}, expression2) => - let left = - Doc.concat([ - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression, - ), - Doc.dot, - print_ident(txt), - ]); - print_assignment(~original_source, ~comments, left, expression2); - | PExpMatch(expression, match_branches) => - let arg = - Doc.concat([ - Doc.lparen, - Doc.group( - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression, - ), - ), - Doc.rparen, - ]); - - let get_loc = (branch: Parsetree.match_branch) => { - branch.pmb_loc; - }; - - let print_item = (~comments, branch: Parsetree.match_branch) => { - let branch_comments = - Comment_utils.get_comments_inside_location( - ~location=branch.pmb_loc, - comments, - ); - - let branch_pattern_comments = - Comment_utils.get_comments_inside_location( - ~location=branch.pmb_pat.ppat_loc, - comments, - ); - - Doc.group( - Doc.concat([ - Doc.group( - print_pattern( - ~original_source, - ~comments=branch_pattern_comments, - ~next_loc= - switch (branch.pmb_guard) { - | None => branch.pmb_body.pexp_loc - | Some(b) => b.pexp_loc - }, - 
branch.pmb_pat, - ), - ), - switch (branch.pmb_guard) { - | None => - Doc.concat([ - Doc.space, - Doc.text("=>"), - switch (branch.pmb_body.pexp_desc) { - | PExpBlock(expressions) => - Doc.concat([ - Doc.space, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=branch_comments, - branch.pmb_body, - ), - ]) - | _ => - Doc.indent( - Doc.concat([ - Doc.line, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=branch_comments, - branch.pmb_body, - ), - ]), - ) - }, - ]) - | Some(guard) => - let branch_guard_comments = - Comment_utils.get_comments_inside_location( - ~location=guard.pexp_loc, - comments, - ); - let guard_doc = - Doc.group( - print_expression( - ~expression_parent=InfixExpression, - ~original_source, - ~comments=branch_guard_comments, - guard, - ), - ); - Doc.concat([ - Doc.space, - Doc.text("when"), - Doc.space, - Doc.group( - Doc.concat([ - Doc.ifBreaks(Doc.lparen, Doc.nil), - Doc.indent(Doc.concat([Doc.softLine, guard_doc])), - Doc.softLine, - Doc.ifBreaks(Doc.rparen, Doc.nil), - ]), - ), - Doc.space, - Doc.text("=>"), - switch (branch.pmb_body.pexp_desc) { - | PExpBlock(_) - | PExpIf(_) => - Doc.concat([ - Doc.space, - Doc.group( - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=branch_comments, - branch.pmb_body, - ), - ), - ]) - - | _ => - Doc.indent( - Doc.concat([ - Doc.line, - Doc.group( - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=branch_comments, - branch.pmb_body, - ), - ), - ]), - ) - }, - ]); - }, - ]), - ); - }; - - let after_brace_comments = - Comment_utils.get_after_brace_comments(~loc=expr.pexp_loc, comments); - let cleaned_comments = - remove_used_comments(~remove_comments=after_brace_comments, comments); - - let items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~iterated_item=IteratedMatchItem, - match_branches, - ); - let printed_branches = Doc.join(~sep=Doc.hardLine, items); - - let printed_branches_after_brace = - Doc.concat([ - force_break_if_line_comment( - ~separator=Doc.line, - after_brace_comments, - ), - printed_branches, - ]); - - Doc.group( - Doc.concat([ - Doc.text("match"), - Doc.space, - arg, - Doc.space, - Doc.lbrace, - Comment_utils.single_line_of_comments(after_brace_comments), - Doc.indent(printed_branches_after_brace), - Doc.line, - Doc.rbrace, - ]), - ); - - | PExpPrim0(prim0) => - let original_code = get_original_code(expr.pexp_loc, original_source); - Doc.text(original_code); - | PExpPrim1(prim1, expression) => - let original_code = get_original_code(expr.pexp_loc, original_source); - Doc.text(original_code); - | PExpPrim2(prim2, expression, expression1) => - let original_code = get_original_code(expr.pexp_loc, original_source); - Doc.text(original_code); - | PExpPrimN(primn, expressions) => - let original_code = get_original_code(expr.pexp_loc, original_source); - Doc.text(original_code); - | PExpIf(condition, true_expr, false_expr) => - let cond_leading_comment = - Comment_utils.get_comments_from_start_of_enclosing_location( - ~enclosing_location=expr.pexp_loc, - ~location=condition.pexp_loc, - comments, - ); - - let cond_trailing_comment = - Comment_utils.get_comments_between_locs( - ~begin_loc=condition.pexp_loc, - ~end_loc=true_expr.pexp_loc, - comments, - ); - - let last_comment_different_line = - switch (cond_trailing_comment) { - | [] => false - | [first, ...rest] => - let (_, first_comment_line, _, _) = - 
Locations.get_raw_pos_info( - Locations.get_comment_loc(first).loc_start, - ); - - let (_, condition_line, _, _) = - Locations.get_raw_pos_info(condition.pexp_loc.loc_end); - - first_comment_line > condition_line; - }; - - let same_line_comments = - last_comment_different_line ? [] : cond_trailing_comment; - let later_line_comments = - last_comment_different_line ? cond_trailing_comment : []; - - let print_later_comments = (~default, later_line_comments) => - switch (later_line_comments) { - | [] => default - | cmts => - Doc.concat([ - Doc.line, - Comment_utils.new_comments_to_docs(later_line_comments), - ]) - }; - - let true_trailing_comment = - switch (false_expr) { - | None => - Comment_utils.get_comments_after_location( - ~location=true_expr.pexp_loc, - comments, - ) - | Some(false_expr) => - Comment_utils.get_comments_between_locs( - ~begin_loc=true_expr.pexp_loc, - ~end_loc=false_expr.pexp_loc, - comments, - ) - }; - - let true_is_block = - switch (true_expr.pexp_desc) { - | PExpBlock(_) => true - | _ => false - }; - - let true_is_if = - switch (true_expr.pexp_desc) { - | PExpIf(_) => true - | _ => false - }; - - let false_is_block = - switch (false_expr) { - | Some({pexp_desc: PExpBlock(_)}) => true - | _ => false - }; - - let false_is_if = - switch (false_expr) { - | Some({pexp_desc: PExpBlock(expressions)}) => - switch (expressions) { - | [] => false - | [hd, ...tail] => - switch (hd.pexp_desc) { - | PExpIf(_) => true - | _ => false - } - } - | _ => false - }; - - let commentsInCondition = - Comment_utils.get_comments_inside_location( - ~location=condition.pexp_loc, - comments, - ); - - let comments_in_true_statement = - Comment_utils.get_comments_inside_location( - ~location=true_expr.pexp_loc, - comments, - ); - - let true_made_block = ref(false); - let false_made_block = ref(false); - - let true_clause = - switch (true_expr.pexp_desc) { - | PExpBlock(expressions) => - Doc.concat([ - Doc.space, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_true_statement, - true_expr, - ), - ]) - - | _ => - if (false_is_block) { - true_made_block := true; - Doc.concat([ - Doc.space, - Doc.lbrace, - // no comment to add here as this was a single line expression - Doc.indent( - Doc.concat([ - Doc.hardLine, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_true_statement, - true_expr, - ), - ]), - ), - Doc.hardLine, - Doc.rbrace, - ]); - } else if (true_is_if) { - Doc.concat([ - print_later_comments(~default=Doc.space, later_line_comments), - Doc.lparen, - Doc.indent( - Doc.concat([ - Doc.softLine, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_true_statement, - true_expr, - ), - ]), - ), - Doc.softLine, - Doc.rparen, - ]); - } else { - Doc.indent( - Doc.concat([ - print_later_comments(~default=Doc.line, later_line_comments), - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_true_statement, - true_expr, - ), - ]), - ); - } - }; - - let comments_in_false_statement = - switch (false_expr) { - | None => [] - | Some({pexp_loc}) => - Comment_utils.get_comments_inside_location( - ~location=pexp_loc, - comments, - ) - }; - - let false_clause = - switch (false_expr) { - | Some({pexp_desc: PExpBlock(expressions)} as false_expr) => - switch (expressions) { - | [] => Doc.nil - | _ => - Doc.concat([ - Doc.space, - Doc.text("else"), - Doc.space, - print_expression( - 
~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_false_statement, - false_expr, - ), - ]) - } - | Some( - {pexp_desc: PExpIf(_condition, _true_expr, _false_expr)} as false_expr, - ) => - Doc.concat([ - Doc.space, - Doc.text("else"), - if (false_is_if) { - Doc.concat([ - Doc.space, - Doc.lparen, - Doc.indent( - Doc.concat([ - Doc.softLine, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_false_statement, - false_expr, - ), - ]), - ), - Doc.softLine, - Doc.rparen, - ]); - } else { - Doc.concat([ - Doc.space, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_false_statement, - false_expr, - ), - ]); - }, - ]) - | Some(false_expr) => - Doc.concat([ - if (true_is_block) { - false_made_block := true; - Doc.concat([ - Doc.space, - Doc.text("else"), - Doc.space, - Doc.lbrace, - // no comments to add here as original was single line - Doc.indent( - Doc.concat([ - Doc.hardLine, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_false_statement, - false_expr, - ), - ]), - ), - Doc.hardLine, - Doc.rbrace, - ]); - } else { - Doc.concat([ - Doc.line, - Doc.text("else"), - Doc.space, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_false_statement, - false_expr, - ), - ]); - }, - ]) - | None => Doc.nil - }; - - let inner = - Doc.concat([ - Doc.softLine, - Comment_utils.inbetween_comments_to_docs( - ~offset=false, - cond_leading_comment, - ), - switch (cond_leading_comment) { - | [] => Doc.nil - | _ => Doc.ifBreaks(Doc.nil, Doc.space) - }, - Doc.group( - print_expression( - ~expression_parent=InfixExpression, - ~original_source, - ~comments=commentsInCondition, - condition, - ), - ), - switch (same_line_comments) { - | [] => Doc.nil - | _ => - Doc.concat([ - Doc.concat( - List.mapi( - (index, c) => - Doc.concat([ - Doc.space, - Comment_utils.comment_to_doc(c), - switch (c) { - | Line(_) => Doc.breakParent - | _ => Doc.nil - }, - ]), - same_line_comments, - ), - ), - ]) - }, - ]); - - Doc.concat([ - Doc.group( - Doc.concat([ - Doc.text("if"), - Doc.space, - Doc.group( - Doc.concat([ - Doc.lparen, - Doc.indent(inner), - Doc.softLine, - Doc.rparen, - ]), - ), - ]), - ), - Doc.group(true_clause), - Comment_utils.inbetween_comments_to_docs( - ~offset=true, - true_trailing_comment, - ), - Doc.group(false_clause), - ]); - | PExpWhile(expression, expression1) => - let comments_in_expression = - Comment_utils.get_comments_inside_location( - ~location=expression.pexp_loc, - comments, - ); - let comments_in_expression_1 = - Comment_utils.get_comments_inside_location( - ~location=expression1.pexp_loc, - comments, - ); - Doc.concat([ - Doc.text("while"), - Doc.space, - Doc.group( - Doc.concat([ - Doc.lparen, - Doc.indent( - Doc.concat([ - Doc.softLine, - print_expression( - ~expression_parent=InfixExpression, - ~original_source, - ~comments=comments_in_expression, - expression, - ), - ]), - ), - Doc.softLine, - Doc.rparen, - ]), - ), - Doc.space, - Doc.group( - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_expression_1, - expression1, - ), - ), - ]); - - | PExpFor(optexpression1, optexpression2, optexpression3, expression4) => - let comments_in_expression4 = - Comment_utils.get_comments_inside_location( - ~location=expression4.pexp_loc, - comments, - ); - let comments_before_loop_expression = - 
Comment_utils.get_comments_enclosed_and_before_location( - ~loc1=expr.pexp_loc, - ~loc2=expression4.pexp_loc, - comments, - ); - Doc.concat([ - Doc.group( - Doc.concat([ - Doc.text("for"), - Doc.space, - Doc.lparen, - Doc.indent( - Doc.concat([ - Doc.softLine, - switch (optexpression1) { - | Some(expr) => - Doc.group( - print_expression( - ~expression_parent=InfixExpression, - ~original_source, - ~comments=comments_before_loop_expression, - expr, - ), - ) - | None => Doc.nil - }, - Doc.text(";"), - switch (optexpression2, optexpression3) { - | (None, None) => Doc.nil - | (None, Some(_)) => Doc.space - | (Some(expr), _) => - Doc.concat([ - Doc.line, - Doc.group( - print_expression( - ~expression_parent=InfixExpression, - ~original_source, - ~comments=comments_before_loop_expression, - expr, - ), - ), - ]) - }, - Doc.text(";"), - switch (optexpression3) { - | Some(expr) => - Doc.concat([ - switch (expr.pexp_desc) { - | PExpBlock(_) => Doc.space - | _ => Doc.line - }, - Doc.group( - print_expression( - ~expression_parent=InfixExpression, - ~original_source, - ~comments=comments_before_loop_expression, - expr, - ), - ), - ]) - | None => Doc.nil - }, - ]), - ), - Doc.softLine, - Doc.rparen, - ]), - ), - Doc.space, - Doc.group( - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_expression4, - expression4, - ), - ), - ]); - | PExpContinue => Doc.text("continue") - | PExpBreak => Doc.text("break") - | PExpReturn(expr) => - Doc.concat([ - Doc.text("return"), - switch (expr) { - | Some(expr) => - Doc.concat([ - Doc.space, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expr, - ), - ]) - | None => Doc.nil - }, - ]) - | PExpConstraint(expression, parsed_type) => - let comments_in_expression = - Comment_utils.get_comments_inside_location( - ~location=expression.pexp_loc, - comments, - ); - - Doc.group( - Doc.concat([ - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_expression, - expression, - ), - Doc.text(":"), - Doc.space, - print_type( - ~original_source, - ~comments= - Comment_utils.get_comments_inside_location( - ~location=parsed_type.ptyp_loc, - comments, - ), - parsed_type, - ), - ]), - ); - | PExpLambda(patterns, expression) => - let comments_in_expression = - Comment_utils.get_comments_inside_location( - ~location=expression.pexp_loc, - comments, - ); - - let patterns_comments = - Comment_utils.get_comments_enclosed_and_before_location( - ~loc1=expr.pexp_loc, - ~loc2=expression.pexp_loc, - comments, - ); - - let args = - paren_wrap_patterns( - ~wrapper=expr.pexp_loc, - ~next_loc=expression.pexp_loc, - ~comments=patterns_comments, - ~original_source, - ~followed_by_arrow=true, - patterns, - ); - - switch (expression.pexp_desc) { - | PExpBlock(_) - | PExpLambda(_) => - Doc.concat([ - Doc.group( - Doc.concat([args, Doc.space, Doc.text("=>"), Doc.space]), - ), - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_expression, - expression, - ), - ]) - | PExpIf(_) => - let out = - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_expression, - expression, - ); - - Doc.concat([ - Doc.group( - Doc.concat([ - args, - Doc.space, - Doc.text("=>"), - Doc.ifBreaks(Doc.space, Doc.line), - ]), - ), - out, - ]); - | _ => - Doc.concat([ - args, - Doc.space, - Doc.text("=>"), - Doc.indent( - Doc.concat([ - Doc.line, - print_expression( - 
~expression_parent=GenericExpression, - ~original_source, - ~comments=comments_in_expression, - expression, - ), - ]), - ), - ]) - }; - - | PExpApp(func, expressions) => - let comments_in_expression = - Comment_utils.get_comments_inside_location( - ~location=expr.pexp_loc, - comments, - ); - print_application( - ~expression_parent, - ~expressions, - ~original_source, - ~comments=comments_in_expression, - func, - ); - | PExpConstruct( - {txt: IdentName({txt: "[...]"})}, - PExpConstrTuple(expressions), - ) => - resugar_list(~original_source, ~comments, expressions) - | PExpConstruct( - {txt: IdentName({txt: "[]"})}, - PExpConstrTuple(expressions), - ) => - Doc.text("[]") - | PExpConstruct({txt: id}, PExpConstrSingleton) => print_ident(id) - | PExpConstruct(constr, PExpConstrTuple(expressions)) => - let comments_in_expression = - Comment_utils.get_comments_inside_location( - ~location=expr.pexp_loc, - comments, - ); - // Treat constructors as function calls - let expressions = - List.map( - expr => - { - Parsetree.paa_label: Unlabeled, - paa_expr: expr, - paa_loc: expr.pexp_loc, - }, - expressions, - ); - print_application( - ~expression_parent, - ~expressions, - ~original_source, - ~comments=comments_in_expression, - Ast_helper.Expression.ident(~loc=constr.loc, constr), - ); - | PExpConstruct(id, PExpConstrRecord(record)) => - Doc.concat([ - print_ident(id.txt), - print_record( - ~base=None, - ~fields=record, - ~original_source, - ~comments, - expr.pexp_loc, - ), - ]) - | PExpBlock(expressions) => - switch (expressions) { - | [] => - // Not legal syntax so we shouldn't ever hit it, but we'll handle - // it just in case. - Doc.breakableGroup( - ~forceBreak=true, - Doc.concat([Doc.lbrace, Doc.indent(Doc.line), Doc.rbrace]), - ) - | _ => - let get_loc = (expr: Parsetree.expression) => { - expr.pexp_loc; - }; - - let print_item = (~comments, expr: Parsetree.expression) => { - Doc.group( - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expr, - ), - ); - }; - - let after_brace_comments = - Comment_utils.get_after_brace_comments( - ~loc=expr.pexp_loc, - comments, - ); - - let cleaned_comments = - remove_used_comments( - ~remove_comments=after_brace_comments, - comments, - ); - - let print_attribute = (expr: Parsetree.expression) => - print_attributes(expr.pexp_attributes); - - let printed_expressions = - block_item_iterator( - ~previous=Block(expr.pexp_loc), - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~print_attribute, - ~original_source, - expressions, - ); - - let start_after_brace = - Doc.concat([Doc.hardLine, printed_expressions]); - - Doc.breakableGroup( - ~forceBreak=true, - Doc.concat([ - Doc.lbrace, - Comment_utils.single_line_of_comments(after_brace_comments), - Doc.indent(start_after_brace), - Doc.hardLine, - Doc.rbrace, - ]), - ); - } - - | PExpBoxAssign(expression, expression1) => - Doc.concat([ - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression, - ), - Doc.space, - Doc.text(":="), - Doc.space, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression1, - ), - ]) - | PExpAssign(expression, expression1) => - let left = - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression, - ); - print_assignment(~original_source, ~comments, left, expression1); - | PExpUse(module_, items) => - let use = - switch (items) { - | PUseAll => Doc.text("*") - | PUseItems(items) => - switch (items) { 
- | [] => Doc.concat([Doc.lbrace, Doc.rbrace]) - | _ => - let get_loc = (item: Parsetree.use_item) => { - switch (item) { - | PUseValue({loc}) - | PUseModule({loc}) - | PUseType({loc}) - | PUseException({loc}) => loc - }; - }; - - let comments_in_expression = - Comment_utils.get_comments_inside_location( - ~location=expr.pexp_loc, - comments, - ); - - let after_brace_comments = - Comment_utils.get_after_brace_comments( - ~loc=expr.pexp_loc, - comments_in_expression, - ); - let cleaned_comments = - remove_used_comments( - ~remove_comments=after_brace_comments, - comments_in_expression, - ); - - let print_item = (~comments, item: Parsetree.use_item) => { - let item_name = (name, alias) => { - Location.( - switch (alias) { - | None => print_ident(name.txt) - | Some(alias) => - Doc.concat([ - print_ident(name.txt), - Doc.space, - Doc.text("as"), - Doc.space, - print_ident(alias.txt), - ]) - } - ); - }; - - switch (item) { - | PUseValue({name, alias}) => item_name(name, alias) - | PUseModule({name, alias}) => - Doc.concat([Doc.text("module "), item_name(name, alias)]) - | PUseType({name, alias}) => - Doc.concat([Doc.text("type "), item_name(name, alias)]) - | PUseException({name, alias}) => - Doc.concat([Doc.text("exception "), item_name(name, alias)]) - }; - }; - - let printed_items = - block_item_iterator_line( - ~previous=Block(expr.pexp_loc), - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~separator=Doc.comma, - ~original_source, - items, - ); - - Doc.concat([ - Doc.lbrace, - Comment_utils.single_line_of_comments(after_brace_comments), - Doc.indent( - Doc.concat([ - force_break_if_line_comment( - ~separator=Doc.line, - after_brace_comments, - ), - printed_items, - ]), - ), - Doc.line, - Doc.rbrace, - ]); - } - }; - Doc.concat([ - Doc.text("from"), - Doc.space, - print_ident(module_.txt), - Doc.space, - Doc.text("use"), - Doc.space, - use, - ]); - }; - - expression_doc; -} - -and is_grouped_access_expression = (expr: Parsetree.expression) => { - switch (expr.pexp_desc) { - | PExpConstant(_) - | PExpConstruct(_) - | PExpTuple(_) - | PExpId(_) - | PExpArrayGet(_) - | PExpArraySet(_) - | PExpRecordGet(_) - | PExpRecordSet(_) - | PExpRecord(_) - | PExpBlock(_) - | PExpArray(_) => false - | PExpApp(func, _) => - let func_name = get_function_name(func); - infixop(func_name); - | _ => true - }; -} - -and print_expression = - ( - ~expression_parent: expression_parent_type, - ~original_source: array(string), - ~comments: list(Parsetree.comment), - expr: Parsetree.expression, - ) => { - let printed_expr = - print_expression_inner( - ~expression_parent, - ~original_source, - ~comments, - expr, - ); - switch (expression_parent) { - | AccessExpression => - if (is_grouped_access_expression(expr)) { - Doc.concat([Doc.lparen, printed_expr, Doc.rparen]); - } else { - printed_expr; - } - - | GenericExpression - | InfixExpression => printed_expr - }; -} -and print_assignment = (~original_source, ~comments, left, value) => { - switch (value.pexp_desc) { - | PExpApp(func, expressions) => - let function_name = get_function_name(func); - - let trimmed_operator = String.trim(function_name); - - let left_matches_first = - switch (expressions) { - | [expr, ...remainder] => - print_application_argument( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expr, - ) - == left - | _ => false - }; - - if (left_matches_first) { - // +=, -=, *=, /=, and %= - switch (trimmed_operator) { - | "+" - | "-" - | "*" - | "/" - | "%" => - let sugared_op = Doc.text(" " ++ trimmed_operator ++ "= "); 
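// Annotation (not part of the original diff): the branch above resugars an
// assignment whose right-hand side reapplies one of the five arithmetic
// operators to the assignment target itself (the `left_matches_first` check),
// so Grain source such as `count = count + 1` prints as `count += 1`.
// A minimal, hypothetical sketch of that operator mapping — the helper name
// is illustrative and does not exist in this code:
let compound_assign_op = (op: string): option(string) => {
  let op = String.trim(op);
  switch (op) {
  | "+" | "-" | "*" | "/" | "%" => Some(" " ++ op ++ "= ")
  | _ => None
  };
};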
- Doc.concat([ - left, - sugared_op, - switch (expressions) { - | [] => - raise(IllegalParse("Sugared op needs at least one expression")) - | [expression] => - let expr = - print_application_argument( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression, - ); - switch (expression.paa_expr.pexp_desc) { - | PExpIf(_) => - Doc.indent( - print_application_argument( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression, - ), - ) - | _ => expr - }; - | [expression1, expression2, ...rest] => - let expr = - print_application_argument( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression2, - ); - switch (expression2.paa_expr.pexp_desc) { - | PExpIf(_) => - Doc.indent( - print_application_argument( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression2, - ), - ) - | _ => expr - }; - }, - ]); - | _ => - Doc.concat([ - left, - Doc.space, - Doc.equal, - Doc.space, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - value, - ), - ]) - }; - } else { - Doc.concat([ - left, - Doc.space, - Doc.equal, - Doc.space, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - value, - ), - ]); - }; - - | _ => - Doc.concat([ - left, - Doc.space, - Doc.equal, - Doc.space, - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - value, - ), - ]) - }; -} -and print_value_bind = - ( - ~provide_flag: Asttypes.provide_flag, - ~rec_flag: Asttypes.rec_flag, - ~mut_flag: Asttypes.mut_flag, - ~original_source: array(string), - ~comments: list(Parsetree.comment), - vbs: list(Parsetree.value_binding), - ) => { - let exported = - switch (provide_flag) { - | NotProvided => Doc.nil - | Provided => Doc.text("provide ") - | Abstract => Doc.text("abstract ") - }; - let recursive = - switch (rec_flag) { - | Nonrecursive => Doc.nil - | Recursive => Doc.text("rec ") - }; - let mutble = - switch (mut_flag) { - | Immutable => Doc.nil - | Mutable => Doc.text("mut ") - }; - - let value_bindings = - switch (vbs) { - | [] => Doc.nil - | [first, ...rem] => - let leading_comments = - Comment_utils.get_comments_before_location( - ~location=first.pvb_loc, - comments, - ); - let leading_comments_docs = - switch (leading_comments) { - | [] => Doc.nil - | _ => - Doc.group( - Doc.concat([ - Comment_utils.new_comments_to_docs(leading_comments), - Doc.ifBreaks(Doc.nil, Doc.space), - ]), - ) - }; - - let previous_bind: ref(option(Parsetree.value_binding)) = ref(None); - let docs = - Doc.join( - ~sep=Doc.concat([Doc.hardLine]), - List.mapi( - (i, vb: Parsetree.value_binding) => { - let leading_comments = - switch (previous_bind^) { - | None => [] - | Some(prev) => - Comment_utils.get_comments_between_locations( - ~loc1=prev.pvb_loc, - ~loc2=vb.pvb_loc, - comments, - ) - }; - - let leading_comment_docs = - switch (leading_comments) { - | [] => Doc.nil - | _ => - Doc.group( - Doc.concat([ - Comment_utils.new_comments_to_docs(leading_comments), - Doc.ifBreaks(Doc.nil, Doc.space), - ]), - ) - }; - - let after_let_comments = - Comment_utils.get_comments_enclosed_and_before_location( - ~loc1=vb.pvb_loc, - ~loc2=vb.pvb_pat.ppat_loc, - comments, - ); - - let after_let_comments_docs = - switch (after_let_comments) { - | [] => Doc.nil - | _ => - Comment_utils.inbetween_comments_to_docs( - ~offset=false, - after_let_comments, - ) - }; - - let expr_comments = - Comment_utils.get_comments_inside_location( - 
~location=vb.pvb_expr.pexp_loc, - comments, - ); - let printed = - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments=expr_comments, - vb.pvb_expr, - ); - - let expression = - switch (vb.pvb_expr.pexp_desc) { - | PExpIf(_) => - if (Doc.willBreak(printed)) { - Doc.concat([Doc.space, printed]); - } else { - Doc.indent(Doc.concat([Doc.line, printed])); - } - | _ => Doc.concat([Doc.space, printed]) - }; - - let pattern_comments = - Comment_utils.get_comments_enclosed_and_before_location( - ~loc1=vb.pvb_loc, - ~loc2=vb.pvb_expr.pexp_loc, - comments, - ); - - previous_bind := Some(vb); - - Doc.concat([ - leading_comment_docs, - if (i != 0) { - Doc.text("and "); - } else { - Doc.nil; - }, - Doc.group( - print_pattern( - ~original_source, - ~comments=pattern_comments, - ~next_loc=vb.pvb_loc, - vb.pvb_pat, - ), - ), - after_let_comments_docs, - switch (after_let_comments) { - | [] => Doc.space - | _ => Doc.nil - }, - Doc.equal, - expression, - ]); - }, - vbs, - ), - ); - - Doc.concat([leading_comments_docs, docs]); - }; - - Doc.group( - Doc.concat([ - exported, - Doc.text("let"), - Doc.space, - recursive, - mutble, - value_bindings, - ]), - ); -}; - -let rec print_data = - ( - ~original_source: array(string), - ~comments: list(Parsetree.comment), - data: Parsetree.data_declaration, - ) => { - let nameloc = data.pdata_name; - switch (data.pdata_kind) { - | PDataAbstract => - let get_loc = (t: Parsetree.parsed_type) => t.ptyp_loc; - let print_item = (~comments, t: Parsetree.parsed_type) => { - print_type(~original_source, ~comments, t); - }; - - let after_angle_cmts = - switch (data.pdata_params) { - | [hd, ...rem] => - Comment_utils.get_comments_before_location( - ~location=get_loc(hd), - comments, - ) - | [] => - Comment_utils.get_comments_to_end_of_line( - ~location=nameloc.loc, - comments, - ) - }; - - switch (data.pdata_params) { - | [] => - let type_comments = - switch (after_angle_cmts) { - | [] => Doc.nil - | cmts => Comment_utils.single_line_of_comments(cmts) - }; - Doc.concat([ - Doc.text("type"), - Doc.space, - if (data.pdata_rec == Recursive) { - Doc.text("rec "); - } else { - Doc.nil; - }, - Doc.text(data.pdata_name.txt), - Doc.group( - Doc.concat([ - switch (data.pdata_manifest) { - | Some(manifest) => - Doc.concat([ - Doc.space, - Doc.equal, - Doc.indent( - Doc.concat([ - Doc.line, - print_type(~original_source, ~comments, manifest), - type_comments, - ]), - ), - ]) - | None => Doc.nil - }, - ]), - ), - ]); - - | [hd, ...rem] => - let after_angle_comments = - switch (after_angle_cmts) { - | [] => Doc.softLine - | cmts => - Doc.concat([ - Doc.space, - Comment_utils.new_comments_to_docs(cmts), - Doc.ifBreaks(Doc.nil, Doc.space), - ]) - }; - let type_comments = - switch (data.pdata_manifest) { - | None => comments - | Some(manifest) => - let tcomments = - Comment_utils.get_comments_between_locs( - ~begin_loc=nameloc.loc, - ~end_loc=manifest.ptyp_loc, - comments, - ); - - remove_used_comments(~remove_comments=after_angle_cmts, tcomments); - }; - - let items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=type_comments, - ~iterated_item=IteratedTypeItems, - data.pdata_params, - ); - let printed_types = Doc.join(~sep=Doc.line, items); - let printed_types_after_angle = printed_types; - let params = [ - Doc.text("<"), - Doc.indent( - Doc.concat([after_angle_comments, printed_types_after_angle]), - ), - Doc.softLine, - Doc.text(">"), - ]; - - Doc.concat([ - Doc.text("type"), - Doc.space, - if (data.pdata_rec == Recursive) { - Doc.text("rec "); - 
} else { - Doc.nil; - }, - Doc.group(Doc.concat([Doc.text(data.pdata_name.txt), ...params])), - switch (data.pdata_manifest) { - | Some(manifest) => - let manifest_comments = - Comment_utils.get_comments_inside_location( - ~location=manifest.ptyp_loc, - comments, - ); - - Doc.group( - Doc.concat([ - Doc.space, - Doc.equal, - Doc.space, - print_type( - ~original_source, - ~comments=manifest_comments, - manifest, - ), - ]), - ); - | None => Doc.nil - }, - ]); - }; - - | PDataVariant(constr_declarations) => - let get_loc = (lbl: Parsetree.constructor_declaration) => { - lbl.pcd_loc; - }; - - let print_item = (~comments, d: Parsetree.constructor_declaration) => { - Doc.group( - Doc.concat([ - Doc.text(d.pcd_name.txt), - switch (d.pcd_args) { - | PConstrTuple({txt: parsed_types}) => - switch (parsed_types) { - | [] => Doc.nil - | [first, ...rem] => - let get_loc = (t: Parsetree.parsed_type) => t.ptyp_loc; - let print_item = (~comments, t: Parsetree.parsed_type) => { - Doc.concat([print_type(~original_source, ~comments, t)]); - }; - - let after_paren_comments = - Comment_utils.get_after_brace_comments( - ~first=first.ptyp_loc, - ~loc=get_loc(first), - comments, - ); - let cleaned_comments = - remove_used_comments( - ~remove_comments=after_paren_comments, - comments, - ); - - let type_items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~iterated_item=IteratedTupleConstructor, - parsed_types, - ); - let printed_type_items = Doc.join(~sep=Doc.line, type_items); - - let printed_type_items_after_parens = - Doc.concat([ - force_break_if_line_comment( - ~separator=Doc.softLine, - after_paren_comments, - ), - printed_type_items, - ]); - - Doc.group( - Doc.concat([ - Doc.lparen, - Comment_utils.single_line_of_comments(after_paren_comments), - Doc.indent(printed_type_items_after_parens), - Doc.softLine, - Doc.rparen, - ]), - ); - } - | PConstrRecord({txt: label_declarations}) => - let get_loc = (lbl: Parsetree.label_declaration) => { - lbl.pld_loc; - }; - - let print_item = (~comments, lbl: Parsetree.label_declaration) => { - Doc.concat([ - print_ident(lbl.pld_name.txt), - Doc.text(":"), - Doc.space, - print_type(~original_source, ~comments, lbl.pld_type), - ]); - }; - - let pre_brace_comments = []; // We can't determine from AST if comment comes before or after brace - - let remaining_comments = - remove_used_comments( - ~remove_comments=pre_brace_comments, - comments, - ); - - let after_brace_comments = - Comment_utils.get_after_brace_comments( - ~loc=data.pdata_loc, - remaining_comments, - ); - - let cleaned_comments = - remove_used_comments( - ~remove_comments=after_brace_comments, - remaining_comments, - ); - - let decl_items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~iterated_item=IteratedRecordLabels, - label_declarations, - ); - let printed_decls = Doc.join(~sep=Doc.hardLine, decl_items); - let printed_decls_after_brace = - Doc.concat([Doc.hardLine, printed_decls]); - - Doc.group( - Doc.concat([ - Doc.lbrace, - Comment_utils.single_line_of_comments(after_brace_comments), - Doc.indent(printed_decls_after_brace), - Doc.hardLine, - Doc.rbrace, - ]), - ); - | PConstrSingleton => Doc.nil - }, - ]), - ); - }; - - let after_brace_comments = - Comment_utils.get_after_brace_comments(~loc=data.pdata_loc, comments); - let cleaned_comments = - remove_used_comments(~remove_comments=after_brace_comments, comments); - - let decl_items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - 
~iterated_item=IteratedDataDeclarations, - constr_declarations, - ); - let printed_decls = Doc.join(~sep=Doc.hardLine, decl_items); - - let printed_decls_after_brace = - Doc.concat([ - force_break_if_line_comment( - ~separator=Doc.hardLine, - after_brace_comments, - ), - printed_decls, - ]); - - Doc.group( - Doc.concat([ - Doc.text("enum"), - Doc.space, - if (data.pdata_rec == Recursive) { - Doc.text("rec "); - } else { - Doc.nil; - }, - Doc.text(nameloc.txt), - switch (data.pdata_params) { - | [] => Doc.space - | [first, ...rem] => - let get_loc = (t: Parsetree.parsed_type) => t.ptyp_loc; - let print_item = (~comments, t: Parsetree.parsed_type) => { - print_type(~original_source, ~comments, t); - }; - - let after_angle_comments = - Comment_utils.get_after_brace_comments( - ~loc=get_loc(first), - comments, - ); - let cleaned_comments = - remove_used_comments( - ~remove_comments=after_angle_comments, - comments, - ); - - let params = - item_iterator( - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~iterated_item=IteratedEnum, - data.pdata_params, - ); - - let printed_data_params = Doc.join(~sep=Doc.line, params); - - let printed_data_params_after_angle = - Doc.concat([ - force_break_if_line_comment( - ~separator=Doc.softLine, - after_angle_comments, - ), - printed_data_params, - ]); - - Doc.group( - Doc.concat([ - Doc.group( - Doc.concat([ - Doc.text("<"), - Comment_utils.single_line_of_comments(after_angle_comments), - Doc.indent(printed_data_params_after_angle), - Doc.softLine, - Doc.text(">"), - ]), - ), - Doc.space, - ]), - ); - }, - Doc.lbrace, - Comment_utils.single_line_of_comments(after_brace_comments), - Doc.indent(printed_decls_after_brace), - Doc.hardLine, - Doc.rbrace, - ]), - ); - - | PDataRecord(label_declarations) => - let get_loc = (lbl: Parsetree.label_declaration) => { - lbl.pld_loc; - }; - - let print_item = (~comments, lbl: Parsetree.label_declaration) => { - let is_mutable = - switch (lbl.pld_mutable) { - | Mutable => Doc.text("mut ") - | Immutable => Doc.nil - }; - Doc.concat([ - is_mutable, - print_ident(lbl.pld_name.txt), - Doc.text(":"), - Doc.space, - print_type(~original_source, ~comments, lbl.pld_type), - ]); - }; - - let pre_brace_comments = []; // We can't determine from AST if comment comes before or after brace - - let remaining_comments = - remove_used_comments(~remove_comments=pre_brace_comments, comments); - - let after_brace_comments = - Comment_utils.get_after_brace_comments( - ~loc=data.pdata_loc, - remaining_comments, - ); - - let cleaned_comments = - remove_used_comments( - ~remove_comments=after_brace_comments, - remaining_comments, - ); - - let decl_items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~iterated_item=IteratedRecordLabels, - label_declarations, - ); - let printed_decls = Doc.join(~sep=Doc.hardLine, decl_items); - let printed_decls_after_brace = Doc.concat([Doc.hardLine, printed_decls]); - - Doc.group( - Doc.concat([ - Doc.text("record"), - Doc.space, - if (data.pdata_rec == Recursive) { - Doc.text("rec "); - } else { - Doc.nil; - }, - Doc.text(nameloc.txt), - switch (data.pdata_params) { - | [] => Doc.space - | [first, ...rem] => - let get_loc = (t: Parsetree.parsed_type) => t.ptyp_loc; - let print_item = (~comments, t: Parsetree.parsed_type) => { - print_type(~original_source, ~comments, t); - }; - - let param_items = - item_iterator( - ~get_loc, - ~print_item, - ~comments=[], - ~iterated_item=IteratedRecordData, - data.pdata_params, - ); - let printed_param_items = Doc.join(~sep=Doc.line, 
param_items); - let printed_params_after_angle = - Doc.concat([ - force_break_if_line_comment( - ~separator=Doc.softLine, - pre_brace_comments, - ), - printed_param_items, - ]); - Doc.group( - Doc.concat([ - Doc.text("<"), - Comment_utils.single_line_of_comments(pre_brace_comments), - Doc.indent(printed_params_after_angle), - Doc.softLine, - Doc.text(">"), - Doc.space, - ]), - ); - }, - Doc.concat([ - Doc.lbrace, - Comment_utils.single_line_of_comments(after_brace_comments), - Doc.indent(printed_decls_after_brace), - Doc.hardLine, - Doc.rbrace, - ]), - ]), - ); - }; -}; -let data_print = - ( - ~original_source: array(string), - ~comments: list(Parsetree.comment), - datas: list((Parsetree.provide_flag, Parsetree.data_declaration)), - ) => { - let previous_data: ref(option(Parsetree.data_declaration)) = ref(None); - Doc.join( - ~sep=Doc.concat([Doc.hardLine]), - List.mapi( - (i, data) => { - let (expt, decl: Parsetree.data_declaration) = data; - - let leading_comments = - switch (previous_data^) { - | None => [] - | Some(prev) => - Comment_utils.get_comments_between_locations( - ~loc1=prev.pdata_loc, - ~loc2=decl.pdata_loc, - comments, - ) - }; - - let leading_comment_docs = - Comment_utils.new_comments_to_docs(leading_comments); - - let data_comments = - Comment_utils.get_comments_inside_location( - ~location=decl.pdata_loc, - comments, - ); - - previous_data := Some(decl); - - Doc.concat([ - leading_comment_docs, - if (i != 0) { - Doc.text("and "); - } else { - Doc.nil; - }, - switch ((expt: Asttypes.provide_flag)) { - | NotProvided => Doc.nil - | Provided => Doc.text("provide ") - | Abstract => Doc.text("abstract ") - }, - print_data(~original_source, ~comments=data_comments, decl), - ]); - }, - datas, - ), - ); -}; -let include_print = - ( - ~comments: list(Parsetree.comment), - ~original_source: array(string), - inc: Parsetree.include_declaration, - ) => { - let path = inc.pinc_path.txt; - - let alias = - switch (inc.pinc_alias) { - | Some({txt: name}) => - Doc.concat([Doc.space, Doc.text("as"), Doc.space, Doc.text(name)]) - | None => Doc.nil - }; - - Doc.group( - Doc.concat([ - Doc.text("include"), - Doc.space, - Doc.doubleQuote, - Doc.text(path), - Doc.doubleQuote, - alias, - ]), - ); -}; - -let print_foreign_value_description = - ( - ~original_source: array(string), - ~comments: list(Parsetree.comment), - vd: Parsetree.value_description, - ) => { - let ident = vd.pval_name.txt; - - let fixed_ident = - if (infixop(ident) || prefixop(ident)) { - Doc.concat([Doc.lparen, Doc.text(ident), Doc.rparen]); - } else { - Doc.text(ident); - }; - - Doc.concat([ - fixed_ident, - Doc.text(":"), - Doc.space, - print_type(~original_source, ~comments, vd.pval_type), - switch (vd.pval_name_alias) { - | None => Doc.space - | Some(alias) => - Doc.concat([ - Doc.space, - Doc.text("as"), - Doc.space, - Doc.text(alias.txt), - Doc.space, - ]) - }, - Doc.text("from"), - Doc.space, - Doc.text("\""), - Doc.text(vd.pval_mod.txt), - Doc.text("\""), - ]); -}; - -let print_primitive_description = - ( - ~original_source: array(string), - ~comments: list(Parsetree.comment), - pd: Parsetree.primitive_description, - ) => { - let ident = pd.pprim_ident.txt; - - let fixed_ident = - if (infixop(ident) || prefixop(ident)) { - Doc.concat([Doc.lparen, Doc.text(ident), Doc.rparen]); - } else { - Doc.text(ident); - }; - - Doc.concat([ - fixed_ident, - Doc.space, - Doc.equal, - Doc.space, - Doc.text("\""), - Doc.text(pd.pprim_name.txt), - Doc.text("\""), - ]); -}; - -let rec toplevel_print = - ( - ~original_source: 
array(string), - ~comments: list(Parsetree.comment), - data: Parsetree.toplevel_stmt, - ) => { - let without_comments = - switch (data.ptop_desc) { - | PTopInclude(include_declaration) => - include_print(~comments, ~original_source, include_declaration) - | PTopForeign(provide_flag, value_description) => - let provide = - switch (provide_flag) { - | NotProvided => Doc.nil - | Provided => Doc.text("provide ") - | Abstract => Doc.text("abstract ") - }; - Doc.concat([ - provide, - Doc.text("foreign wasm "), - print_foreign_value_description( - ~original_source, - ~comments, - value_description, - ), - ]); - | PTopPrimitive(provide_flag, primitive_description) => - let provide = - switch (provide_flag) { - | NotProvided => Doc.nil - | Provided => Doc.text("provide ") - | Abstract => Doc.text("abstract ") - }; - Doc.concat([ - provide, - Doc.text("primitive "), - print_primitive_description( - ~original_source, - ~comments, - primitive_description, - ), - ]); - | PTopData(data_declarations) => - data_print(~original_source, ~comments, data_declarations) - - | PTopLet(provide_flag, rec_flag, mut_flag, value_bindings) => - print_value_bind( - ~provide_flag, - ~rec_flag, - ~mut_flag, - ~original_source, - ~comments, - value_bindings, - ) - | PTopExpr(expression) => - print_expression( - ~expression_parent=GenericExpression, - ~original_source, - ~comments, - expression, - ) - | PTopException(provide_flag, type_exception) => - let provide = - switch (provide_flag) { - | NotProvided => Doc.nil - | Provided => Doc.text("provide ") - | Abstract => Doc.text("abstract ") - }; - let cstr = type_exception.ptyexn_constructor; - - let kind = - switch (cstr.pext_kind) { - | PExtDecl(sargs) => - switch (sargs) { - | PConstrSingleton => Doc.nil - | PConstrTuple({txt: parsed_types}) => - if (List.length(parsed_types) > 0) { - Doc.concat([ - Doc.lparen, - Doc.join( - ~sep=Doc.comma, - List.map( - t => print_type(~original_source, ~comments, t), - parsed_types, - ), - ), - Doc.rparen, - ]); - } else { - Doc.nil; - } - | PConstrRecord(_) => - failwith( - "Impossible: exception should not have a record constructor", - ) - } - - | PExtRebind(lid) => print_ident(lid.txt) - }; - - Doc.concat([ - provide, - Doc.text("exception "), - Doc.text(cstr.pext_name.txt), - kind, - ]); - | PTopProvide(items) => - let items = - switch (items) { - | [] => Doc.concat([Doc.lbrace, Doc.rbrace]) - | _ => - let get_loc = (item: Parsetree.provide_item) => { - switch (item) { - | PProvideValue({loc}) - | PProvideModule({loc}) - | PProvideException({loc}) - | PProvideType({loc}) => loc - }; - }; - - let comments_in_expression = - Comment_utils.get_comments_inside_location( - ~location=data.ptop_loc, - comments, - ); - - let after_brace_comments = - Comment_utils.get_after_brace_comments( - ~loc=data.ptop_loc, - comments_in_expression, - ); - let cleaned_comments = - remove_used_comments( - ~remove_comments=after_brace_comments, - comments_in_expression, - ); - - let print_item = (~comments, item: Parsetree.provide_item) => { - let item_name = (name, alias) => { - Location.( - switch (alias) { - | None => print_ident(name.txt) - | Some(alias) => - Doc.concat([ - print_ident(name.txt), - Doc.space, - Doc.text("as"), - Doc.space, - print_ident(alias.txt), - ]) - } - ); - }; - - switch (item) { - | PProvideValue({name, alias}) => item_name(name, alias) - | PProvideModule({name, alias}) => - Doc.concat([Doc.text("module "), item_name(name, alias)]) - | PProvideType({name, alias}) => - Doc.concat([Doc.text("type "), item_name(name, alias)]) - | 
PProvideException({name, alias}) => - Doc.concat([Doc.text("exception "), item_name(name, alias)]) - }; - }; - - let printed_items = - block_item_iterator_line( - ~previous=Block(data.ptop_loc), - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~separator=Doc.comma, - ~original_source, - items, - ); - - Doc.concat([ - Doc.lbrace, - Comment_utils.single_line_of_comments(after_brace_comments), - Doc.indent( - Doc.concat([ - force_break_if_line_comment( - ~separator=Doc.line, - after_brace_comments, - ), - printed_items, - ]), - ), - Doc.line, - Doc.rbrace, - ]); - }; - Doc.concat([Doc.text("provide"), Doc.space, items]); - | PTopModule(provide_flag, md) => - let get_loc = (stmt: Parsetree.toplevel_stmt) => { - stmt.ptop_loc; - }; - - let print_item = (~comments, stmt: Parsetree.toplevel_stmt) => { - toplevel_print(~original_source, ~comments, stmt); - }; - - let get_attributes = (stmt: Parsetree.toplevel_stmt) => { - let attributes = stmt.ptop_attributes; - print_attributes(attributes); - }; - - let comments = - Comment_utils.get_comments_inside_location( - ~location=data.ptop_loc, - comments, - ); - - let after_brace_comments = - Comment_utils.get_after_brace_comments(~loc=data.ptop_loc, comments); - let cleaned_comments = - remove_used_comments(~remove_comments=after_brace_comments, comments); - - let top_level_stmts = - block_item_iterator( - ~previous=TopOfFile, - ~get_loc, - ~print_item, - ~comments=cleaned_comments, - ~print_attribute=get_attributes, - ~original_source, - md.pmod_stmts, - ); - - let provide = - switch (provide_flag) { - | NotProvided => Doc.nil - | Provided => Doc.text("provide ") - | Abstract => Doc.text("abstract ") - }; - - let start_after_brace = Doc.concat([Doc.hardLine, top_level_stmts]); - - Doc.concat([ - provide, - Doc.text("module "), - Doc.text(md.pmod_name.txt), - Doc.space, - Doc.lbrace, - Comment_utils.single_line_of_comments(after_brace_comments), - Doc.indent(start_after_brace), - Doc.hardLine, - Doc.rbrace, - ]); - }; - Doc.group(without_comments); -}; - -let parse_source = (program_str: string) => { - switch ( - { - let lines = String.split_on_char('\n', program_str); - let eol = Fs_access.determine_eol(List.nth_opt(lines, 0)); - let compile_state = - Compile.compile_string( - ~is_root_file=true, - ~hook=stop_after_parse, - ~name=?None, - program_str, - ); - - (compile_state, lines, eol); - } - ) { - | exception exn => Error(ParseError(exn)) - | ({cstate_desc: Parsed(parsed_program)}, lines, eol) => - Ok((parsed_program, Array.of_list(lines), eol)) - | _ => Error(InvalidCompilationState) - }; -}; - -let format_ast = - ( - ~original_source: array(string), - ~eol: Fs_access.eol, - parsed_program: Parsetree.parsed_program, - ) => { - let get_loc = (stmt: Parsetree.toplevel_stmt) => { - stmt.ptop_loc; - }; - - let print_item = (~comments, stmt: Parsetree.toplevel_stmt) => { - toplevel_print(~original_source, ~comments, stmt); - }; - - let get_attributes = (stmt: Parsetree.toplevel_stmt) => { - let attributes = stmt.ptop_attributes; - print_attributes(attributes); - }; - - let leading_comments = - Comment_utils.get_comments_before_location( - ~location=parsed_program.module_name.loc, - parsed_program.comments, - ); - let remaining_comments = - Comment_utils.get_comments_after_location( - ~location=parsed_program.module_name.loc, - parsed_program.comments, - ); - - let module_header = - Doc.concat([ - Comment_utils.new_comments_to_docs(leading_comments), - if (leading_comments == []) { - Doc.nil; - } else { - Doc.hardLine; - }, - 
Doc.text("module"), - Doc.space, - Doc.text(parsed_program.module_name.txt), - Doc.hardLine, - Doc.hardLine, - ]); - - // special case where we have no code, we still format the comments - - let final_doc = - switch (parsed_program.statements) { - | [] => - Doc.concat([ - module_header, - Comment_utils.new_comments_to_docs(remaining_comments), - ]) - | _ => - let top_level_stmts = - block_item_iterator( - ~previous=TopOfFile, - ~get_loc, - ~print_item, - ~comments=remaining_comments, - ~print_attribute=get_attributes, - ~original_source, - parsed_program.statements, - ); - Doc.concat([module_header, top_level_stmts, Doc.hardLine]); - }; - - Doc.toString(~width=80, ~eol, final_doc); -}; diff --git a/compiler/src/formatting/res_doc.re b/compiler/src/formatting/res_doc.re deleted file mode 100644 index 181825568c..0000000000 --- a/compiler/src/formatting/res_doc.re +++ /dev/null @@ -1,459 +0,0 @@ -/* - This code is taken directly from the Rescript project - https://github.com/rescript-lang/syntax - - Original license reproduced below: - - MIT License - - Copyright (c) 2020 ReScript - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE. - - - */ -open Grain_utils; - -module MiniBuffer = Res_minibuffer; - -type mode = - | Break - | Flat; - -type lineStyle = - | Classic /* fits? -> replace with space */ - | Soft /* fits? -> replaced with nothing */ - | Hard /* always included, forces breaks in parents */ - /* always included, forces breaks in parents, but doesn't increase indentation - use case: template literals, multiline string content */ - | Literal; - -type t = - | Nil - | Text(string) - | Concat(list(t)) - | Indent(t) - | IfBreaks({ - yes: t, - no: t, - mutable broken: bool, - }) /* when broken is true, treat as the yes branch */ - | LineSuffix(t) - | LineBreak(lineStyle) - | Group({ - mutable shouldBreak: bool, - doc: t, - }) - | CustomLayout(list(t)) - | BreakParent; - -let nil = Nil; -let line = LineBreak(Classic); -let hardLine = LineBreak(Hard); -let softLine = LineBreak(Soft); -let literalLine = LineBreak(Literal); -let text = s => Text(s); - -/* Optimization. 
We eagerly collapse and reduce whatever allocation we can */ -let rec _concat = (acc, l) => - switch (l) { - | [Text(s1), Text(s2), ...rest] => [ - Text(s1 ++ s2), - ..._concat(acc, rest), - ] - | [Nil, ...rest] => _concat(acc, rest) - | [Concat(l2), ...rest] => _concat(_concat(acc, rest), l2) /* notice the order here */ - | [x, ...rest] => - let rest1 = _concat(acc, rest); - if (rest1 === rest) { - l; - } else { - [x, ...rest1]; - }; - | [] => acc - }; - -let concat = l => Concat(_concat([], l)); - -let indent = d => Indent(d); -let ifBreaks = (t, f) => IfBreaks({yes: t, no: f, broken: false}); -let lineSuffix = d => LineSuffix(d); -let group = d => Group({shouldBreak: false, doc: d}); -let breakableGroup = (~forceBreak, d) => - Group({shouldBreak: forceBreak, doc: d}); -let customLayout = gs => CustomLayout(gs); -let breakParent = BreakParent; - -let space = Text(" "); -let comma = Text(","); -let dot = Text("."); -let dotdot = Text(".."); -let dotdotdot = Text("..."); -let lessThan = Text("<"); -let greaterThan = Text(">"); -let lbrace = Text("{"); -let rbrace = Text("}"); -let lparen = Text("("); -let rparen = Text(")"); -let lbracket = Text("["); -let rbracket = Text("]"); -let question = Text("?"); -let tilde = Text("~"); -let equal = Text("="); -let trailingComma = ifBreaks(comma, nil); -let doubleQuote = Text("\""); - -let propagateForcedBreaks = doc => { - let rec walk = doc => - switch (doc) { - | Text(_) - | Nil - | LineSuffix(_) => false - | BreakParent => true - | LineBreak(Hard | Literal) => true - | LineBreak(Classic | Soft) => false - | Indent(children) => - let childForcesBreak = walk(children); - childForcesBreak; - | IfBreaks({yes: trueDoc, no: falseDoc} as ib) => - let falseForceBreak = walk(falseDoc); - if (falseForceBreak) { - let _ = walk(trueDoc); - ib.broken = true; - true; - } else { - let forceBreak = walk(trueDoc); - forceBreak; - }; - | Group({shouldBreak: forceBreak, doc: children} as gr) => - let childForcesBreak = walk(children); - let shouldBreak = forceBreak || childForcesBreak; - gr.shouldBreak = shouldBreak; - shouldBreak; - | Concat(children) => - List.fold_left( - (forceBreak, child) => { - let childForcesBreak = walk(child); - forceBreak || childForcesBreak; - }, - false, - children, - ) - | CustomLayout(children) => - /* When using CustomLayout, we don't want to propagate forced breaks - * from the children up. By definition it picks the first layout that fits - * otherwise it takes the last of the list. - * However we do want to propagate forced breaks in the sublayouts. They - * might need to be broken. 
We just don't propagate them any higher here */ - let _ = walk(Concat(children)); - false; - }; - - let _ = walk(doc); - (); -}; - -/* See documentation in interface file */ -let rec willBreak = doc => - switch (doc) { - | LineBreak(Hard | Literal) - | BreakParent - | Group({shouldBreak: true}) => true - | Group({doc}) - | Indent(doc) - | CustomLayout([doc, ..._]) => willBreak(doc) - | Concat(docs) => List.exists(willBreak, docs) - | IfBreaks({yes, no}) => willBreak(yes) || willBreak(no) - | _ => false - }; - -let rec willIndent = doc => - switch (doc) { - | Indent(doc) => true - | Group({doc}) => willIndent(doc) - | CustomLayout([doc, ..._]) => willIndent(doc) - | Concat(docs) => List.exists(willIndent, docs) - | _ => false - }; - -let join = (~sep, docs) => { - let rec loop = (acc, sep, docs) => - switch (docs) { - | [] => List.rev(acc) - | [x] => List.rev([x, ...acc]) - | [x, ...xs] => loop([sep, x, ...acc], sep, xs) - }; - - concat(loop([], sep, docs)); -}; - -let fits = (w, stack) => { - let width = ref(w); - let result = ref(None); - - let rec calculate = (indent, mode, doc) => - switch (mode, doc) { - | _ when result.contents !== None => () - | _ when width.contents < 0 => result := Some(false) - | (_, Nil) - | (_, LineSuffix(_)) - | (_, BreakParent) => () - | (_, Text(txt)) => width := width.contents - String.length(txt) - | (_, Indent(doc)) => calculate(indent + 2, mode, doc) - | (Flat, LineBreak(Hard)) - | (Flat, LineBreak(Literal)) => result := Some(true) - | (Flat, LineBreak(Classic)) => width := width.contents - 1 - | (Flat, LineBreak(Soft)) => () - | (Break, LineBreak(_)) => result := Some(true) - | (_, Group({shouldBreak: true, doc})) => calculate(indent, Break, doc) - | (_, Group({doc})) => calculate(indent, mode, doc) - | (_, IfBreaks({yes: breakDoc, broken: true})) => - calculate(indent, mode, breakDoc) - | (Break, IfBreaks({yes: breakDoc})) => - calculate(indent, mode, breakDoc) - | (Flat, IfBreaks({no: flatDoc})) => calculate(indent, mode, flatDoc) - | (_, Concat(docs)) => calculateConcat(indent, mode, docs) - | (_, CustomLayout([hd, ..._])) => - // TODO: if we have nested custom layouts, what we should do here? 
- calculate(indent, mode, hd) - | (_, CustomLayout([])) => () - } - and calculateConcat = (indent, mode, docs) => - if (result.contents === None) { - switch (docs) { - | [] => () - | [doc, ...rest] => - calculate(indent, mode, doc); - calculateConcat(indent, mode, rest); - }; - }; - - let rec calculateAll = stack => - switch (result.contents, stack) { - | (Some(r), _) => r - | (None, []) => width^ >= 0 - | (None, [(indent, mode, doc), ...rest]) => - calculate(indent, mode, doc); - calculateAll(rest); - }; - - calculateAll(stack); -}; - -let toString = (~width, ~eol: Fs_access.eol, doc) => { - propagateForcedBreaks(doc); - let buffer = MiniBuffer.create(~eol, 1024); - - let rec process = (~pos, lineSuffices, stack) => - switch (stack) { - | [(ind, mode, doc) as cmd, ...rest] => - switch (doc) { - | Nil - | BreakParent => process(~pos, lineSuffices, rest) - | Text(txt) => - MiniBuffer.add_string(buffer, txt); - process(~pos=String.length(txt) + pos, lineSuffices, rest); - | LineSuffix(doc) => - process(~pos, [(ind, mode, doc), ...lineSuffices], rest) - | Concat(docs) => - let ops = List.map(doc => (ind, mode, doc), docs); - process(~pos, lineSuffices, List.append(ops, rest)); - | Indent(doc) => - process(~pos, lineSuffices, [(ind + 2, mode, doc), ...rest]) - | IfBreaks({yes: breakDoc, broken: true}) => - process(~pos, lineSuffices, [(ind, mode, breakDoc), ...rest]) - | IfBreaks({yes: breakDoc, no: flatDoc}) => - if (mode == Break) { - process(~pos, lineSuffices, [(ind, mode, breakDoc), ...rest]); - } else { - process(~pos, lineSuffices, [(ind, mode, flatDoc), ...rest]); - } - | LineBreak(lineStyle) => - if (mode == Break) { - switch (lineSuffices) { - | [] => - if (lineStyle == Literal) { - if (eol == CRLF) { - MiniBuffer.add_char(buffer, '\r'); - }; - MiniBuffer.add_char(buffer, '\n'); - process(~pos=0, [], rest); - } else { - MiniBuffer.flush_newline(buffer); - MiniBuffer.add_string( - buffer, - [@doesNotRaise] String.make(ind, ' '), - ); - process(~pos=ind, [], rest); - } - | _docs => - process( - ~pos=ind, - [], - List.concat([List.rev(lineSuffices), [cmd, ...rest]]), - ) - }; - } else /* mode = Flat */ { - let pos = - switch (lineStyle) { - | Classic => - MiniBuffer.add_string(buffer, " "); - pos + 1; - | Hard => - MiniBuffer.flush_newline(buffer); - 0; - | Literal => - if (eol == CRLF) { - MiniBuffer.add_char(buffer, '\r'); - }; - MiniBuffer.add_char(buffer, '\n'); - 0; - | Soft => pos - }; - - process(~pos, lineSuffices, rest); - } - | Group({shouldBreak, doc}) => - if (shouldBreak || !fits(width - pos, [(ind, Flat, doc), ...rest])) { - process(~pos, lineSuffices, [(ind, Break, doc), ...rest]); - } else { - process(~pos, lineSuffices, [(ind, Flat, doc), ...rest]); - } - | CustomLayout(docs) => - let rec findGroupThatFits = groups => - switch (groups) { - | [] => Nil - | [lastGroup] => lastGroup - | [doc, ...docs] => - if (fits(width - pos, [(ind, Flat, doc), ...rest])) { - doc; - } else { - findGroupThatFits(docs); - } - }; - - let doc = findGroupThatFits(docs); - process(~pos, lineSuffices, [(ind, Flat, doc), ...rest]); - } - | [] => - switch (lineSuffices) { - | [] => () - | suffices => process(~pos=0, [], List.rev(suffices)) - } - }; - - process(~pos=0, [], [(0, Flat, doc)]); - MiniBuffer.contents(buffer); -}; - -[@live] -let debug = (~eol, t) => { - let rec toDoc = - fun - | Nil => text("nil") - | BreakParent => text("breakparent") - | Text(txt) => text("text(\"" ++ txt ++ "\")") - | LineSuffix(doc) => - group( - concat([ - text("linesuffix("), - indent(concat([line, 
toDoc(doc)])), - line, - text(")"), - ]), - ) - | Concat([]) => text("concat()") - | Concat(docs) => - group( - concat([ - text("concat("), - indent( - concat([ - line, - join(~sep=concat([text(","), line]), List.map(toDoc, docs)), - ]), - ), - line, - text(")"), - ]), - ) - | CustomLayout(docs) => - group( - concat([ - text("customLayout("), - indent( - concat([ - line, - join(~sep=concat([text(","), line]), List.map(toDoc, docs)), - ]), - ), - line, - text(")"), - ]), - ) - | Indent(doc) => - concat([text("indent("), softLine, toDoc(doc), softLine, text(")")]) - | IfBreaks({yes: trueDoc, broken: true}) => toDoc(trueDoc) - | IfBreaks({yes: trueDoc, no: falseDoc}) => - group( - concat([ - text("ifBreaks("), - indent( - concat([ - line, - toDoc(trueDoc), - concat([text(","), line]), - toDoc(falseDoc), - ]), - ), - line, - text(")"), - ]), - ) - | LineBreak(break) => { - let breakTxt = - switch (break) { - | Classic => "Classic" - | Soft => "Soft" - | Hard => "Hard" - | Literal => "Liteal" - }; - - text("LineBreak(" ++ breakTxt ++ ")"); - } - | Group({shouldBreak, doc}) => - group( - concat([ - text("Group("), - indent( - concat([ - line, - text("{shouldBreak: " ++ string_of_bool(shouldBreak) ++ "}"), - concat([text(","), line]), - toDoc(doc), - ]), - ), - line, - text(")"), - ]), - ); - - let doc = toDoc(t); - toString(~width=10, ~eol, doc) |> print_endline; -}; diff --git a/compiler/src/formatting/res_doc.rei b/compiler/src/formatting/res_doc.rei deleted file mode 100644 index 192e61cfe2..0000000000 --- a/compiler/src/formatting/res_doc.rei +++ /dev/null @@ -1,123 +0,0 @@ -/* - This code is taken directly from the Rescript project - https://github.com/rescript-lang/syntax - - Original license reproduced below: - - MIT License - - Copyright (c) 2020 ReScript - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE. - - - */ - -type lineStyle = - | Classic /* fits? -> replace with space */ - | Soft /* fits? 
-> replaced with nothing */ - | Hard /* always included, forces breaks in parents */ - /* always included, forces breaks in parents, but doesn't increase indentation - use case: template literals, multiline string content */ - | Literal; - -type t = - | Nil - | Text(string) - | Concat(list(t)) - | Indent(t) - | IfBreaks({ - yes: t, - no: t, - mutable broken: bool, - }) /* when broken is true, treat as the yes branch */ - | LineSuffix(t) - | LineBreak(lineStyle) - | Group({ - mutable shouldBreak: bool, - doc: t, - }) - | CustomLayout(list(t)) - | BreakParent; - -let nil: t; -let line: t; -let hardLine: t; -let softLine: t; -let literalLine: t; -let text: string => t; -let concat: list(t) => t; -let indent: t => t; -let ifBreaks: (t, t) => t; -let lineSuffix: t => t; -let group: t => t; -let breakableGroup: (~forceBreak: bool, t) => t; -/* `customLayout docs` will pick the layout that fits from `docs`. - * This is a very expensive computation as every layout from the list - * will be checked until one fits. */ -let customLayout: list(t) => t; -let breakParent: t; -let join: (~sep: t, list(t)) => t; - -let space: t; -let comma: t; -let dot: t; -let dotdot: t; -let dotdotdot: t; -let lessThan: t; -let greaterThan: t; -let lbrace: t; -let rbrace: t; -let lparen: t; -let rparen: t; -let lbracket: t; -let rbracket: t; -let question: t; -let tilde: t; -let equal: t; -let trailingComma: t; -[@live] -let doubleQuote: t; - -/* - * `willBreak doc` checks whether `doc` contains forced line breaks. - * This is more or less a "workaround" to make the parent of a `customLayout` break. - * Forced breaks are not propagated through `customLayout`; otherwise we would always - * get the last layout the algorithm tries… - * This might result into some weird layouts: - * [fn(x => { - * let _ = x - * }), fn(y => { - * let _ = y - * }), fn(z => { - * let _ = z - * })] - * The `[` and `]` would be a lot better broken out. - * Although the layout of `fn(x => {...})` is correct, we need to break its parent (the array). - * `willBreak` can be used in this scenario to check if the `fn…` contains any forced breaks. - * The consumer can then manually insert a `breakParent` doc, to manually propagate the - * force breaks from bottom to top. - */ -let willBreak: t => bool; - -let willIndent: t => bool; - -let toString: (~width: int, ~eol: Grain_utils.Fs_access.eol, t) => string; -[@live] -let debug: (~eol: Grain_utils.Fs_access.eol, t) => unit; diff --git a/compiler/src/formatting/res_minibuffer.re b/compiler/src/formatting/res_minibuffer.re deleted file mode 100644 index 02667fa3a4..0000000000 --- a/compiler/src/formatting/res_minibuffer.re +++ /dev/null @@ -1,103 +0,0 @@ -/* - This code is taken directly from the Rescript project - https://github.com/rescript-lang/syntax - - Original license reproduced below: - - MIT License - - Copyright (c) 2020 ReScript - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. 
- - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE. - - - */ - -type t = { - mutable buffer: bytes, - mutable position: int, - mutable length: int, - eol: Grain_utils.Fs_access.eol, -}; - -let create = (~eol, n) => { - let n = - if (n < 1) { - 1; - } else { - n; - }; - let s = ([@doesNotRaise] Bytes.create)(n); - {buffer: s, position: 0, length: n, eol}; -}; - -let contents = b => - ([@doesNotRaise] Bytes.sub_string)(b.buffer, 0, b.position); - -/* Can't be called directly, don't add to the interface */ -let resize_internal = (b, more) => { - let len = b.length; - let new_len = ref(len); - while (b.position + more > new_len^) { - new_len := 2 * new_len^; - }; - if (new_len^ > Sys.max_string_length) { - if (b.position + more <= Sys.max_string_length) { - new_len := Sys.max_string_length; - }; - }; - let new_buffer = ([@doesNotRaise] Bytes.create)(new_len^); - /* PR#6148: let's keep using [blit] rather than [unsafe_blit] in - this tricky function that is slow anyway. */ - [@doesNotRaise] Bytes.blit(b.buffer, 0, new_buffer, 0, b.position); - b.buffer = new_buffer; - b.length = new_len^; -}; - -let add_char = (b, c) => { - let pos = b.position; - if (pos >= b.length) { - resize_internal(b, 1); - }; - Bytes.unsafe_set(b.buffer, pos, c); - b.position = pos + 1; -}; - -let add_string = (b, s) => { - let len = String.length(s); - let new_position = b.position + len; - if (new_position > b.length) { - resize_internal(b, len); - }; - [@doesNotRaise] Bytes.blit_string(s, 0, b.buffer, b.position, len); - b.position = new_position; -}; - -/* adds newline and trims all preceding whitespace */ -let flush_newline = b => { - let position = ref(b.position); - while (Bytes.unsafe_get(b.buffer, position^ - 1) == ' ' && position^ >= 0) { - position := position^ - 1; - }; - b.position = position^; - if (b.eol == CRLF) { - add_char(b, '\r'); - }; - add_char(b, '\n'); -}; diff --git a/compiler/src/formatting/res_minibuffer.rei b/compiler/src/formatting/res_minibuffer.rei deleted file mode 100644 index 029ed0522e..0000000000 --- a/compiler/src/formatting/res_minibuffer.rei +++ /dev/null @@ -1,37 +0,0 @@ -/* - This code is taken directly from the Rescript project - https://github.com/rescript-lang/syntax - - Original license reproduced below: - - MIT License - - Copyright (c) 2020 ReScript - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE. - - - */ - -type t; -let add_char: (t, char) => unit; -let add_string: (t, string) => unit; -let contents: t => string; -let create: (~eol: Grain_utils.Fs_access.eol, int) => t; -let flush_newline: t => unit; diff --git a/compiler/src/language_server/code_file.re b/compiler/src/language_server/code_file.re index 65a2fa4ee2..b8d1432d99 100644 --- a/compiler/src/language_server/code_file.re +++ b/compiler/src/language_server/code_file.re @@ -64,9 +64,9 @@ let compile_source = (uri, source) => { switch (Grain_parsing.Location.error_of_exn(exn)) { | Some(`Ok(e)) => let (file, line, startchar) = - Grain_parsing.Location.get_pos_info(e.loc.loc_start); + Grain_parsing.Location.get_pos_info(e.error_loc.loc_start); let (_, endline, endchar) = - Grain_parsing.Location.get_pos_info(e.loc.loc_end); + Grain_parsing.Location.get_pos_info(e.error_loc.loc_end); let startchar = startchar < 0 ? 0 : startchar; let endchar = endchar < 0 ? 0 : endchar; diff --git a/compiler/src/language_server/formatting.re b/compiler/src/language_server/formatting.re index 6471668ba1..d1769655af 100644 --- a/compiler/src/language_server/formatting.re +++ b/compiler/src/language_server/formatting.re @@ -56,12 +56,12 @@ let process = }, ) | Some(compiled_code) => - switch (Format.parse_source(compiled_code)) { + switch (Fmt.parse_source(compiled_code)) { | Ok((parsed_program, lines, eol)) => // I'm pretty sure this code path can raise errors. We should change these to Results try({ let formatted_code = - Format.format_ast(~original_source=lines, ~eol, parsed_program); + Fmt.format_to_string(~source=lines, ~eol, parsed_program); let range: Protocol.range = { range_start: { diff --git a/compiler/src/language_server/sourcetree.re b/compiler/src/language_server/sourcetree.re index dd9207cd0a..f07e378dcf 100644 --- a/compiler/src/language_server/sourcetree.re +++ b/compiler/src/language_server/sourcetree.re @@ -288,18 +288,6 @@ module Sourcetree: Sourcetree = { module Iterator = TypedtreeIter.MakeIterator({ include TypedtreeIter.DefaultIteratorArgument; - let process_value_description = (id, instance_ty, ty) => { - // Never consider special idents when deciding to display generalized - // types, i.e. 
always display instance types for lists - Parsetree.( - Identifier.( - switch (id) { - | {txt: IdentName({txt: "[]" | "[...]"})} => instance_ty - | _ => ty - } - ) - ); - }; let enter_expression = exp => { Parsetree.( Path.( @@ -340,12 +328,7 @@ module Sourcetree: Sourcetree = { loc_to_interval(exp.exp_loc), Value({ env: exp.exp_env, - value_type: - process_value_description( - id, - exp.exp_type, - desc.val_type, - ), + value_type: desc.val_type, loc: exp.exp_loc, definition: Some(desc.val_loc), }), diff --git a/compiler/src/parsing/ast_helper.re b/compiler/src/parsing/ast_helper.re index c69629fd7c..132640282d 100644 --- a/compiler/src/parsing/ast_helper.re +++ b/compiler/src/parsing/ast_helper.re @@ -20,29 +20,12 @@ open Parsetree; exception SyntaxError(Location.t, string); exception BadEncoding(Location.t); -type listitem('a) = - | ListItem('a) - | ListSpread('a, Location.t); - -type recorditem = - | RecordItem(loc(Identifier.t), expression) - | RecordSpread(expression, Location.t); - type location('a) = loc('a); type id = loc(Identifier.t); type str = loc(string); type loc = Location.t; -let ident_empty = { - txt: Identifier.IdentName(Location.mknoloc("[]")), - loc: Location.dummy_loc, -}; -let ident_cons = { - txt: Identifier.IdentName(Location.mknoloc("[...]")), - loc: Location.dummy_loc, -}; - let record_pattern_info = record_pats => List.fold_right( ((pat_opt, closed), (pats, closed_acc)) => @@ -204,51 +187,31 @@ module Pattern = { construct(~loc, a, PPatConstrRecord(patterns, closed)); }; let list = (~loc, a) => { - let empty = tuple_construct(~loc, ident_empty, []); - let a = List.rev(a); - switch (a) { - | [] => empty - | [base, ...rest] => - let base = - switch (base) { - | ListItem(pat) => tuple_construct(~loc, ident_cons, [pat, empty]) - | ListSpread(pat, _) => pat - }; - List.fold_left( - (acc, pat) => { - switch (pat) { - | ListItem(pat) => tuple_construct(~loc, ident_cons, [pat, acc]) - | ListSpread(_, loc) => - raise( - SyntaxError( - loc, - "A list spread can only appear at the end of a list.", - ), - ) - } - }, - base, - rest, - ); - }; + mk(~loc, PPatList(a)); }; let or_ = (~loc, a, b) => mk(~loc, PPatOr(a, b)); let alias = (~loc, a, b) => mk(~loc, PPatAlias(a, b)); }; module Expression = { - let mk = (~loc, ~attributes=?, d) => { + let mk = (~loc, ~core_loc, ~attributes=?, d) => { let attributes = Option.value(~default=[], attributes); - {pexp_desc: d, pexp_attributes: attributes, pexp_loc: loc}; + { + pexp_desc: d, + pexp_attributes: attributes, + pexp_loc: loc, + pexp_core_loc: core_loc, + }; }; - let ident = (~loc, ~attributes=?, a) => mk(~loc, ~attributes?, PExpId(a)); - let constant = (~loc, ~attributes=?, a) => - mk(~loc, ~attributes?, PExpConstant(a)); - let tuple = (~loc, ~attributes=?, a) => - mk(~loc, ~attributes?, PExpTuple(a)); - let record = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpRecord(a, b)); - let record_fields = (~loc, ~attributes=?, a) => + let ident = (~loc, ~core_loc, ~attributes=?, a) => + mk(~loc, ~core_loc, ~attributes?, PExpId(a)); + let constant = (~loc, ~core_loc, ~attributes=?, a) => + mk(~loc, ~core_loc, ~attributes?, PExpConstant(a)); + let tuple = (~loc, ~core_loc, ~attributes=?, a) => + mk(~loc, ~core_loc, ~attributes?, PExpTuple(a)); + let record = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpRecord(a, b)); + let record_fields = (~loc, ~core_loc, ~attributes=?, a) => switch (a) { | [] => failwith("Impossible: empty record field list") | [base, ...rest] => @@ -285,60 +248,61 @@ 
module Expression = { rest, ); let record_items = List.rev(record_items); - record(~loc, ~attributes?, spread_base, record_items); + record(~loc, ~core_loc, ~attributes?, spread_base, record_items); }; - let record_get = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpRecordGet(a, b)); - let record_set = (~loc, ~attributes=?, a, b, c) => - mk(~loc, ~attributes?, PExpRecordSet(a, b, c)); - let array = (~loc, ~attributes=?, a) => - mk(~loc, ~attributes?, PExpArray(a)); - let array_get = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpArrayGet(a, b)); - let array_set = (~loc, ~attributes=?, a, b, c) => - mk(~loc, ~attributes?, PExpArraySet(a, b, c)); - let let_ = (~loc, ~attributes=?, a, b, c) => - mk(~loc, ~attributes?, PExpLet(a, b, c)); - let match = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpMatch(a, b)); - let prim0 = (~loc, ~attributes=?, a) => - mk(~loc, ~attributes?, PExpPrim0(a)); - let prim1 = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpPrim1(a, b)); - let prim2 = (~loc, ~attributes=?, a, b, c) => - mk(~loc, ~attributes?, PExpPrim2(a, b, c)); - let primn = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpPrimN(a, b)); - let if_ = (~loc, ~attributes=?, a, b, c) => - mk(~loc, ~attributes?, PExpIf(a, b, c)); - let while_ = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpWhile(a, b)); - let for_ = (~loc, ~attributes=?, a, b, c, d) => - mk(~loc, ~attributes?, PExpFor(a, b, c, d)); - let continue = (~loc, ~attributes=?, ()) => - mk(~loc, ~attributes?, PExpContinue); - let break = (~loc, ~attributes=?, ()) => mk(~loc, ~attributes?, PExpBreak); - let return = (~loc, ~attributes=?, a) => - mk(~loc, ~attributes?, PExpReturn(a)); - let constraint_ = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpConstraint(a, b)); - let use = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpUse(a, b)); - let box_assign = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpBoxAssign(a, b)); - let assign = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpAssign(a, b)); - let lambda = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpLambda(a, b)); - let apply = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpApp(a, b)); - let construct = (~loc, ~attributes=?, a, b) => - mk(~loc, ~attributes?, PExpConstruct(a, b)); - let singleton_construct = (~loc, ~attributes=?, a) => - construct(~loc, ~attributes?, a, PExpConstrSingleton); - let tuple_construct = (~loc, ~attributes=?, a, b) => - construct(~loc, ~attributes?, a, PExpConstrTuple(b)); - let record_construct = (~loc, ~attributes=?, a, b) => { + let record_get = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpRecordGet(a, b)); + let record_set = (~loc, ~core_loc, ~attributes=?, a, b, c) => + mk(~loc, ~core_loc, ~attributes?, PExpRecordSet(a, b, c)); + let array = (~loc, ~core_loc, ~attributes=?, a) => + mk(~loc, ~core_loc, ~attributes?, PExpArray(a)); + let array_get = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpArrayGet(a, b)); + let array_set = (~loc, ~core_loc, ~attributes=?, a, b, c) => + mk(~loc, ~core_loc, ~attributes?, PExpArraySet(a, b, c)); + let let_ = (~loc, ~core_loc, ~attributes=?, a, b, c) => + mk(~loc, ~core_loc, ~attributes?, PExpLet(a, b, c)); + let match = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpMatch(a, b)); + let prim0 = (~loc, ~core_loc, ~attributes=?, a) => + mk(~loc, ~core_loc, 
~attributes?, PExpPrim0(a)); + let prim1 = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpPrim1(a, b)); + let prim2 = (~loc, ~core_loc, ~attributes=?, a, b, c) => + mk(~loc, ~core_loc, ~attributes?, PExpPrim2(a, b, c)); + let primn = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpPrimN(a, b)); + let if_ = (~loc, ~core_loc, ~attributes=?, a, b, c) => + mk(~loc, ~core_loc, ~attributes?, PExpIf(a, b, c)); + let while_ = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpWhile(a, b)); + let for_ = (~loc, ~core_loc, ~attributes=?, a, b, c, d) => + mk(~loc, ~core_loc, ~attributes?, PExpFor(a, b, c, d)); + let continue = (~loc, ~core_loc, ~attributes=?, ()) => + mk(~loc, ~core_loc, ~attributes?, PExpContinue); + let break = (~loc, ~core_loc, ~attributes=?, ()) => + mk(~loc, ~core_loc, ~attributes?, PExpBreak); + let return = (~loc, ~core_loc, ~attributes=?, a) => + mk(~loc, ~core_loc, ~attributes?, PExpReturn(a)); + let constraint_ = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpConstraint(a, b)); + let use = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpUse(a, b)); + let box_assign = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpBoxAssign(a, b)); + let assign = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpAssign(a, b)); + let lambda = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpLambda(a, b)); + let apply = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpApp(a, b)); + let construct = (~loc, ~core_loc, ~attributes=?, a, b) => + mk(~loc, ~core_loc, ~attributes?, PExpConstruct(a, b)); + let singleton_construct = (~loc, ~core_loc, ~attributes=?, a) => + construct(~loc, ~core_loc, ~attributes?, a, PExpConstrSingleton); + let tuple_construct = (~loc, ~core_loc, ~attributes=?, a, b) => + construct(~loc, ~core_loc, ~attributes?, a, PExpConstrTuple(b)); + let record_construct = (~loc, ~core_loc, ~attributes=?, a, b) => { let record_items = List.map( expr => { @@ -355,7 +319,13 @@ module Expression = { }, b, ); - construct(~loc, ~attributes?, a, PExpConstrRecord(record_items)); + construct( + ~loc, + ~core_loc, + ~attributes?, + a, + PExpConstrRecord(record_items), + ); }; // It's difficult to parse rational numbers while division exists (in the // parser state where you've read NUMBER_INT and you're looking ahead at /, @@ -364,7 +334,7 @@ module Expression = { // and if you choose to shift then 1 / foo would always be a syntax error // because the parser would expect a number). It's easier to just parse it // as division and have this action decide that it's actually a rational. 
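
The comment above is the whole story behind the special case in `Expression.binop` that follows: `1 / 3` is always reduced as a division, and the AST builder then decides whether it was really a rational literal. As a rough, self-contained sketch of that disambiguation (toy types below, illustrative only; not the compiler's actual `Parsetree` or `Expression.binop`):

type num =
  | Int(string)
  | Rational(string, string);

type expr =
  | Constant(num)
  | Ident(string)
  | Apply(string, list(expr)); // operator or function name plus arguments

// Building a division: if both operands are integer literals, fold the node
// into a rational constant; otherwise keep it as an ordinary operator call.
let make_division = (lhs, rhs) =>
  switch (lhs, rhs) {
  | (Constant(Int(x)), Constant(Int(y))) => Constant(Rational(x, y))
  | _ => Apply("/", [lhs, rhs])
  };

// `1 / 3` folds into the rational constant 1/3...
let rational = make_division(Constant(Int("1")), Constant(Int("3")));
// ...while `1 / foo` stays a runtime division.
let division = make_division(Constant(Int("1")), Ident("foo"));

In the real builder below, the operands are `Parsetree` expressions: the literal/literal case produces a `PConstNumber(PConstNumberRational(x, y))` constant, and the fallback case builds a `PExpApp` application.
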
- let binop = (~loc, ~attributes=?, f, a, b) => { + let binop = (~loc, ~core_loc, ~attributes=?, f, a, b) => { // Locations of nested binops are difficult to compute in the parser so we // just set the location manually here let loc = @@ -383,10 +353,16 @@ module Expression = { {pexp_desc: PExpConstant(PConstNumber(PConstNumberInt(x)))}, {pexp_desc: PExpConstant(PConstNumber(PConstNumberInt(y)))}, ) => - constant(~loc, ~attributes?, PConstNumber(PConstNumberRational(x, y))) + constant( + ~loc, + ~core_loc, + ~attributes?, + PConstNumber(PConstNumberRational(x, y)), + ) | _ => mk( ~loc, + ~core_loc, ~attributes?, PExpApp( f, @@ -398,71 +374,54 @@ module Expression = { ) }; }; - let block = (~loc, ~attributes=?, a) => - mk(~loc, ~attributes?, PExpBlock(a)); - let list = (~loc, ~attributes=?, a) => { - let empty = tuple_construct(~loc, ident_empty, []); - let list = - switch (List.rev(a)) { - | [] => empty - | [base, ...rest] => - let base = - switch (base) { - | ListItem(expr) => - tuple_construct(~loc, ~attributes?, ident_cons, [expr, empty]) - | ListSpread(expr, _) => expr - }; - List.fold_left( - (acc, expr) => { - switch (expr) { - | ListItem(expr) => - tuple_construct(~loc, ~attributes?, ident_cons, [expr, acc]) - | ListSpread(_, loc) => - raise( - SyntaxError( - loc, - "A list spread can only appear at the end of a list.", - ), - ) - } - }, - base, - rest, - ); - }; - {...list, pexp_loc: loc}; + let block = (~loc, ~core_loc, ~attributes=?, a) => + mk(~loc, ~core_loc, ~attributes?, PExpBlock(a)); + let list = (~loc, ~core_loc, ~attributes=?, a) => { + mk(~loc, ~core_loc, ~attributes?, PExpList(a)); }; let ignore = e => switch (e.pexp_desc) { | PExpLet(_) => e - | _ => prim1(~loc=e.pexp_loc, ~attributes=e.pexp_attributes, Ignore, e) + | _ => + prim1( + ~loc=e.pexp_loc, + ~core_loc=e.pexp_core_loc, + ~attributes=e.pexp_attributes, + Ignore, + e, + ) }; }; module Toplevel = { - let mk = (~loc, ~attributes=?, d) => { + let mk = (~loc, ~core_loc, ~attributes=?, d) => { let attributes = Option.value(~default=[], attributes); - {ptop_desc: d, ptop_attributes: attributes, ptop_loc: loc}; + { + ptop_desc: d, + ptop_attributes: attributes, + ptop_loc: loc, + ptop_core_loc: core_loc, + }; }; - let include_ = (~loc, ~attributes=?, i) => - mk(~loc, ~attributes?, PTopInclude(i)); - let foreign = (~loc, ~attributes=?, e, d) => - mk(~loc, ~attributes?, PTopForeign(e, d)); - let module_ = (~loc, ~attributes=?, e, m) => - mk(~loc, ~attributes?, PTopModule(e, m)); - let primitive = (~loc, ~attributes=?, e, d) => - mk(~loc, ~attributes?, PTopPrimitive(e, d)); - let data = (~loc, ~attributes=?, elts) => - mk(~loc, ~attributes?, PTopData(elts)); - let let_ = (~loc, ~attributes=?, e, r, m, vb) => - mk(~loc, ~attributes?, PTopLet(e, r, m, vb)); - let expr = (~loc, ~attributes=?, e) => - mk(~loc, ~attributes?, PTopExpr(e)); - let grain_exception = (~loc, ~attributes=?, e, ext) => - mk(~loc, ~attributes?, PTopException(e, ext)); - let provide = (~loc, ~attributes=?, e) => - mk(~loc, ~attributes?, PTopProvide(e)); + let include_ = (~loc, ~core_loc, ~attributes=?, i) => + mk(~loc, ~core_loc, ~attributes?, PTopInclude(i)); + let foreign = (~loc, ~core_loc, ~attributes=?, e, d) => + mk(~loc, ~core_loc, ~attributes?, PTopForeign(e, d)); + let module_ = (~loc, ~core_loc, ~attributes=?, e, m) => + mk(~loc, ~core_loc, ~attributes?, PTopModule(e, m)); + let primitive = (~loc, ~core_loc, ~attributes=?, e, d) => + mk(~loc, ~core_loc, ~attributes?, PTopPrimitive(e, d)); + let data = (~loc, ~core_loc, ~attributes=?, elts) => 
+ mk(~loc, ~core_loc, ~attributes?, PTopData(elts)); + let let_ = (~loc, ~core_loc, ~attributes=?, e, r, m, vb) => + mk(~loc, ~core_loc, ~attributes?, PTopLet(e, r, m, vb)); + let expr = (~loc, ~core_loc, ~attributes=?, e) => + mk(~loc, ~core_loc, ~attributes?, PTopExpr(e)); + let grain_exception = (~loc, ~core_loc, ~attributes=?, e, ext) => + mk(~loc, ~core_loc, ~attributes?, PTopException(e, ext)); + let provide = (~loc, ~core_loc, ~attributes=?, e) => + mk(~loc, ~core_loc, ~attributes?, PTopProvide(e)); }; module PrimitiveDescription = { diff --git a/compiler/src/parsing/ast_helper.rei b/compiler/src/parsing/ast_helper.rei index 5289ad1833..c2fdb26e33 100644 --- a/compiler/src/parsing/ast_helper.rei +++ b/compiler/src/parsing/ast_helper.rei @@ -20,14 +20,6 @@ open Parsetree; exception SyntaxError(Location.t, string); exception BadEncoding(Location.t); -type listitem('a) = - | ListItem('a) - | ListSpread('a, Location.t); - -type recorditem = - | RecordItem(loc(Identifier.t), expression) - | RecordSpread(expression, Location.t); - type location('a) = loc('a); type id = loc(Identifier.t); @@ -142,7 +134,7 @@ module Pattern: { let record: (~loc: loc, list((option((id, pattern)), Asttypes.closed_flag))) => pattern; - let list: (~loc: loc, list(listitem(pattern))) => pattern; + let list: (~loc: loc, list(list_item(pattern))) => pattern; let constant: (~loc: loc, constant) => pattern; let constraint_: (~loc: loc, pattern, parsed_type) => pattern; let construct: (~loc: loc, id, constructor_pattern) => pattern; @@ -157,37 +149,80 @@ module Pattern: { module Expression: { let mk: - (~loc: loc, ~attributes: attributes=?, expression_desc) => expression; - let ident: (~loc: loc, ~attributes: attributes=?, id) => expression; - let constant: (~loc: loc, ~attributes: attributes=?, constant) => expression; + (~loc: loc, ~core_loc: loc, ~attributes: attributes=?, expression_desc) => + expression; + let ident: + (~loc: loc, ~core_loc: loc, ~attributes: attributes=?, id) => expression; + let constant: + (~loc: loc, ~core_loc: loc, ~attributes: attributes=?, constant) => + expression; let tuple: - (~loc: loc, ~attributes: attributes=?, list(expression)) => expression; + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + list(expression) + ) => + expression; let record: ( ~loc: loc, + ~core_loc: loc, ~attributes: attributes=?, option(expression), list((id, expression)) ) => expression; let record_fields: - (~loc: loc, ~attributes: attributes=?, list(recorditem)) => expression; + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + list(record_item(expression)) + ) => + expression; let record_get: - (~loc: loc, ~attributes: attributes=?, expression, id) => expression; + (~loc: loc, ~core_loc: loc, ~attributes: attributes=?, expression, id) => + expression; let record_set: - (~loc: loc, ~attributes: attributes=?, expression, id, expression) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + expression, + id, + expression + ) => expression; let list: - (~loc: loc, ~attributes: attributes=?, list(listitem(expression))) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + list(list_item(expression)) + ) => expression; let array: - (~loc: loc, ~attributes: attributes=?, list(expression)) => expression; + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + list(expression) + ) => + expression; let array_get: - (~loc: loc, ~attributes: attributes=?, expression, expression) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + 
expression, + expression + ) => expression; let array_set: ( ~loc: loc, + ~core_loc: loc, ~attributes: attributes=?, expression, expression, @@ -197,6 +232,7 @@ module Expression: { let let_: ( ~loc: loc, + ~core_loc: loc, ~attributes: attributes=?, rec_flag, mut_flag, @@ -204,20 +240,48 @@ module Expression: { ) => expression; let match: - (~loc: loc, ~attributes: attributes=?, expression, list(match_branch)) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + expression, + location(list(match_branch)) + ) => expression; - let prim0: (~loc: loc, ~attributes: attributes=?, prim0) => expression; + let prim0: + (~loc: loc, ~core_loc: loc, ~attributes: attributes=?, prim0) => expression; let prim1: - (~loc: loc, ~attributes: attributes=?, prim1, expression) => expression; + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + prim1, + expression + ) => + expression; let prim2: - (~loc: loc, ~attributes: attributes=?, prim2, expression, expression) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + prim2, + expression, + expression + ) => expression; let primn: - (~loc: loc, ~attributes: attributes=?, primn, list(expression)) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + primn, + list(expression) + ) => expression; let if_: ( ~loc: loc, + ~core_loc: loc, ~attributes: attributes=?, expression, expression, @@ -225,11 +289,18 @@ module Expression: { ) => expression; let while_: - (~loc: loc, ~attributes: attributes=?, expression, expression) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + expression, + expression + ) => expression; let for_: ( ~loc: loc, + ~core_loc: loc, ~attributes: attributes=?, option(expression), option(expression), @@ -237,23 +308,52 @@ module Expression: { expression ) => expression; - let continue: (~loc: loc, ~attributes: attributes=?, unit) => expression; - let break: (~loc: loc, ~attributes: attributes=?, unit) => expression; + let continue: + (~loc: loc, ~core_loc: loc, ~attributes: attributes=?, unit) => expression; + let break: + (~loc: loc, ~core_loc: loc, ~attributes: attributes=?, unit) => expression; let return: - (~loc: loc, ~attributes: attributes=?, option(expression)) => expression; + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + option(expression) + ) => + expression; let constraint_: - (~loc: loc, ~attributes: attributes=?, expression, parsed_type) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + expression, + parsed_type + ) => + expression; + let use: + (~loc: loc, ~core_loc: loc, ~attributes: attributes=?, id, use_items) => expression; - let use: (~loc: loc, ~attributes: attributes=?, id, use_items) => expression; let box_assign: - (~loc: loc, ~attributes: attributes=?, expression, expression) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + expression, + expression + ) => expression; let assign: - (~loc: loc, ~attributes: attributes=?, expression, expression) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + expression, + expression + ) => expression; let lambda: ( ~loc: loc, + ~core_loc: loc, ~attributes: attributes=?, list(lambda_argument), expression @@ -262,24 +362,51 @@ module Expression: { let apply: ( ~loc: loc, + ~core_loc: loc, ~attributes: attributes=?, expression, list(application_argument) ) => expression; let construct: - (~loc: loc, ~attributes: attributes=?, id, constructor_expression) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + id, + 
constructor_expression + ) => expression; let singleton_construct: - (~loc: loc, ~attributes: attributes=?, Location.loc(Identifier.t)) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + Location.loc(Identifier.t) + ) => expression; let tuple_construct: - (~loc: loc, ~attributes: attributes=?, id, list(expression)) => expression; + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + id, + list(expression) + ) => + expression; let record_construct: - (~loc: loc, ~attributes: attributes=?, id, list(recorditem)) => expression; + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + id, + list(record_item(expression)) + ) => + expression; let binop: ( ~loc: loc, + ~core_loc: loc, ~attributes: attributes=?, expression, expression, @@ -287,25 +414,55 @@ module Expression: { ) => expression; let block: - (~loc: loc, ~attributes: attributes=?, list(expression)) => expression; + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + list(expression) + ) => + expression; let ignore: expression => expression; }; module Toplevel: { let mk: - (~loc: loc, ~attributes: attributes=?, toplevel_stmt_desc) => toplevel_stmt; + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + toplevel_stmt_desc + ) => + toplevel_stmt; let include_: - (~loc: loc, ~attributes: attributes=?, include_declaration) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + include_declaration + ) => toplevel_stmt; let foreign: - (~loc: loc, ~attributes: attributes=?, provide_flag, value_description) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + provide_flag, + value_description + ) => toplevel_stmt; let module_: - (~loc: loc, ~attributes: attributes=?, provide_flag, module_declaration) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + provide_flag, + module_declaration + ) => toplevel_stmt; let primitive: ( ~loc: loc, + ~core_loc: loc, ~attributes: attributes=?, provide_flag, primitive_description @@ -314,13 +471,15 @@ module Toplevel: { let data: ( ~loc: loc, + ~core_loc: loc, ~attributes: attributes=?, - list((provide_flag, data_declaration)) + list((provide_flag, data_declaration, loc)) ) => toplevel_stmt; let let_: ( ~loc: loc, + ~core_loc: loc, ~attributes: attributes=?, provide_flag, rec_flag, @@ -329,12 +488,24 @@ module Toplevel: { ) => toplevel_stmt; let expr: - (~loc: loc, ~attributes: attributes=?, expression) => toplevel_stmt; + (~loc: loc, ~core_loc: loc, ~attributes: attributes=?, expression) => + toplevel_stmt; let grain_exception: - (~loc: loc, ~attributes: attributes=?, provide_flag, type_exception) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + provide_flag, + type_exception + ) => toplevel_stmt; let provide: - (~loc: loc, ~attributes: attributes=?, list(provide_item)) => + ( + ~loc: loc, + ~core_loc: loc, + ~attributes: attributes=?, + list(provide_item) + ) => toplevel_stmt; }; diff --git a/compiler/src/parsing/ast_mapper.re b/compiler/src/parsing/ast_mapper.re index 2a42551096..39336774ac 100644 --- a/compiler/src/parsing/ast_mapper.re +++ b/compiler/src/parsing/ast_mapper.re @@ -46,25 +46,66 @@ module Cnst = { }; module E = { - let map = (sub, {pexp_desc: desc, pexp_attributes: attrs, pexp_loc: loc}) => { + let map = + ( + sub, + { + pexp_desc: desc, + pexp_attributes: attrs, + pexp_loc: loc, + pexp_core_loc: core_loc, + }, + ) => { open Expression; let loc = sub.location(sub, loc); + let core_loc = sub.location(sub, core_loc); let attributes = List.map( - ((attr, args)) => - 
(map_loc(sub, attr), List.map(map_loc(sub), args)), + ({Asttypes.attr_name, attr_args, attr_loc}) => + { + Asttypes.attr_name: map_loc(sub, attr_name), + attr_args: List.map(map_loc(sub), attr_args), + attr_loc: sub.location(sub, attr_loc), + }, attrs, ); switch (desc) { - | PExpId(i) => ident(~loc, ~attributes, map_identifier(sub, i)) - | PExpConstant(c) => constant(~loc, ~attributes, sub.constant(sub, c)) - | PExpTuple(es) => tuple(~loc, ~attributes, List.map(sub.expr(sub), es)) - | PExpArray(es) => array(~loc, ~attributes, List.map(sub.expr(sub), es)) + | PExpId(i) => + ident(~loc, ~core_loc, ~attributes, map_identifier(sub, i)) + | PExpConstant(c) => + constant(~loc, ~core_loc, ~attributes, sub.constant(sub, c)) + | PExpTuple(es) => + tuple(~loc, ~core_loc, ~attributes, List.map(sub.expr(sub), es)) + | PExpList(es) => + list( + ~loc, + ~core_loc, + ~attributes, + List.map( + item => { + switch (item) { + | ListItem(e) => ListItem(sub.expr(sub, e)) + | ListSpread(e, loc) => + ListSpread(sub.expr(sub, e), sub.location(sub, loc)) + } + }, + es, + ), + ) + | PExpArray(es) => + array(~loc, ~core_loc, ~attributes, List.map(sub.expr(sub), es)) | PExpArrayGet(a, i) => - array_get(~loc, ~attributes, sub.expr(sub, a), sub.expr(sub, i)) + array_get( + ~loc, + ~core_loc, + ~attributes, + sub.expr(sub, a), + sub.expr(sub, i), + ) | PExpArraySet(a, i, arg) => array_set( ~loc, + ~core_loc, ~attributes, sub.expr(sub, a), sub.expr(sub, i), @@ -73,65 +114,113 @@ module E = { | PExpRecord(b, es) => record( ~loc, + ~core_loc, ~attributes, Option.map(sub.expr(sub), b), map_record_fields(sub, es), ) | PExpRecordGet(e, f) => - record_get(~loc, ~attributes, sub.expr(sub, e), map_loc(sub, f)) + record_get( + ~loc, + ~core_loc, + ~attributes, + sub.expr(sub, e), + map_loc(sub, f), + ) | PExpRecordSet(e, f, v) => record_set( ~loc, + ~core_loc, ~attributes, sub.expr(sub, e), map_loc(sub, f), sub.expr(sub, v), ) | PExpLet(r, m, vbs) => - let_(~loc, ~attributes, r, m, List.map(sub.value_binding(sub), vbs)) + let_( + ~loc, + ~core_loc, + ~attributes, + r, + m, + List.map(sub.value_binding(sub), vbs), + ) | PExpMatch(e, mbs) => match( ~loc, + ~core_loc, ~attributes, sub.expr(sub, e), - List.map(sub.match_branch(sub), mbs), + { + txt: List.map(sub.match_branch(sub), mbs.txt), + loc: sub.location(sub, mbs.loc), + }, ) - | PExpPrim0(p0) => prim0(~loc, ~attributes, p0) - | PExpPrim1(p1, e) => prim1(~loc, ~attributes, p1, sub.expr(sub, e)) + | PExpPrim0(p0) => prim0(~loc, ~core_loc, ~attributes, p0) + | PExpPrim1(p1, e) => + prim1(~loc, ~core_loc, ~attributes, p1, sub.expr(sub, e)) | PExpPrim2(p2, e1, e2) => - prim2(~loc, ~attributes, p2, sub.expr(sub, e1), sub.expr(sub, e2)) + prim2( + ~loc, + ~core_loc, + ~attributes, + p2, + sub.expr(sub, e1), + sub.expr(sub, e2), + ) | PExpPrimN(p, es) => - primn(~loc, ~attributes, p, List.map(sub.expr(sub), es)) + primn(~loc, ~core_loc, ~attributes, p, List.map(sub.expr(sub), es)) | PExpBoxAssign(e1, e2) => - box_assign(~loc, ~attributes, sub.expr(sub, e1), sub.expr(sub, e2)) + box_assign( + ~loc, + ~core_loc, + ~attributes, + sub.expr(sub, e1), + sub.expr(sub, e2), + ) | PExpAssign(e1, e2) => - assign(~loc, ~attributes, sub.expr(sub, e1), sub.expr(sub, e2)) + assign( + ~loc, + ~core_loc, + ~attributes, + sub.expr(sub, e1), + sub.expr(sub, e2), + ) | PExpIf(c, t, f) => if_( ~loc, + ~core_loc, ~attributes, sub.expr(sub, c), sub.expr(sub, t), Option.map(sub.expr(sub), f), ) | PExpWhile(c, e) => - while_(~loc, ~attributes, sub.expr(sub, c), sub.expr(sub, e)) + while_( + ~loc, + 
~core_loc, + ~attributes, + sub.expr(sub, c), + sub.expr(sub, e), + ) | PExpFor(i, c, inc, e) => for_( ~loc, + ~core_loc, ~attributes, Option.map(sub.expr(sub), i), Option.map(sub.expr(sub), c), Option.map(sub.expr(sub), inc), sub.expr(sub, e), ) - | PExpContinue => continue(~loc, ~attributes, ()) - | PExpBreak => break(~loc, ~attributes, ()) + | PExpContinue => continue(~loc, ~core_loc, ~attributes, ()) + | PExpBreak => break(~loc, ~core_loc, ~attributes, ()) | PExpReturn(e) => - return(~loc, ~attributes, Option.map(sub.expr(sub), e)) + return(~loc, ~core_loc, ~attributes, Option.map(sub.expr(sub), e)) | PExpLambda(pl, e) => lambda( ~loc, + ~core_loc, ~attributes, List.map( arg => @@ -148,6 +237,7 @@ module E = { | PExpApp(e, el) => apply( ~loc, + ~core_loc, ~attributes, sub.expr(sub, e), List.map( @@ -163,6 +253,7 @@ module E = { | PExpConstruct(id, e) => construct( ~loc, + ~core_loc, ~attributes, map_identifier(sub, id), switch (e) { @@ -173,9 +264,16 @@ module E = { PExpConstrRecord(map_record_fields(sub, es)) }, ) - | PExpBlock(el) => block(~loc, ~attributes, List.map(sub.expr(sub), el)) + | PExpBlock(el) => + block(~loc, ~core_loc, ~attributes, List.map(sub.expr(sub), el)) | PExpConstraint(e, t) => - constraint_(~loc, ~attributes, sub.expr(sub, e), sub.typ(sub, t)) + constraint_( + ~loc, + ~core_loc, + ~attributes, + sub.expr(sub, e), + sub.typ(sub, t), + ) | PExpUse(id, u) => let u = switch (u) { @@ -215,7 +313,7 @@ module E = { ) | PUseAll => PUseAll }; - use(~loc, ~attributes, map_identifier(sub, id), u); + use(~loc, ~core_loc, ~attributes, map_identifier(sub, id), u); }; }; }; @@ -228,6 +326,20 @@ module P = { | PPatAny => any(~loc, ()) | PPatVar(sl) => var(~loc, map_loc(sub, sl)) | PPatTuple(pl) => tuple(~loc, List.map(sub.pat(sub), pl)) + | PPatList(pl) => + list( + ~loc, + List.map( + item => { + switch (item) { + | ListItem(p) => ListItem(sub.pat(sub, p)) + | ListSpread(p, loc) => + ListSpread(sub.pat(sub, p), sub.location(sub, loc)) + } + }, + pl, + ), + ) | PPatArray(pl) => array(~loc, List.map(sub.pat(sub), pl)) | PPatRecord(fs, c) => record( @@ -482,23 +594,49 @@ module VD = { }; module TL = { - let map = (sub, {ptop_desc: desc, ptop_attributes: attrs, ptop_loc: loc}) => { + let map = + ( + sub, + { + ptop_desc: desc, + ptop_attributes: attrs, + ptop_loc: loc, + ptop_core_loc: core_loc, + }, + ) => { open Toplevel; let loc = sub.location(sub, loc); + let core_loc = sub.location(sub, core_loc); let attributes = List.map( - ((attr, args)) => - (map_loc(sub, attr), List.map(map_loc(sub), args)), + ({Asttypes.attr_name, attr_args, attr_loc}) => + { + Asttypes.attr_name: map_loc(sub, attr_name), + attr_args: List.map(map_loc(sub), attr_args), + attr_loc: sub.location(sub, attr_loc), + }, attrs, ); switch (desc) { | PTopInclude(decls) => - Toplevel.include_(~loc, ~attributes, sub.include_(sub, decls)) + Toplevel.include_( + ~loc, + ~core_loc, + ~attributes, + sub.include_(sub, decls), + ) | PTopForeign(e, d) => - Toplevel.foreign(~loc, ~attributes, e, sub.value_description(sub, d)) + Toplevel.foreign( + ~loc, + ~core_loc, + ~attributes, + e, + sub.value_description(sub, d), + ) | PTopPrimitive(e, d) => Toplevel.primitive( ~loc, + ~core_loc, ~attributes, e, sub.primitive_description(sub, d), @@ -506,12 +644,17 @@ module TL = { | PTopData(dd) => Toplevel.data( ~loc, + ~core_loc, ~attributes, - List.map(((e, d)) => (e, sub.data(sub, d)), dd), + List.map( + ((e, d, l)) => (e, sub.data(sub, d), sub.location(sub, l)), + dd, + ), ) | PTopLet(e, r, m, vb) => Toplevel.let_( ~loc, + 
~core_loc, ~attributes, e, r, @@ -521,20 +664,23 @@ module TL = { | PTopModule(e, d) => Toplevel.module_( ~loc, + ~core_loc, ~attributes, e, {...d, pmod_stmts: List.map(sub.toplevel(sub), d.pmod_stmts)}, ) - | PTopExpr(e) => Toplevel.expr(~loc, ~attributes, sub.expr(sub, e)) + | PTopExpr(e) => + Toplevel.expr(~loc, ~core_loc, ~attributes, sub.expr(sub, e)) | PTopException(e, d) => Toplevel.grain_exception( ~loc, + ~core_loc, ~attributes, e, sub.grain_exception(sub, d), ) | PTopProvide(ex) => - Toplevel.provide(~loc, ~attributes, sub.provide(sub, ex)) + Toplevel.provide(~loc, ~core_loc, ~attributes, sub.provide(sub, ex)) }; }; }; diff --git a/compiler/src/parsing/asttypes.re b/compiler/src/parsing/asttypes.re index 01eec717c1..e91d1ad833 100644 --- a/compiler/src/parsing/asttypes.re +++ b/compiler/src/parsing/asttypes.re @@ -111,7 +111,11 @@ let mknoloc = Location.mknoloc; /** Addtional expression information that may affect compilation. */ [@deriving (sexp, yojson)] -type attribute = (loc(string), list(loc(string))); +type attribute = { + attr_name: loc(string), + attr_args: list(loc(string)), + attr_loc: Location.t, +}; [@deriving (sexp, yojson)] type attributes = list(attribute); diff --git a/compiler/src/parsing/location.re b/compiler/src/parsing/location.re index aff08d4e83..b741042d9c 100644 --- a/compiler/src/parsing/location.re +++ b/compiler/src/parsing/location.re @@ -435,7 +435,7 @@ let mkloc = (txt, loc) => {txt, loc}; let mknoloc = txt => mkloc(txt, dummy_loc); type error = { - loc: t, + error_loc: t, msg: string, sub: list(error), if_highlight: string /* alternative message if locations are highlighted */ @@ -468,12 +468,12 @@ let print_phantom_error_prefix = ppf => let errorf = (~loc=dummy_loc, ~sub=[], ~if_highlight="", fmt) => pp_ksprintf( ~before=print_phantom_error_prefix, - msg => {loc, msg, sub, if_highlight}, + msg => {error_loc: loc, msg, sub, if_highlight}, fmt, ); let error = (~loc=dummy_loc, ~sub=[], ~if_highlight="", msg) => { - loc, + error_loc: loc, msg, sub, if_highlight, @@ -504,8 +504,8 @@ let error_of_exn = exn => loop(error_of_exn^); }; -let rec default_error_reporter = (ppf, {loc, msg, sub, if_highlight}) => { - fprintf(ppf, "@[%a %s", print_error, loc, msg); +let rec default_error_reporter = (ppf, {error_loc, msg, sub, if_highlight}) => { + fprintf(ppf, "@[%a %s", print_error, error_loc, msg); List.iter(Format.fprintf(ppf, "@,@[<2>%a@]", default_error_reporter), sub); fprintf(ppf, "@]"); }; @@ -565,5 +565,5 @@ let () = let raise_errorf = (~loc=dummy_loc, ~sub=[], ~if_highlight="") => pp_ksprintf(~before=print_phantom_error_prefix, msg => - raise(Error({loc, msg, sub, if_highlight})) + raise(Error({error_loc: loc, msg, sub, if_highlight})) ); diff --git a/compiler/src/parsing/location.rei b/compiler/src/parsing/location.rei index 913e3fe54f..9634c078c4 100644 --- a/compiler/src/parsing/location.rei +++ b/compiler/src/parsing/location.rei @@ -103,7 +103,7 @@ let mkloc: ('a, t) => loc('a); /** Support for located errors */; type error = { - loc: t, + error_loc: t, msg: string, sub: list(error), if_highlight: string /* alternative message if locations are highlighted */ diff --git a/compiler/src/parsing/parser.mly b/compiler/src/parsing/parser.mly index ee967be990..8e1c13058f 100644 --- a/compiler/src/parsing/parser.mly +++ b/compiler/src/parsing/parser.mly @@ -245,11 +245,11 @@ non_stmt_expr: annotated_expr: | non_binop_expr %prec COLON { $1 } - | non_binop_expr colon typ { Expression.constraint_ ~loc:(to_loc $loc) $1 $3 } + | non_binop_expr colon typ { 
Expression.constraint_ ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3 } binop_expr: - | non_stmt_expr infix_op opt_eols non_stmt_expr { Expression.binop ~loc:(to_loc $loc) (mkid_expr $loc($2) [mkstr $loc($2) $2]) $1 $4 } - | non_stmt_expr rcaret_rcaret_op opt_eols non_stmt_expr %prec INFIX_100 { Expression.binop ~loc:(to_loc $loc) (mkid_expr $loc($2) [mkstr $loc($2) $2]) $1 $4 } + | non_stmt_expr infix_op opt_eols non_stmt_expr { Expression.binop ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) (mkid_expr $loc($2) [mkstr $loc($2) $2]) $1 $4 } + | non_stmt_expr rcaret_rcaret_op opt_eols non_stmt_expr %prec INFIX_100 { Expression.binop ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) (mkid_expr $loc($2) [mkstr $loc($2) $2]) $1 $4 } ellipsis_prefix(X): | ELLIPSIS X {$2} @@ -354,7 +354,7 @@ use_shape: | lbrace use_items? rbrace { PUseItems (Option.value ~default:[] $2) } use_stmt: - | FROM qualified_uid USE use_shape { Expression.use ~loc:(to_loc $loc) $2 $4 } + | FROM qualified_uid USE use_shape { Expression.use ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $2 $4 } include_alias: | AS opt_eols qualified_uid { make_module_alias $3 } @@ -363,9 +363,9 @@ include_stmt: | INCLUDE file_path include_alias? { IncludeDeclaration.mk ~loc:(to_loc $loc) $2 $3 } data_declaration_stmt: - | ABSTRACT data_declaration { (Abstract, $2) } - | PROVIDE data_declaration { (Provided, $2) } - | data_declaration { (NotProvided, $1) } + | ABSTRACT data_declaration { (Abstract, $2, to_loc($loc)) } + | PROVIDE data_declaration { (Provided, $2, to_loc($loc)) } + | data_declaration { (NotProvided, $1, to_loc($loc)) } data_declaration_stmts: | separated_nonempty_list(AND, data_declaration_stmt) { $1 } @@ -383,15 +383,15 @@ provide_shape: | lbrace provide_items? rbrace { Option.value ~default:[] $2 } provide_stmt: - | attributes PROVIDE LET REC value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~attributes:$1 Provided Recursive Immutable $5 } - | attributes PROVIDE LET value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~attributes:$1 Provided Nonrecursive Immutable $4 } - | attributes PROVIDE LET REC MUT value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~attributes:$1 Provided Recursive Mutable $6 } - | attributes PROVIDE LET MUT value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~attributes:$1 Provided Nonrecursive Mutable $5 } - | attributes PROVIDE foreign_stmt { Toplevel.foreign ~loc:(to_loc $sloc) ~attributes:$1 Provided $3 } - | attributes PROVIDE primitive_stmt { Toplevel.primitive ~loc:(to_loc $sloc) ~attributes:$1 Provided $3 } - | attributes PROVIDE exception_stmt { Toplevel.grain_exception ~loc:(to_loc $sloc) ~attributes:$1 Provided $3 } - | attributes PROVIDE provide_shape { Toplevel.provide ~loc:(to_loc $sloc) ~attributes:$1 $3 } - | attributes PROVIDE module_stmt { Toplevel.module_ ~loc:(to_loc $loc) ~attributes:$1 Provided $3 } + | attributes PROVIDE LET REC value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Provided Recursive Immutable $5 } + | attributes PROVIDE LET value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Provided Nonrecursive Immutable $4 } + | attributes PROVIDE LET REC MUT value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Provided Recursive Mutable $6 } + | attributes PROVIDE LET MUT value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Provided Nonrecursive Mutable $5 } + | attributes PROVIDE 
foreign_stmt { Toplevel.foreign ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Provided $3 } + | attributes PROVIDE primitive_stmt { Toplevel.primitive ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Provided $3 } + | attributes PROVIDE exception_stmt { Toplevel.grain_exception ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Provided $3 } + | attributes PROVIDE provide_shape { Toplevel.provide ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 $3 } + | attributes PROVIDE module_stmt { Toplevel.module_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Provided $3 } data_constructor: | UIDENT { ConstructorDeclaration.singleton ~loc:(to_loc $loc) (mkstr $loc($1) $1) } @@ -426,7 +426,7 @@ data_declaration: | RECORD rec_flag? UIDENT id_vec? data_record_body { DataDeclaration.record ~loc:(to_loc $loc) ?rec_flag:$2 (mkstr $loc($3) $3) (Option.value ~default:[] $4) $5 } unop_expr: - | prefix_op non_assign_expr { Expression.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) $1]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } + | prefix_op non_assign_expr { Expression.apply ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) $1]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } paren_expr: | lparen expr rparen { $2 } @@ -436,15 +436,15 @@ app_arg: | id_str EQUAL expr { {paa_label=(Labeled $1); paa_expr=$3; paa_loc=to_loc $loc} } app_expr: - | left_accessor_expr lparen lseparated_list(comma, app_arg) comma? rparen { Expression.apply ~loc:(to_loc $loc) $1 $3 } + | left_accessor_expr lparen lseparated_list(comma, app_arg) comma? rparen { Expression.apply ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3 } rcaret_rcaret_op: | lnonempty_list(RCARET) RCARET { (String.init (1 + List.length $1) (fun _ -> '>')) } construct_expr: - | qualified_uid lparen lseparated_list(comma, expr) comma? rparen { Expression.tuple_construct ~loc:(to_loc $loc) $1 $3 } - | qualified_uid lbrace lseparated_nonempty_list(comma, record_field) comma? rbrace { Expression.record_construct ~loc:(to_loc $loc) $1 $3 } - | qualified_uid %prec LPAREN { Expression.singleton_construct ~loc:(to_loc $loc) $1 } + | qualified_uid lparen lseparated_list(comma, expr) comma? rparen { Expression.tuple_construct ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3 } + | qualified_uid lbrace lseparated_nonempty_list(comma, record_field) comma? rbrace { Expression.record_construct ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3 } + | qualified_uid %prec LPAREN { Expression.singleton_construct ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 } // These are all inlined to carry over their precedence. 
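
// To unpack that note (a generic menhir sketch, not this grammar; the PLUS and
// STAR tokens and the Binop constructor below are hypothetical): menhir resolves
// a shift/reduce conflict using the precedence of the production involved, which
// it takes from the production's rightmost terminal unless a %prec annotation
// overrides it. If the operators sat behind an ordinary nonterminal, the binop
// production would contain no operator token and would have no precedence of its
// own; marking the operator rule %inline substitutes each alternative into the
// caller, so every expanded production ends with the actual operator token and
// inherits its declared precedence.
//
//   // Ordinary nonterminal: `expr op expr` has no terminal of its own, so the
//   // %left declarations for PLUS and STAR cannot disambiguate it.
//   expr:
//     | expr op expr { Binop($2, $1, $3) }
//
//   // %inline: the alternatives are expanded at the use site into
//   // `expr PLUS expr` and `expr STAR expr`, each carrying its token's precedence.
//   %inline op:
//     | PLUS { "+" }
//     | STAR { "*" }
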
%inline infix_op: @@ -497,19 +497,19 @@ uid: id_expr: // Force any following colon to cause a shift - | qualified_lid %prec COLON { Expression.ident ~loc:(to_loc $loc) $1 } + | qualified_lid %prec COLON { Expression.ident ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 } simple_expr: - | const { Expression.constant ~loc:(to_loc (snd $1)) (fst $1) } - | lparen tuple_exprs rparen { Expression.tuple ~loc:(to_loc $loc) $2 } + | const { Expression.constant ~loc:(to_loc (snd $1)) ~core_loc:(to_loc (snd $1)) (fst $1) } + | lparen tuple_exprs rparen { Expression.tuple ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $2 } | id_expr { $1 } braced_expr: - | lbrace block_body rbrace { Expression.block ~loc:(to_loc $loc) $2 } - | lbrace record_exprs rbrace { Expression.record_fields ~loc:(to_loc $loc) $2 } + | lbrace block_body rbrace { Expression.block ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $2 } + | lbrace record_exprs rbrace { Expression.record_fields ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $2 } block: - | lbrace block_body rbrace { Expression.block ~loc:(to_loc $loc) $2 } + | lbrace block_body rbrace { Expression.block ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $2 } arg_default: | EQUAL non_stmt_expr { $2 } @@ -521,8 +521,8 @@ lam_args: | lseparated_nonempty_list(comma, lam_arg) comma? { $1 } lam_expr: - | FUN lparen lam_args? rparen thickarrow expr { Expression.lambda ~loc:(to_loc $loc) (Option.value ~default:[] $3) $6 } - | FUN LIDENT thickarrow expr { Expression.lambda ~loc:(to_loc $loc) [LambdaArgument.mk ~loc:(to_loc $loc($2)) (Pattern.var ~loc:(to_loc $loc($2)) (mkstr $loc($2) $2)) None] $4 } + | FUN lparen lam_args? rparen thickarrow expr { Expression.lambda ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) (Option.value ~default:[] $3) $6 } + | FUN LIDENT thickarrow expr { Expression.lambda ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) [LambdaArgument.mk ~loc:(to_loc $loc($2)) (Pattern.var ~loc:(to_loc $loc($2)) (mkstr $loc($2) $2)) None] $4 } attribute_argument: | STRING { mkstr $loc $1 } @@ -531,32 +531,32 @@ attribute_arguments: | lparen lseparated_list(comma, attribute_argument) rparen { $2 } attribute: - | AT id_str loption(attribute_arguments) opt_eols { $2, $3 } + | AT id_str loption(attribute_arguments) opt_eols { {attr_name=$2; attr_args=$3; attr_loc=(to_loc $loc)} } attributes: | attribute* { $1 } let_expr: - | attributes LET REC value_binds { Expression.let_ ~loc:(to_loc $sloc) ~attributes:$1 Recursive Immutable $4 } - | attributes LET value_binds { Expression.let_ ~loc:(to_loc $sloc) ~attributes:$1 Nonrecursive Immutable $3 } - | attributes LET REC MUT value_binds { Expression.let_ ~loc:(to_loc $sloc) ~attributes:$1 Recursive Mutable $5 } - | attributes LET MUT value_binds { Expression.let_ ~loc:(to_loc $sloc) ~attributes:$1 Nonrecursive Mutable $4 } + | attributes LET REC value_binds { Expression.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Recursive Immutable $4 } + | attributes LET value_binds { Expression.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Nonrecursive Immutable $3 } + | attributes LET REC MUT value_binds { Expression.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Recursive Mutable $5 } + | attributes LET MUT value_binds { Expression.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 Nonrecursive Mutable $4 } %inline else_expr: | ELSE opt_eols expr { $3 } if_expr: - | IF lparen expr rparen opt_eols expr ioption(else_expr) %prec _if 
{ Expression.if_ ~loc:(to_loc $loc) $3 $6 $7 } + | IF lparen expr rparen opt_eols expr ioption(else_expr) %prec _if { Expression.if_ ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $3 $6 $7 } while_expr: - | WHILE lparen expr rparen block { Expression.while_ ~loc:(to_loc $loc) $3 $5 } + | WHILE lparen expr rparen block { Expression.while_ ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $3 $5 } for_inner_expr: | %prec EOL { None } | expr { Some $1 } for_expr: - | FOR lparen block_body_expr? opt_eols SEMI opt_eols for_inner_expr opt_eols SEMI opt_eols for_inner_expr rparen block { Expression.for_ ~loc:(to_loc $loc) $3 $7 $11 $13 } + | FOR lparen block_body_expr? opt_eols SEMI opt_eols for_inner_expr opt_eols SEMI opt_eols for_inner_expr rparen block { Expression.for_ ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $3 $7 $11 $13 } when_guard: | opt_eols WHEN expr { $3 } @@ -568,37 +568,37 @@ match_branches: | lseparated_nonempty_list(comma, match_branch) comma? { $1 } match_expr: - | MATCH lparen expr rparen lbrace match_branches rbrace { Expression.match_ ~loc:(to_loc $loc) $3 $6 } + | MATCH lparen expr rparen lbrace match_branches rbrace { Expression.match_ ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $3 (mkloc $6 (to_loc (fst $loc($5), snd $loc($7)))) } list_item: | ELLIPSIS expr { ListSpread ($2, to_loc $loc) } | expr { ListItem $1 } list_expr: - | lbrack rbrack { Expression.list ~loc:(to_loc $loc) [] } - | lbrack lseparated_nonempty_list(comma, list_item) comma? rbrack { Expression.list ~loc:(to_loc $loc) $2 } + | lbrack rbrack { Expression.list ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) [] } + | lbrack lseparated_nonempty_list(comma, list_item) comma? rbrack { Expression.list ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $2 } array_expr: - | lbrackrcaret rbrack { Expression.array ~loc:(to_loc $loc) [] } - | lbrackrcaret opt_eols lseparated_nonempty_list(comma, expr) comma? rbrack { Expression.array ~loc:(to_loc $loc) $3 } + | lbrackrcaret rbrack { Expression.array ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) [] } + | lbrackrcaret opt_eols lseparated_nonempty_list(comma, expr) comma? 
rbrack { Expression.array ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $3 } stmt_expr: - | THROW expr { Expression.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "throw"]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } - | ASSERT expr { Expression.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "assert"]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } - | FAIL expr { Expression.apply ~loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "fail"]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } + | THROW expr { Expression.apply ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "throw"]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } + | ASSERT expr { Expression.apply ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "assert"]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } + | FAIL expr { Expression.apply ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) (mkid_expr $loc($1) [mkstr $loc($1) "fail"]) [{paa_label=Unlabeled; paa_expr=$2; paa_loc=(to_loc $loc($2))}] } // allow DASH to cause a shift instead of the usual reduction of the left side for subtraction - | RETURN ioption(expr) %prec _below_infix { Expression.return ~loc:(to_loc $loc) $2 } - | CONTINUE { Expression.continue ~loc:(to_loc $loc) () } - | BREAK { Expression.break ~loc:(to_loc $loc) () } + | RETURN ioption(expr) %prec _below_infix { Expression.return ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $2 } + | CONTINUE { Expression.continue ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) () } + | BREAK { Expression.break ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) () } | use_stmt { $1 } assign_binop_op: | INFIX_ASSIGNMENT_10 { mkstr $loc $1 } assign_expr: - | left_accessor_expr GETS opt_eols expr { Expression.box_assign ~loc:(to_loc $loc) $1 $4 } - | id_expr equal expr { Expression.assign ~loc:(to_loc $loc) $1 $3 } - | id_expr assign_binop_op opt_eols expr { Expression.assign ~loc:(to_loc $loc) $1 (Expression.apply ~loc:(to_loc $loc) (mkid_expr $loc($2) [$2]) [{paa_label=Unlabeled; paa_expr=$1; paa_loc=(to_loc $loc($1))}; {paa_label=Unlabeled; paa_expr=$4; paa_loc=(to_loc $loc($4))}]) } + | left_accessor_expr GETS opt_eols expr { Expression.box_assign ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $4 } + | id_expr equal expr { Expression.assign ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3 } + | id_expr assign_binop_op opt_eols expr { Expression.assign ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 (Expression.apply ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) (mkid_expr $loc($2) [$2]) [{paa_label=Unlabeled; paa_expr=$1; paa_loc=(to_loc $loc($1))}; {paa_label=Unlabeled; paa_expr=$4; paa_loc=(to_loc $loc($4))}]) } | record_set { $1 } | array_set { $1 } @@ -632,24 +632,24 @@ tuple_exprs: | expr COMMA tuple_expr_ending { $1::$3 } array_get: - | left_accessor_expr lbrack expr rbrack { Expression.array_get ~loc:(to_loc $loc) $1 $3 } + | left_accessor_expr lbrack expr rbrack { Expression.array_get ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3 } array_set: - | left_accessor_expr lbrack expr rbrack equal expr { Expression.array_set ~loc:(to_loc $loc) $1 $3 $6 } - | left_accessor_expr lbrack expr rbrack assign_binop_op expr { Expression.array_set ~loc:(to_loc $loc) $1 $3 (Expression.apply ~loc:(to_loc $loc) (mkid_expr $loc($5) [$5]) [{paa_label=Unlabeled; paa_expr=Expression.array_get ~loc:(to_loc $loc) $1 $3; paa_loc=(to_loc $loc($6))}; {paa_label=Unlabeled; paa_expr=$6; 
paa_loc=(to_loc $loc($6))}]) } + | left_accessor_expr lbrack expr rbrack equal expr { Expression.array_set ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3 $6 } + | left_accessor_expr lbrack expr rbrack assign_binop_op expr { Expression.array_set ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3 (Expression.apply ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) (mkid_expr $loc($5) [$5]) [{paa_label=Unlabeled; paa_expr=Expression.array_get ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3; paa_loc=(to_loc $loc($6))}; {paa_label=Unlabeled; paa_expr=$6; paa_loc=(to_loc $loc($6))}]) } record_get: - | left_accessor_expr dot lid { Expression.record_get ~loc:(to_loc $loc) $1 $3 } + | left_accessor_expr dot lid { Expression.record_get ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3 } record_set: - | left_accessor_expr dot lid equal expr { Expression.record_set ~loc:(to_loc $loc) $1 $3 $5 } - | left_accessor_expr dot lid assign_binop_op opt_eols expr { Expression.record_set ~loc:(to_loc $loc) $1 $3 (Expression.apply ~loc:(to_loc $loc) (mkid_expr $loc($4) [$4]) [{paa_label=Unlabeled; paa_expr=Expression.record_get ~loc:(to_loc $loc) $1 $3; paa_loc=(to_loc $loc($6))}; {paa_label=Unlabeled; paa_expr=$6; paa_loc=(to_loc $loc($6))}]) } + | left_accessor_expr dot lid equal expr { Expression.record_set ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3 $5 } + | left_accessor_expr dot lid assign_binop_op opt_eols expr { Expression.record_set ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3 (Expression.apply ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) (mkid_expr $loc($4) [$4]) [{paa_label=Unlabeled; paa_expr=Expression.record_get ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 $3; paa_loc=(to_loc $loc($6))}; {paa_label=Unlabeled; paa_expr=$6; paa_loc=(to_loc $loc($6))}]) } %inline record_field_value: | colon expr {$2} punned_record_field: - | qualified_lid { RecordItem ($1, (Expression.ident ~loc:(to_loc $loc) $1)) } + | qualified_lid { RecordItem ($1, (Expression.ident ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1)) } non_punned_record_field: | qualified_lid record_field_value { RecordItem ($1, $2) } @@ -700,18 +700,18 @@ module_stmt: | MODULE UIDENT lbrace toplevel_stmts RBRACE { ModuleDeclaration.mk ~loc:(to_loc $loc) (mkstr $loc($2) $2) $4 } toplevel_stmt: - | attributes LET REC value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~attributes:$1 NotProvided Recursive Immutable $4 } - | attributes LET value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~attributes:$1 NotProvided Nonrecursive Immutable $3 } - | attributes LET REC MUT value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~attributes:$1 NotProvided Recursive Mutable $5 } - | attributes LET MUT value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~attributes:$1 NotProvided Nonrecursive Mutable $4 } - | attributes data_declaration_stmts { Toplevel.data ~loc:(to_loc $sloc) ~attributes:$1 $2 } - | attributes foreign_stmt { Toplevel.foreign ~loc:(to_loc $loc) ~attributes:$1 NotProvided $2 } - | attributes include_stmt { Toplevel.include_ ~loc:(to_loc $loc) ~attributes:$1 $2 } - | attributes module_stmt { Toplevel.module_ ~loc:(to_loc $loc) ~attributes:$1 NotProvided $2 } - | attributes primitive_stmt { Toplevel.primitive ~loc:(to_loc $loc) ~attributes:$1 NotProvided $2 } - | expr { Toplevel.expr ~loc:(to_loc $loc) $1 } + | attributes LET REC value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 NotProvided Recursive Immutable $4 } + | attributes LET value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst 
$loc($2), snd $loc)) ~attributes:$1 NotProvided Nonrecursive Immutable $3 } + | attributes LET REC MUT value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 NotProvided Recursive Mutable $5 } + | attributes LET MUT value_binds { Toplevel.let_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 NotProvided Nonrecursive Mutable $4 } + | attributes data_declaration_stmts { Toplevel.data ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 $2 } + | attributes foreign_stmt { Toplevel.foreign ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 NotProvided $2 } + | attributes include_stmt { Toplevel.include_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 $2 } + | attributes module_stmt { Toplevel.module_ ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 NotProvided $2 } + | attributes primitive_stmt { Toplevel.primitive ~loc:(to_loc $sloc) ~core_loc:(to_loc (fst $loc($2), snd $loc)) ~attributes:$1 NotProvided $2 } + | expr { Toplevel.expr ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) $1 } | provide_stmt { $1 } - | exception_stmt { Toplevel.grain_exception ~loc:(to_loc $loc) NotProvided $1 } + | exception_stmt { Toplevel.grain_exception ~loc:(to_loc $loc) ~core_loc:(to_loc $loc) NotProvided $1 } toplevel_stmts: | lseparated_nonempty_list(eos, toplevel_stmt) eos? { $1 } @@ -720,5 +720,5 @@ module_header: | MODULE UIDENT { mkstr $loc($2) $2 } program: - | opt_eols module_header eos toplevel_stmts EOF { make_program $2 $4 } - | opt_eols module_header eos? EOF { make_program $2 [] } + | opt_eols module_header eos toplevel_stmts EOF { make_program ~loc:(to_loc $sloc) $2 $4 } + | opt_eols module_header eos? EOF { make_program ~loc:(to_loc $sloc) $2 [] } diff --git a/compiler/src/parsing/parser_header.re b/compiler/src/parsing/parser_header.re index d9972f87c8..810286969b 100644 --- a/compiler/src/parsing/parser_header.re +++ b/compiler/src/parsing/parser_header.re @@ -4,10 +4,6 @@ open Parsetree; open Ast_helper; open Grain_utils; -/* Used for error messages. 
*/ -let first_loc = ref(Location.dummy_loc); -let last_loc = ref(Location.dummy_loc); - let make_line_comment = (source, loc) => { let content = String_utils.slice(~first=2, source) |> String.trim; Line({cmt_content: content, cmt_source: source, cmt_loc: loc}); @@ -35,9 +31,7 @@ let make_doc_comment = (source, loc) => { }; let to_loc = ((loc_start, loc_end)) => { - let ret = {loc_start, loc_end, loc_ghost: false}; - last_loc := ret; - ret; + {loc_start, loc_end, loc_ghost: false}; }; let fix_tyvar_mapper = super => { @@ -89,8 +83,10 @@ let mkid = ns => { mkloc @@ help(ns); }; -let mkid_expr = (loc, ns) => - Expression.ident(~loc=to_loc(loc), mkid(ns, to_loc(loc))); +let mkid_expr = (loc, ns) => { + let loc = to_loc(loc); + Expression.ident(~loc, ~core_loc=loc, mkid(ns, loc)); +}; let mkstr = (loc, s) => mkloc(s, to_loc(loc)); @@ -107,16 +103,15 @@ let make_module_alias = ident => { }; }; -let make_program = (module_name, statements) => { - let prog_loc = { - loc_start: first_loc^.loc_end, - loc_end: last_loc^.loc_end, - loc_ghost: false, - }; +let make_program = (~loc, module_name, statements) => { + // Ensure the program loc starts at the beginning of the file even if + // there's whitespace or comments + let loc_start = {...loc.loc_start, pos_lnum: 1, pos_cnum: 0, pos_bol: 0}; + let prog_loc = {...loc, loc_start}; + fix_blocks({module_name, statements, comments: [], prog_loc}); }; let parse_program = (program, token, lexbuf) => { - first_loc := Location.curr(lexbuf); program(token); }; diff --git a/compiler/src/parsing/parsetree.re b/compiler/src/parsing/parsetree.re index a73c1e608e..afc13f44a1 100644 --- a/compiler/src/parsing/parsetree.re +++ b/compiler/src/parsing/parsetree.re @@ -154,6 +154,16 @@ and number_type = | PConstNumberFloat(string) | PConstNumberRational(string, string); +[@deriving (sexp, yojson)] +type list_item('a) = + | ListItem('a) + | ListSpread('a, Location.t); + +[@deriving (sexp, yojson)] +type record_item('a) = + | RecordItem(loc(Identifier.t), 'a) + | RecordSpread('a, Location.t); + /** Various binding forms */ [@deriving (sexp, yojson)] @@ -161,6 +171,7 @@ type pattern_desc = | PPatAny | PPatVar(loc(string)) | PPatTuple(list(pattern)) + | PPatList(list(list_item(pattern))) | PPatArray(list(pattern)) | PPatRecord(list((loc(Identifier.t), pattern)), closed_flag) | PPatConstant(constant) @@ -493,7 +504,9 @@ type expression = { pexp_desc: expression_desc, pexp_attributes: attributes, [@sexp_drop_if sexp_locs_disabled] - pexp_loc: Location.t, + pexp_loc: Location.t, // The full location, including attributes + [@sexp_drop_if sexp_locs_disabled] + pexp_core_loc: Location.t // The core expression location, without attributes } [@deriving (sexp, yojson)] @@ -501,6 +514,7 @@ and expression_desc = | PExpId(loc(Identifier.t)) | PExpConstant(constant) | PExpTuple(list(expression)) + | PExpList(list(list_item(expression))) | PExpArray(list(expression)) | PExpArrayGet(expression, expression) | PExpArraySet(expression, expression, expression) @@ -508,7 +522,7 @@ and expression_desc = | PExpRecordGet(expression, loc(Identifier.t)) | PExpRecordSet(expression, loc(Identifier.t), expression) | PExpLet(rec_flag, mut_flag, list(value_binding)) - | PExpMatch(expression, list(match_branch)) + | PExpMatch(expression, loc(list(match_branch))) | PExpPrim0(prim0) | PExpPrim1(prim1, expression) | PExpPrim2(prim2, expression, expression) @@ -639,7 +653,7 @@ and toplevel_stmt_desc = | PTopForeign(provide_flag, value_description) | PTopPrimitive(provide_flag, primitive_description) | 
PTopModule(provide_flag, module_declaration) - | PTopData(list((provide_flag, data_declaration))) + | PTopData(list((provide_flag, data_declaration, Location.t))) | PTopLet(provide_flag, rec_flag, mut_flag, list(value_binding)) | PTopExpr(expression) | PTopException(provide_flag, type_exception) @@ -650,7 +664,9 @@ and toplevel_stmt = { ptop_desc: toplevel_stmt_desc, ptop_attributes: attributes, [@sexp_drop_if sexp_locs_disabled] - ptop_loc: Location.t, + ptop_loc: Location.t, // The full location, including attributes + [@sexp_drop_if sexp_locs_disabled] + ptop_core_loc: Location.t // The core location, without attributes }; [@deriving (sexp, yojson)] diff --git a/compiler/src/parsing/parsetree_iter.re b/compiler/src/parsing/parsetree_iter.re index d54ad14c42..fcb6d9e3c8 100644 --- a/compiler/src/parsing/parsetree_iter.re +++ b/compiler/src/parsing/parsetree_iter.re @@ -35,8 +35,10 @@ type hooks = { leave_let: (rec_flag, mut_flag, list(value_binding)) => unit, enter_value_binding: value_binding => unit, leave_value_binding: value_binding => unit, - enter_data_declarations: list((provide_flag, data_declaration)) => unit, - leave_data_declarations: list((provide_flag, data_declaration)) => unit, + enter_data_declarations: + list((provide_flag, data_declaration, Location.t)) => unit, + leave_data_declarations: + list((provide_flag, data_declaration, Location.t)) => unit, enter_data_declaration: data_declaration => unit, leave_data_declaration: data_declaration => unit, }; @@ -70,10 +72,12 @@ let iter_ident = (hooks, id) => { iter(id.txt); }; -let iter_attribute = (hooks, (attr_name, attr_args) as attr) => { +let iter_attribute = + (hooks, {Asttypes.attr_name, attr_args, attr_loc} as attr) => { hooks.enter_attribute(attr); iter_loc(hooks, attr_name); List.iter(iter_loc(hooks), attr_args); + iter_location(hooks, attr_loc); hooks.leave_attribute(attr); }; @@ -92,9 +96,18 @@ and iter_toplevel_stmts = (hooks, stmts) => { } and iter_toplevel_stmt = - (hooks, {ptop_desc: desc, ptop_attributes: attrs, ptop_loc: loc} as top) => { + ( + hooks, + { + ptop_desc: desc, + ptop_attributes: attrs, + ptop_loc: loc, + ptop_core_loc: core_loc, + } as top, + ) => { hooks.enter_toplevel_stmt(top); iter_location(hooks, loc); + iter_location(hooks, core_loc); iter_attributes(hooks, attrs); switch (desc) { | PTopInclude(id) => iter_include(hooks, id) @@ -165,7 +178,13 @@ and iter_primitive_description = and iter_data_declarations = (hooks, dds) => { hooks.enter_data_declarations(dds); - List.iter(((_, d)) => iter_data_declaration(hooks, d), dds); + List.iter( + ((_, d, l)) => { + iter_data_declaration(hooks, d); + iter_location(hooks, l); + }, + dds, + ); hooks.leave_data_declarations(dds); } @@ -223,14 +242,35 @@ and iter_expressions = (hooks, es) => { } and iter_expression = - (hooks, {pexp_desc: desc, pexp_attributes: attrs, pexp_loc: loc} as expr) => { + ( + hooks, + { + pexp_desc: desc, + pexp_attributes: attrs, + pexp_loc: loc, + pexp_core_loc: core_loc, + } as expr, + ) => { hooks.enter_expression(expr); iter_location(hooks, loc); + iter_location(hooks, core_loc); iter_attributes(hooks, attrs); switch (desc) { | PExpId(i) => iter_ident(hooks, i) | PExpConstant(c) => iter_constant(hooks, c) | PExpTuple(es) => iter_expressions(hooks, es) + | PExpList(es) => + List.iter( + item => { + switch (item) { + | ListItem(e) => iter_expression(hooks, e) + | ListSpread(e, loc) => + iter_expression(hooks, e); + iter_location(hooks, loc); + } + }, + es, + ) | PExpArray(es) => iter_expressions(hooks, es) | PExpArrayGet(a, 
i) => iter_expression(hooks, a); @@ -252,7 +292,8 @@ and iter_expression = | PExpLet(r, m, vbs) => iter_let(hooks, r, m, vbs) | PExpMatch(e, mbs) => iter_expression(hooks, e); - List.iter(iter_match_branch(hooks), mbs); + iter_loc(hooks, mbs); + List.iter(iter_match_branch(hooks), mbs.txt); | PExpPrim0(p0) => () | PExpPrim1(p1, e) => iter_expression(hooks, e) | PExpPrim2(p2, e1, e2) => @@ -434,6 +475,18 @@ and iter_pattern = (hooks, {ppat_desc: desc, ppat_loc: loc} as pat) => { | PPatAny => () | PPatVar(sl) => iter_loc(hooks, sl) | PPatTuple(pl) => iter_patterns(hooks, pl) + | PPatList(pl) => + List.iter( + item => { + switch (item) { + | ListItem(p) => iter_pattern(hooks, p) + | ListSpread(p, loc) => + iter_pattern(hooks, p); + iter_location(hooks, loc); + } + }, + pl, + ) | PPatArray(pl) => iter_patterns(hooks, pl) | PPatRecord(fs, _) => iter_record_patterns(hooks, fs) | PPatConstant(c) => iter_constant(hooks, c) diff --git a/compiler/src/parsing/parsetree_iter.rei b/compiler/src/parsing/parsetree_iter.rei index bda1169d12..39fdb965fc 100644 --- a/compiler/src/parsing/parsetree_iter.rei +++ b/compiler/src/parsing/parsetree_iter.rei @@ -35,8 +35,10 @@ type hooks = { leave_let: (rec_flag, mut_flag, list(value_binding)) => unit, enter_value_binding: value_binding => unit, leave_value_binding: value_binding => unit, - enter_data_declarations: list((provide_flag, data_declaration)) => unit, - leave_data_declarations: list((provide_flag, data_declaration)) => unit, + enter_data_declarations: + list((provide_flag, data_declaration, Location.t)) => unit, + leave_data_declarations: + list((provide_flag, data_declaration, Location.t)) => unit, enter_data_declaration: data_declaration => unit, leave_data_declaration: data_declaration => unit, }; diff --git a/compiler/src/parsing/well_formedness.re b/compiler/src/parsing/well_formedness.re index 48513edfd7..a6b646dc34 100644 --- a/compiler/src/parsing/well_formedness.re +++ b/compiler/src/parsing/well_formedness.re @@ -351,7 +351,8 @@ let known_attributes = [ ]; let valid_attributes = (errs, super) => { - let enter_attribute = (({txt, loc}, args) as attr) => { + let enter_attribute = + ({Asttypes.attr_name: {txt, loc}, attr_args: args} as attr) => { switch (List.find_opt(({name}) => name == txt, known_attributes)) { | Some({arity}) when List.length(args) != arity => errs := [InvalidAttributeArity(txt, arity, loc), ...errs^] @@ -372,8 +373,13 @@ let valid_attributes = (errs, super) => { let disallowed_attributes = (errs, super) => { let enter_expression = ({pexp_desc: desc, pexp_attributes: attrs} as e) => { - switch (List.find_opt((({txt}, _)) => txt == "externalName", attrs)) { - | Some(({txt, loc}, _)) => + switch ( + List.find_opt( + ({Asttypes.attr_name: {txt}}) => txt == "externalName", + attrs, + ) + ) { + | Some({Asttypes.attr_name: {txt, loc}}) => errs := [ AttributeDisallowed( @@ -387,8 +393,13 @@ let disallowed_attributes = (errs, super) => { }; let enter_toplevel_stmt = ({ptop_desc: desc, ptop_attributes: attrs} as top) => { - switch (List.find_opt((({txt}, _)) => txt == "externalName", attrs)) { - | Some(({txt, loc}, _)) => + switch ( + List.find_opt( + ({Asttypes.attr_name: {txt}}) => txt == "externalName", + attrs, + ) + ) { + | Some({Asttypes.attr_name: {txt, loc}}) => switch (desc) { | PTopForeign(_) | PTopLet( @@ -511,7 +522,7 @@ let malformed_return_statements = (errs, super) => { false | PExpIf(_, ifso, Some(ifnot)) => has_returning_branch(ifso) || has_returning_branch(ifnot) - | PExpMatch(_, branches) => + | PExpMatch(_, {txt: 
branches}) => List.exists(branch => has_returning_branch(branch.pmb_body), branches) | _ => false }; @@ -536,7 +547,7 @@ let malformed_return_statements = (errs, super) => { | PExpIf(_, ifso, Some(ifnot)) when has_returning_branch(exp) => collect_non_returning_branches(ifso, []) @ collect_non_returning_branches(ifnot, acc) - | PExpMatch(_, branches) when has_returning_branch(exp) => + | PExpMatch(_, {txt: branches}) when has_returning_branch(exp) => List.fold_left( (acc, branch) => collect_non_returning_branches(branch.pmb_body, acc), @@ -635,7 +646,18 @@ let provided_multiple_times = (errs, super) => { switch (pattern.ppat_desc) { | PPatAny => binds | PPatVar(bind) => [bind, ...binds] - | PPatTuple(pats) + | PPatTuple(pats) => List.fold_left(extract_bindings, binds, pats) + | PPatList(pats) => + List.fold_left( + (binds, item) => { + switch (item) { + | ListItem(p) => extract_bindings(binds, p) + | ListSpread(p, loc) => extract_bindings(binds, p) + } + }, + binds, + pats, + ) | PPatArray(pats) => List.fold_left(extract_bindings, binds, pats) | PPatRecord(pats, _) => List.fold_left( @@ -720,14 +742,14 @@ let provided_multiple_times = (errs, super) => { List.iter( decl => { switch (decl) { - | (Provided | Abstract, {pdata_name, pdata_loc}) => + | (Provided | Abstract, {pdata_name, pdata_loc}, _) => if (Hashtbl.mem(types, pdata_name.txt)) { errs := [ProvidedMultipleTimes(pdata_name.txt, pdata_loc), ...errs^]; } else { Hashtbl.add(types, pdata_name.txt, ()); } - | (NotProvided, _) => () + | (NotProvided, _, _) => () } }, decls, @@ -827,12 +849,12 @@ let provided_multiple_times = (errs, super) => { let mutual_rec_type_improper_rec_keyword = (errs, super) => { let enter_toplevel_stmt = ({ptop_desc: desc, ptop_loc: loc} as e) => { switch (desc) { - | PTopData([(_, first_decl), ...[_, ..._] as rest_decls]) => + | PTopData([(_, first_decl, _), ...[_, ..._] as rest_decls]) => if (first_decl.pdata_rec != Recursive) { errs := [MutualRecTypesMissingRec(loc), ...errs^]; } else { List.iter( - ((_, decl)) => + ((_, decl, _)) => switch (decl) { | {pdata_rec: Recursive} => errs := [MutualRecExtraneousNonfirstRec(loc), ...errs^] diff --git a/compiler/src/typed/translprim.re b/compiler/src/typed/translprim.re index 7584c63125..34686dd2bf 100644 --- a/compiler/src/typed/translprim.re +++ b/compiler/src/typed/translprim.re @@ -26,6 +26,7 @@ let default_loc = Location.dummy_loc; let mkident = name => Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mkloc( Identifier.IdentName(Location.mkloc(name, default_loc)), default_loc, @@ -1459,13 +1460,20 @@ let active_memory_base = () => { let transl_prim = (env, desc) => { let loc = desc.pprim_loc; + let core_loc = desc.pprim_loc; let prim = try(PrimMap.find(prim_map, desc.pprim_name.txt)) { | Not_found => failwith("This primitive does not exist.") }; - let disable_gc = [(Location.mknoloc("disableGC"), [])]; + let disable_gc = [ + { + Asttypes.attr_name: Location.mknoloc("disableGC"), + attr_args: [], + attr_loc: Location.dummy_loc, + }, + ]; let lambda_arg = pat => { pla_label: Unlabeled, @@ -1491,7 +1499,7 @@ let transl_prim = (env, desc) => { [], ) }; - (Expression.constant(~loc, ~attributes, value), typ); + (Expression.constant(~loc, ~core_loc, ~attributes, value), typ); | Primitive0(p) => let attributes = switch (p) { @@ -1508,12 +1516,21 @@ let transl_prim = (env, desc) => { | HeapTypeMetadata => disable_gc }; ( - Expression.lambda(~loc, ~attributes, [], Expression.prim0(~loc, p)), + Expression.lambda( + ~loc, + ~core_loc, + 
~attributes, + [], + Expression.prim0(~loc, ~core_loc, p), + ), Typecore.prim0_type(p), ); | Primitive1(BuiltinId as p) => // This primitive must always be inlined, so we do not generate a lambda - (Expression.constant(~loc, PConstVoid), Typecore.prim1_type(p)) + ( + Expression.constant(~loc, ~core_loc, PConstVoid), + Typecore.prim1_type(p), + ) | Primitive1(p) => let attributes = switch (p) { @@ -1565,9 +1582,10 @@ let transl_prim = (env, desc) => { ( Expression.lambda( ~loc, + ~core_loc, ~attributes, [lambda_arg(pat_a)], - Expression.prim1(~loc, p, id_a), + Expression.prim1(~loc, ~core_loc, p, id_a), ), Typecore.prim1_type(p), ); @@ -1591,9 +1609,10 @@ let transl_prim = (env, desc) => { ( Expression.lambda( ~loc, + ~core_loc, ~attributes, [lambda_arg(pat_a), lambda_arg(pat_b)], - Expression.prim2(~loc, p, id_a, id_b), + Expression.prim2(~loc, ~core_loc, p, id_a, id_b), ), Typecore.prim2_type(p), ); @@ -1611,9 +1630,10 @@ let transl_prim = (env, desc) => { ( Expression.lambda( ~loc, + ~core_loc, ~attributes, [lambda_arg(pat_a), lambda_arg(pat_b), lambda_arg(pat_c)], - Expression.primn(~loc, p, [id_a, id_b, id_c]), + Expression.primn(~loc, ~core_loc, p, [id_a, id_b, id_c]), ), Typecore.primn_type(p), ); diff --git a/compiler/src/typed/typecore.re b/compiler/src/typed/typecore.re index f4a1fa049a..7bfad9bdf0 100644 --- a/compiler/src/typed/typecore.re +++ b/compiler/src/typed/typecore.re @@ -567,7 +567,7 @@ let rec final_subexpression = sexp => switch (sexp.pexp_desc) { | PExpIf(_, e, _) | PExpWhile(_, e) - | PExpMatch(_, [{pmb_body: e}, ..._]) => final_subexpression(e) + | PExpMatch(_, {txt: [{pmb_body: e}, ..._]}) => final_subexpression(e) | PExpBlock(es) => try(final_subexpression(last(es))) { | Not_found => sexp @@ -629,7 +629,7 @@ let rec approx_type = (env, sty) => let rec type_approx = (env, sexp: Parsetree.expression) => switch (sexp.pexp_desc) { | PExpLet(_, _, _) => Builtin_types.type_void - | PExpMatch(_, [{pmb_body: e}, ..._]) => type_approx(env, e) + | PExpMatch(_, {txt: [{pmb_body: e}, ..._]}) => type_approx(env, e) | PExpIf(_, e, _) => type_approx(env, e) | PExpWhile(_, e) => type_approx(env, e) | PExpLambda(args, e) => @@ -787,6 +787,7 @@ and type_expect_ = (~in_function=?, ~recarg=Rejected, env, sexp, ty_expected_explained) => { let {ty: ty_expected, explanation} = ty_expected_explained; let loc = sexp.pexp_loc; + let core_loc = sexp.pexp_core_loc; let attributes = Typetexp.type_attributes(sexp.pexp_attributes); /* Record the expression type before unifying it with the expected type */ let type_expect = type_expect(~in_function?); @@ -846,6 +847,58 @@ and type_expect_ = exp_type: newty(TTyTuple(List.map(e => e.exp_type, expl))), exp_env: env, }); + | PExpList(es) => + let convert_list = (~loc, ~core_loc, ~attributes=?, a) => { + open Ast_helper; + let empty = + Expression.tuple_construct(~loc, ~core_loc, ident_empty, []); + let list = + switch (List.rev(a)) { + | [] => empty + | [base, ...rest] => + let base = + switch (base) { + | ListItem(expr) => + Expression.tuple_construct( + ~loc, + ~core_loc, + ~attributes?, + ident_cons, + [expr, empty], + ) + | ListSpread(expr, _) => expr + }; + List.fold_left( + (acc, expr) => { + switch (expr) { + | ListItem(expr) => + Expression.tuple_construct( + ~loc, + ~core_loc, + ~attributes?, + ident_cons, + [expr, acc], + ) + | ListSpread(_, loc) => + raise( + SyntaxError( + loc, + "A list spread can only appear at the end of a list.", + ), + ) + } + }, + base, + rest, + ); + }; + {...list, pexp_loc: loc}; + }; + type_expect( + env, + 
convert_list(~loc, ~core_loc, ~attributes=sexp.pexp_attributes, es), + ty_expected_explained, + ); | PExpArray(es) => let ty = newgenvar(); let to_unify = Builtin_types.type_array(ty); @@ -1144,6 +1197,7 @@ and type_expect_ = | Some(default) => let default_value_name = mknoloc("$default_value"); let default_loc = default.pexp_loc; + let default_core_loc = default.pexp_core_loc; let scases = [ MatchBranch.mk( ~loc=default_loc, @@ -1156,6 +1210,7 @@ and type_expect_ = ), Expression.ident( ~loc=default_loc, + ~core_loc=default_core_loc, mknoloc(Identifier.IdentName(default_value_name)), ), None, @@ -1180,16 +1235,19 @@ and type_expect_ = let smatch = Expression.match( ~loc=sloc, + ~core_loc=sloc, Expression.ident( ~loc=sloc, + ~core_loc=sloc, mknoloc(Identifier.IdentName(opt_name)), ), - scases, + mknoloc(scases), ); let pat = Pattern.var(~loc=sloc, opt_name); let prelude_expr = Expression.let_( ~loc=sloc, + ~core_loc=sloc, Nonrecursive, Immutable, [ValueBinding.mk(~loc=arg.pla_loc, arg.pla_pattern, smatch)], @@ -1222,7 +1280,12 @@ and type_expect_ = let body = switch (prelude) { | [] => body - | _ => Expression.block(~loc=body.pexp_loc, prelude @ [body]) + | _ => + Expression.block( + ~loc=body.pexp_loc, + ~core_loc=body.pexp_core_loc, + prelude @ [body], + ) }; type_function( ~in_function?, @@ -1286,7 +1349,7 @@ and type_expect_ = ty_expected, true, loc, - branches, + branches.txt, ); re({ exp_desc: TExpMatch(arg, val_cases, partial), @@ -1930,6 +1993,7 @@ and type_construct = (env, loc, lid, sarg, ty_expected_explained, attrs) => { pexp_desc: PExpRecord(None, rfs), pexp_attributes: attrs, pexp_loc: loc, + pexp_core_loc: loc, }, ], true, diff --git a/compiler/src/typed/typedecl.re b/compiler/src/typed/typedecl.re index 91ccc7ca78..dad8795e88 100644 --- a/compiler/src/typed/typedecl.re +++ b/compiler/src/typed/typedecl.re @@ -612,7 +612,7 @@ let check_duplicates = sdecl_list => { let labels = Hashtbl.create(7) and constrs = Hashtbl.create(7); List.iter( - ((_, sdecl)) => + ((_, sdecl, _)) => switch (sdecl.pdata_kind) { | PDataAbstract => () | PDataVariant(cl) => @@ -681,7 +681,7 @@ let transl_data_decl = (env, rec_flag, sdecl_list) => { /* Create identifiers. */ let id_list = List.map( - ((_, sdecl)) => Ident.create(sdecl.pdata_name.txt), + ((_, sdecl, _)) => Ident.create(sdecl.pdata_name.txt), sdecl_list, ); @@ -698,7 +698,7 @@ let transl_data_decl = (env, rec_flag, sdecl_list) => { List.fold_left2( enter_type(rec_flag), env, - List.map(snd, sdecl_list), + List.map(((_, snd, _)) => snd, sdecl_list), id_list, ); /* Translate each declaration. 
*/ @@ -727,7 +727,7 @@ let transl_data_decl = (env, rec_flag, sdecl_list) => { | Asttypes.Nonrecursive => (id, None) }; - let transl_declaration = ((provide_flag, name_sdecl), (id, slot)) => { + let transl_declaration = ((provide_flag, name_sdecl, _), (id, slot)) => { current_slot := slot; transl_declaration(temp_env, provide_flag, name_sdecl, id); }; @@ -754,7 +754,7 @@ let transl_data_decl = (env, rec_flag, sdecl_list) => { | Asttypes.Nonrecursive => () | Asttypes.Recursive => List.iter2( - (id, (_, sdecl)) => + (id, (_, sdecl, _)) => update_type(temp_env, newenv, id, sdecl.pdata_loc), id_list, sdecl_list, @@ -766,7 +766,7 @@ let transl_data_decl = (env, rec_flag, sdecl_list) => { /* Check for ill-formed abbrevs */ let id_loc_list = List.map2( - (id, (_, sdecl)) => (id, sdecl.pdata_loc), + (id, (_, sdecl, _)) => (id, sdecl.pdata_loc), id_list, sdecl_list, ); @@ -799,7 +799,7 @@ let transl_data_decl = (env, rec_flag, sdecl_list) => { List.iter(check_abbrev_recursion(newenv, id_loc_list, to_check), tdecls); /* Check that all type variables are closed */ List.iter2( - ((_, sdecl), tdecl) => { + ((_, sdecl, _), tdecl) => { let decl = tdecl.data_type; switch (Ctype.closed_type_decl(decl)) { | Some(ty) => diff --git a/compiler/src/typed/typedecl.rei b/compiler/src/typed/typedecl.rei index b7eb9f3ed5..c6c044de57 100644 --- a/compiler/src/typed/typedecl.rei +++ b/compiler/src/typed/typedecl.rei @@ -23,7 +23,7 @@ let transl_data_decl: ( Env.t, Asttypes.rec_flag, - list((Asttypes.provide_flag, Parsetree.data_declaration)) + list((Asttypes.provide_flag, Parsetree.data_declaration, Location.t)) ) => (list(Typedtree.data_declaration), Env.t); diff --git a/compiler/src/typed/typemod.re b/compiler/src/typed/typemod.re index 9699a67412..11a5bb2e35 100644 --- a/compiler/src/typed/typemod.re +++ b/compiler/src/typed/typemod.re @@ -398,7 +398,7 @@ let rec type_module = (~toplevel=false, anchor, env, statements) => { // `rec` on mutually-recursive types let rec_flag = switch (data_decls) { - | [(_, {pdata_rec: Recursive}), ..._] => Recursive + | [(_, {pdata_rec: Recursive}, _), ..._] => Recursive | _ => Nonrecursive }; let (decls, newenv) = @@ -410,7 +410,7 @@ let rec type_module = (~toplevel=false, anchor, env, statements) => { switch (Typedecl.transl_data_decl(env, Recursive, data_decls)) { | exception exn => raise(exn) | _ => - let (_, {pdata_name: type_name}) = List.hd(data_decls); + let (_, {pdata_name: type_name}, _) = List.hd(data_decls); raise( Error( loc, @@ -444,7 +444,7 @@ let rec type_module = (~toplevel=false, anchor, env, statements) => { } }, decls, - List.map(((e, _)) => e, data_decls), + List.map(((e, _, _)) => e, data_decls), [], ); let ty_decls = List.filter_map(decl => decl, ty_decls); diff --git a/compiler/src/typed/typepat.re b/compiler/src/typed/typepat.re index 4d3e9d4b1b..f7f1f50aa1 100644 --- a/compiler/src/typed/typepat.re +++ b/compiler/src/typed/typepat.re @@ -45,6 +45,15 @@ type error = | UnrefutedPattern(pattern) | InlineRecordPatternMisuse(Identifier.t, string, string); +let ident_empty = { + txt: Identifier.IdentName(Location.mknoloc("[]")), + loc: Location.dummy_loc, +}; +let ident_cons = { + txt: Identifier.IdentName(Location.mknoloc("[...]")), + loc: Location.dummy_loc, +}; + exception Error(Location.t, Env.t, error); let iter_ppat = (f, p) => @@ -54,6 +63,16 @@ let iter_ppat = (f, p) => | PPatConstant(_) | PPatConstruct(_, PPatConstrSingleton) => () | PPatTuple(lst) => List.iter(f, lst) + | PPatList(lst) => + List.iter( + item => { + switch (item) { + | ListItem(p) => 
f(p) + | ListSpread(p, _) => f(p) + } + }, + lst, + ) | PPatArray(lst) => List.iter(f, lst) | PPatRecord(fs, _) | PPatConstruct(_, PPatConstrRecord(fs, _)) => @@ -665,6 +684,49 @@ and type_pat_aux = }, ), ); + | PPatList(spl) => + let convert_list = (~loc, a) => { + open Ast_helper; + let empty = Pattern.tuple_construct(~loc, ident_empty, []); + let a = List.rev(a); + switch (a) { + | [] => empty + | [base, ...rest] => + let base = + switch (base) { + | ListItem(pat) => + Pattern.tuple_construct(~loc, ident_cons, [pat, empty]) + | ListSpread(pat, _) => pat + }; + List.fold_left( + (acc, pat) => { + switch (pat) { + | ListItem(pat) => + Pattern.tuple_construct(~loc, ident_cons, [pat, acc]) + | ListSpread(_, loc) => + raise( + SyntaxError( + loc, + "A list spread can only appear at the end of a list.", + ), + ) + } + }, + base, + rest, + ); + }; + }; + type_pat( + ~constrs, + ~labels, + ~mode=mode', + ~explode, + ~env, + convert_list(~loc=sp.ppat_loc, spl), + expected_ty, + k, + ); | PPatArray(spl) => let arr_ty = newgenvar(); unify_pat_types( diff --git a/compiler/src/typed/typetexp.re b/compiler/src/typed/typetexp.re index f818ec73be..4adbc5f241 100644 --- a/compiler/src/typed/typetexp.re +++ b/compiler/src/typed/typetexp.re @@ -630,8 +630,8 @@ let fold_modtypes = fold_simple(Env.fold_modtypes); let type_attributes = attrs => { List.map( - (({txt, loc}, args)) => - switch (txt, args) { + ({attr_name: {txt, loc}, attr_args}) => + switch (txt, attr_args) { | ("disableGC", []) => Location.mkloc(Disable_gc, loc) | ("unsafe", []) => Location.mkloc(Unsafe, loc) | ("externalName", [name]) => diff --git a/compiler/test/grainfmt/aliases.expected.gr b/compiler/test/grainfmt/aliases.expected.gr index a8f4123b55..d964bac90f 100644 --- a/compiler/test/grainfmt/aliases.expected.gr +++ b/compiler/test/grainfmt/aliases.expected.gr @@ -2,7 +2,8 @@ module Aliases type Foo = String -type Foo = String // Test comment +type Foo = // Test comment + String type Foo = String @@ -10,7 +11,8 @@ type Foo = String type Foo1 = String // Test comment -type Foo2< // Test comment +type Foo2< + // Test comment a, b > = String @@ -30,7 +32,7 @@ type Foo3< b > = String -type Foo4 = String +type Foo4 = String // Test comment type Foo5 = String @@ -148,10 +150,11 @@ type Bar< ai > -type Foo< // opener +type Foo< + // opener a, /* Test comment */ b /* second test comment */ > = String //trailer -type Foo2 = - VeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLong // comment +type Foo2 = // comment + VeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLongVeryVeryLong diff --git a/compiler/test/grainfmt/application.expected.gr b/compiler/test/grainfmt/application.expected.gr index 3015d05d93..84fef09577 100644 --- a/compiler/test/grainfmt/application.expected.gr +++ b/compiler/test/grainfmt/application.expected.gr @@ -36,13 +36,17 @@ let findIndex = (x, y) => None * @returns The new array with only unique values */ provide let unique = array => { - filteri((el, index) => - findIndex(longvalue => longvalue == el, array) == Some(index), array) + filteri( + (el, index) => findIndex(longvalue => longvalue == el, array) == Some(index), + array + ) } provide let unique2 = array => { - filteri((el, index) => - findIndex(value => value == el, array) == Some(index), array) + filteri( + (el, index) => findIndex(value => value == el, array) == Some(index), + array + ) } provide let batchActionCreateAccount = ({ index }) => { @@ -55,33 +59,27 @@ 
let ret = filteri( length ) -let preExistingObjectsWithRefCountMismatch = Map.make(): Map.Map< - Number, - ( - Number, - Number, - Number, - Number - ) -> +let preExistingObjectsWithRefCountMismatch = Map.make(): + Map.Map let preEmatch = Map.make(): Map.Map let preExistingObjectsch2 = Map.make(): Map.Map -let preExistingObjectsWithRefCountMismatch2 = Map.make(): Map.Map< - Number, - ( - Number, - Number, +let preExistingObjectsWithRefCountMismatch2 = Map.make(): + Map.Map< Number, - Number, - Number, - Number, - Number, - Number, - Number, - Number, - Number - ) -> + ( + Number, + Number, + Number, + Number, + Number, + Number, + Number, + Number, + Number, + Number, + Number + ) + > diff --git a/compiler/test/grainfmt/application2.expected.gr b/compiler/test/grainfmt/application2.expected.gr index 7640402b42..baa9df3dbe 100644 --- a/compiler/test/grainfmt/application2.expected.gr +++ b/compiler/test/grainfmt/application2.expected.gr @@ -19,6 +19,15 @@ init3(index => { true }, positions) +[fn(x => { + let _ = x +}), fn(y => { + let _ = y + void +}), fn(z => { + let _ = z +})] + [ fn(x => { let _ = x @@ -30,6 +39,18 @@ init3(index => { fn(z => { let _ = z }), + fn(z => { + let _ = z + }), + fn(z => { + let _ = z + }), + fn(z => { + let _ = z + }), + fn(z => { + let _ = z + }), ] let makePrettyPrintJSONWriter = String.forEachCodePoint(c => { diff --git a/compiler/test/grainfmt/application2.input.gr b/compiler/test/grainfmt/application2.input.gr index cc824a6cc5..6edc9fad73 100644 --- a/compiler/test/grainfmt/application2.input.gr +++ b/compiler/test/grainfmt/application2.input.gr @@ -30,6 +30,19 @@ init3(index => { fn(z => {let _ = z}), ] +[ + fn(x => {let _ = x}), + fn(y => { + let _ = y + void + }), + fn(z => {let _ = z}), + fn(z => {let _ = z}), + fn(z => {let _ = z}), + fn(z => {let _ = z}), + fn(z => {let _ = z}), +] + let makePrettyPrintJSONWriter = String.forEachCodePoint(c => { // a void diff --git a/compiler/test/grainfmt/application_indenting.expected.gr b/compiler/test/grainfmt/application_indenting.expected.gr index bb58573638..9b5f189807 100644 --- a/compiler/test/grainfmt/application_indenting.expected.gr +++ b/compiler/test/grainfmt/application_indenting.expected.gr @@ -2,8 +2,10 @@ module ApplicationIndenting let flagsToWasmVal = (flag, i) => { let riiiiiiightsInheriting = Module64.load(structPtr, 16n) - (riiiiiiightsInheriting & - 1N << Module64.extendI32U(Module32.fromGrain(i) >> 1n)) > + ( + riiiiiiightsInheriting & + 1N << Module64.extendI32U(Module32.fromGrain(i) >> 1n) + ) > 0N } @@ -14,11 +16,13 @@ let grisuRound = (buffer, len, delta, rest, ten_kappa, wp_w) => { while ( Module64.ltU(rest, wp_w) && Module64.geU(Module64.sub(delta, rest), ten_kappa) && - (Module64.ltU(Module64.add(rest, ten_kappa), wp_w) || + ( + Module64.ltU(Module64.add(rest, ten_kappa), wp_w) || Module64.gtU( Module64.sub(wp_w, rest), Module64.sub(Module64.add(rest, ten_kappa), wp_w) - )) + ) + ) ) { digit -= 1n rest = Module64.add(rest, ten_kappa) diff --git a/compiler/test/grainfmt/arrays.expected.gr b/compiler/test/grainfmt/arrays.expected.gr index b4bfb0f0d3..f92102120c 100644 --- a/compiler/test/grainfmt/arrays.expected.gr +++ b/compiler/test/grainfmt/arrays.expected.gr @@ -39,5 +39,5 @@ let addRepeatedGroup = (groupN, state, pos, n, backAmt, callback) => { } } -provide let getBefore = (array, index) => if (array == [>]) "nope" -else array[index - 1] +provide let getBefore = (array, index) => + if (array == [>]) "nope" else array[index - 1] diff --git 
a/compiler/test/grainfmt/binop_perf.expected.gr b/compiler/test/grainfmt/binop_perf.expected.gr index 83cd97a8ec..cf3a593c5f 100644 --- a/compiler/test/grainfmt/binop_perf.expected.gr +++ b/compiler/test/grainfmt/binop_perf.expected.gr @@ -1,5 +1,4 @@ module BinopPerf - // performance regression test—this will hang seemingly forever if not performant val == Mod.func('🌾') || val == Mod.func('🌾') || diff --git a/compiler/test/grainfmt/binops.expected.gr b/compiler/test/grainfmt/binops.expected.gr index 77e39ca000..a69e7c5d54 100644 --- a/compiler/test/grainfmt/binops.expected.gr +++ b/compiler/test/grainfmt/binops.expected.gr @@ -12,7 +12,8 @@ if ( fofdfdfdfdfdfdfdfdfdfdfdo && bafdfdfdfdfdfdddefdfdfdfdfdr && badfdfdfdfdfdfdffdffdfdfdz -) true +) + true let a = (2 >> 1) << 1 diff --git a/compiler/test/grainfmt/blocks.expected.gr b/compiler/test/grainfmt/blocks.expected.gr index 48cac4655f..c09c98c4c9 100644 --- a/compiler/test/grainfmt/blocks.expected.gr +++ b/compiler/test/grainfmt/blocks.expected.gr @@ -25,30 +25,29 @@ let myfun1 = (a, b) => { false } -let myfun = - ( - a, - b, - c, - d, - e, - f, - g, - h, - i, - j, - k, - l, - m, - n, - o, - p, - aaaaa, - bbbbb, - ccccc, - dddddd, - eeeee, - ) => { +let myfun = ( + a, + b, + c, + d, + e, + f, + g, + h, + i, + j, + k, + l, + m, + n, + o, + p, + aaaaa, + bbbbb, + ccccc, + dddddd, + eeeee, +) => { true true false diff --git a/compiler/test/grainfmt/chained.expected.gr b/compiler/test/grainfmt/chained.expected.gr index 9d1e6ea17c..321903c3c4 100644 --- a/compiler/test/grainfmt/chained.expected.gr +++ b/compiler/test/grainfmt/chained.expected.gr @@ -1,9 +1,7 @@ module Chained -let g = 7 -// g represents the precision desired, p is the values of p[i] to plug into Lanczos' formula -and p = 8 -// another comment +let g = 7 // g represents the precision desired, p is the values of p[i] to plug into Lanczos' formula +and p = 8 // another comment and q = 32 let myFunction2 = x => { @@ -22,8 +20,7 @@ and q = 32 let myFunction2 = x => { let inter = x + 1 "some string" -} -// a comment +} // a comment and myFunction3 = y => { let myVal = 5 "some string" diff --git a/compiler/test/grainfmt/comments.expected.gr b/compiler/test/grainfmt/comments.expected.gr index ee20b5806e..09e29a5297 100644 --- a/compiler/test/grainfmt/comments.expected.gr +++ b/compiler/test/grainfmt/comments.expected.gr @@ -5,9 +5,9 @@ module Comments followed by a blank line */ -let myfun = (/* a */ x /*post*/, y) => 10 +let myfun = (/* a */ x, /*post*/ y) => 10 -let myfun1a /*special*/ = (x /*lead*/, y /*follow*/) => x + 5 +let myfun1a = /*special*/ (x, /*lead*/ y /*follow*/) => x + 5 /** * Then a doc @@ -15,7 +15,7 @@ let myfun1a /*special*/ = (x /*lead*/, y /*follow*/) => x + 5 // Then a line let myfun1 = (/* a */ x, y) => 10 -let myfun2 = (x /*post*/, y) => 10 +let myfun2 = (x, /*post*/ y) => 10 /** * Slices an array given zero-based start and end indexes. The value @@ -72,7 +72,8 @@ if (a /*why*/) { // inner comment 5 // trail 5 - 6 /* block 6a */ /* block 6 b */ + 6 /* block 6a */ + /* block 6 b */ // ending comment } else { 7 // on time @@ -81,10 +82,9 @@ if (a /*why*/) { // end block } -let myfun /*special*/ = (/*lead*/ x /*follow*/, y) => x + 5 +let myfun = /*special*/ (/*lead*/ x, /*follow*/ y) => x + 5 if (true) { - let noval = 5 // Tell the host where the key and value are located and their lengths. 
@@ -109,9 +109,10 @@ if ( true // comment ) true else false -if (true) +if ( + true // comment - true else false +) true else false if (true /* hey */) true else false @@ -147,3 +148,503 @@ Result.expect( // decRef(WasmI32.fromGrain(setDebug)) // void // } + +let { foo: /* yoyo */ bar } = 5 +let foo = (foo=/* yoyo */5) => 5 +let Foo(foo as /* yoyo */ bar) = 5 +let Foo(foo | /*yoyo*/ /*jaja*/ bar) = 5 + +let Foo{ + foo, + // comment + bar, /*2*/ + baz, /*3*/ + qux, /*4*/ + qux, /*4*/ + qux, /*4*/ + qux, /*4*/ + qux, /*4*/ + // trailer +} = 5 + +let Foo{ + foo, + // comment + bar, /*2*/ + baz, /*3*/ + qux, /*4*/ + // trailer +} = 5 + +let Foo{ + // comment + foo, + // comment + bar, +} = 5 + +let { foo: Foo{ + qux: quux, // comment +}, cccc, bar: baz } = 6 + +let Foo( + // yo + foo, // bar + baz, // heh +) = 1 + +let foo: /* int */ Int32 = 1l +let foo: /* int */ Int32 = 1l + +let { + foo, + // comment + bar, /*2*/ + baz, /*3*/ + qux, /*4*/ + qux, /*4*/ + qux, /*4*/ + qux, /*4*/ + qux, /*4*/ + // trailer +} = 5 + +let { + foo, + // comment + bar, /*2*/ + baz, /*3*/ + qux, /*4*/ + // trailer +} = 5 + +let { + // comment + foo, + // comment + bar, +} = 5 + +let [> /* foo */ foo, /* bar */ bar /* bar2 */] = 5 +let [> + /* foo */ foo, /* bar */ + bar, // bar2 +] = 5 + +let [/* foo */ foo, /* bar */ bar /* bar2 */] = 5 +let [ + /* foo */ foo, /* bar */ + bar, // bar2 +] = 5 +let [ + /* foo */ foo, /* bar */ + bar, // bar2 + ...rest +] = 5 +let [ + /* foo */ foo, /* bar */ + bar, // bar2 + ...rest // trailer +] = 5 + +let ( + // yo + foo, // bar + baz, // heh +) = 1 + +{ /* foo */ foo, } +{ foo: /* foo */ bar, } + +let { /* foo */ foo } = void +let { foo: /* foo */ bar } = void + +from Mod use { module /* Foo */ Foo, type /* Foo */ Foo, foo } + +match (5) { + 1 /* when */ when false => 5, + 1 /* when */ when false => 5, + 1 => // branch + 5, + 1 => /* branch */ { + 5 + }, + 1 => /* branch */ { + 5 + }, + 1 => /* branch1 */ /* branch2 */ { + 5 + }, +} + +@attr(/* foo */ "foo", /* bar */ "bar") +let _ = 1 +@attr( + // foo + "foo", /* bar */ + "bar", +) +let _ = 1 + +foo(foo=/*foo*/5) +foo(foo=/*foo*/5) + +if (/* if */ cond) { + 5 +} else { + 6 +} +if (/* if */ cond) { + 5 +} else { + 6 +} +if (cond /* if */) { + 5 +} else { + 6 +} +if (cond /* if */) { + 5 +} else { + 6 +} +if (cond) { + 5 +} /* if */ else { + 6 +} +if (cond) { + 5 +} /* if */ else { + 6 +} +if (cond) + 5 /* longlonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglong */ +else + 5 + +foo += /* foo */ 1 +foo += /* foo */ 1 +foo += /* foo */ 1 +foo += /* foo */ 1 +foo += /* foo */ /* foo */ 1 +foo += /* foo */ /* foo */ 1 + +{ + @attr + let // foo + _ = 1 + @attr( + "foo", /* doo */ + "foo", + // foo + // goo + // bar + ) + let /* doo */ // foo + // goo + // bar + _ = 1 +} + +Foo( + // yo + foo, // bar + baz, // heh +) +Foo{ + // comment + foo, + // comment + bar +} + +{ + // foo + foo + foo + + foo + + foo // same line foo + // next line foo + + // trailer +} + +{ + let rec mut /* let */ /* rec */ /* mut */ foo = 5 + + let foo = 5 // foo + and foo = 6 + + let foo = 5 + // foo + and foo = 6 + + let foo = 5 + + // foo + and foo = 6 +} + +!/* foo */true + +fail /* fail */ "fail" + +foo( + // yo + foo, // bar + baz // heh +) + +x => /* foo */ /* foo */ 5 +x => /* foo */ +// foo + 5 + +( + // yo + foo, // bar + baz, // heh + /* lambda */ +) => 5 + +( + // yo + foo, // bar + baz, // heh +) + +foo/* arr */[1] +foo/* arr */[1] = /* arr */ 1 + +{ + // comment + foo, + // comment + bar, +} +{ + foo, + // comment + bar, /*2*/ + baz, 
/*3*/ + qux, /*4*/ + qux, /*4*/ + qux, /*4*/ + qux, /*4*/ + qux, /*4*/ + // trailer +} +{ + ...old, + // comment + foo, + // comment + bar, +} +{ + // old + ...old, + // comment + foo, + // comment + bar, +} +{ + /* old */ + ...old, + // comment + foo, + // comment + bar, +} + +foo./* bar */bar +foo.// bar +bar + +foo := /* foo */ /* foo */ foo + +return /* return */ foo + +from /* from */ Mod use /* Mod */ /* use */ * +from /* from */ Mod use { /* Mod */ /* use */ foo } + +while (/* true */ true) { + void +} +while (/* true */ true) { + void +} +while (true /* true */) { + void +} +while (true /* true */) { + void +} + +for ( + /* for */ + /* init */ let mut i = 1; /*cond*/ + i < 1; /*inc*/ + i += 1 /* more */ + /* and more */ +) { + void +} +for ( + ;/* for */ + /* init */ + /*cond*/ + i < 1; /*inc*/ + i += 1 /* more */ + /* and more */ +) { + void +} +for ( + /* for */ + /* init */ let mut i = 1; + ; /*cond*/ + /*inc*/ + i += 1 /* more */ + /* and more */ +) { + void +} +for ( + /* for */ + /* init */ let mut i = 1; /*cond*/ + i < 1; /*inc*/ + /* more */ + /* and more */ +) { + void +} +for (;; /* for */ /* init */ /*cond*/ /*inc*/ /* more */ /* and more */) { + void +} + +match (/* match */ /*foo*/ foo /* post foo */ /* post foo 2 */) { /* mr branch */ + _ => /* void */ void, /* post void */ +} + +foo: /* foo*/ Number +foo: /* foo*/ Number + +let foo = /* foo */ 5 +let foo = /* foo */ 5 + +let foo: (a: /* a */ Number) => a = void +let foo: (a: /* a */ Number) => a = void + +let foo: Box = void + +foo: (/*Number*/ Number, /*post Number*/ /* String */ String /*post String */) + +foo: Number => /* Number */ Void +foo: Number => /* Number */ Void +foo: + (Number, /* Number */ String /* String */ /* More String */ /* Void */) => Void + +record Rec { + foo: /* foo */ /* more foo */ Void, + mut bar: /*pre-bar*/ /* bar */ Void, +} + +enum Enum { + Foo/* Foo */(/* Number */ Number, /* String */ String /*post-string*/), +} + +enum Enum { + Foo/* Foo */{ + /* Number */ foo: Number, /* String */ + bar: String, /*post-string*/ + }, +} + +exception /* except */ Foo +exception /* except */ Foo/*Foo*/(Number) + +type /* type */ Foo = /* Foo */ /* Number */ Number +type /* type */ Foo< + /* Foo */ + /* a */ a, /* post-a */ + b /* post-b */ + /* Foo */ + /* Number */ +> = Number + +primitive /* prim */ foo = /* foo */ /* hello */ "@hello" + +include /* include */ "foo" +include /* include */ "foo" as /* foo */ /* alias */ Foo + +module /* module */ Foo { /* Foo */ + foo /* foo */ + bar +} + +module /* module */ Foo { /* Foo */ + foo /* foo */ + + // bar + + bar +} + +foreign wasm foo: /* foo1 */ /* foo2 */ Number from /* type */ /* from */ "" +foreign wasm foo: /* foo1 */ + /* foo2 */ + Number as /* pre-alias */ /* as */ bar from /* bar */ /* from */ "" + +provide { + foo as /* foo */ /* as */ bar, + type Foo as /* Foo */ /* as */ Bar, + module Foo as /* Foo */ /* as */ Bar, +} + +@attr +let _ = 1 +@attr( + "foo", /* doo */ + "foo", + // foo + // goo + // bar +) +let _ = 1 + +provide /* provide */ enum Foo { + Foo, +} +abstract /* abstract */ enum Foo { + Foo, +} + +let foo = bar +and baz = qux +let foo = bar +and baz = qux +let foo = bar /*and1*/ /*and2*/ +and baz = qux +let foo = bar + +// and +and baz = qux + +provide /* provide */ primitive /* prim */ foo = /* foo */ /* hello */ "@hello" + +provide { /*header*/ foo, /* foo */ type Foo /*trailer*/ } + +[>] +[> /*foo */] +let [>] = 5 +let [> /*foo */] = 5 +[] +[/*foo */] +let [] = 5 +let [/*foo */] = 5 diff --git 
a/compiler/test/grainfmt/comments.input.gr b/compiler/test/grainfmt/comments.input.gr index 9e1a1c6575..9957929039 100644 --- a/compiler/test/grainfmt/comments.input.gr +++ b/compiler/test/grainfmt/comments.input.gr @@ -157,3 +157,332 @@ Result.expect( // decRef(WasmI32.fromGrain(setDebug)) // void // } + +let { foo: /* yoyo */ bar} = 5 +let foo = (foo=/* yoyo */5) => 5 +let Foo(foo/* yoyo */as bar) = 5 +let Foo(foo/*yoyo*/| /*jaja*/bar) = 5 + +let Foo{foo, +// comment + bar /*2*/, baz /*3*/, qux /*4*/, qux /*4*/, qux /*4*/, qux /*4*/, qux /*4*/, // trailer + } = 5 + +let Foo{foo, +// comment + bar /*2*/, baz /*3*/, qux /*4*/, // trailer + } = 5 + +let Foo{ + // comment + foo, + // comment + bar +} = 5 + +let { foo: Foo{qux: quux, // comment +}, +cccc, bar: baz}=6 + +let Foo( // yo + foo, // bar + baz // heh +) = 1 + +let foo /* int */: Int32 = 1l +let foo: /* int */ Int32 = 1l + +let {foo, +// comment + bar /*2*/, baz /*3*/, qux /*4*/, qux /*4*/, qux /*4*/, qux /*4*/, qux /*4*/, // trailer + } = 5 + +let {foo, +// comment + bar /*2*/, baz /*3*/, qux /*4*/, // trailer + } = 5 + +let { + // comment + foo, + // comment + bar +} = 5 + +let [> /* foo */ foo, /* bar */ bar, /* bar2 */] = 5 +let [> /* foo */ foo, /* bar */ bar, // bar2 +] = 5 + +let [ /* foo */ foo, /* bar */ bar, /* bar2 */] = 5 +let [ /* foo */ foo, /* bar */ bar, // bar2 +] = 5 +let [ /* foo */ foo, /* bar */ bar, // bar2 +...rest] = 5 +let [ /* foo */ foo, /* bar */ bar, // bar2 +...rest // trailer +] = 5 + +let ( // yo + foo, // bar + baz // heh +) = 1 + +{foo: /* foo */ foo,} +{foo: /* foo */ bar,} + +let {foo: /* foo */ foo,} = void +let {foo: /* foo */ bar,} = void + +from Mod use { module /* Foo */ Foo, type /* Foo */ Foo, foo } + +match (5) { + 1 /* when */ when false => 5, + 1 when /* when */ false => 5, + 1 => // branch + 5, + 1 /* branch */ => { + 5 }, + 1 => /* branch */ { + 5 }, + 1 /* branch1 */ => /* branch2 */ { + 5 }, +} + +@attr(/* foo */ "foo", /* bar */ "bar") let _ = 1 +@attr(// foo +"foo", /* bar */ "bar") let _ = 1 + +foo(foo/*foo*/=5) +foo(foo=/*foo*/5) + +if /* if */ (cond) {5} else {6} +if (/* if */cond) {5} else {6} +if (cond/* if */) {5} else {6} +if (cond) /* if */{5} else {6} +if (cond) {5} /* if */else {6} +if (cond) {5} else /* if */{6} +if (cond) 5 /* longlonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglong */ else 5 + +foo /* foo */ += 1 +foo += /* foo */ 1 +foo /* foo */ = foo + 1 +foo = /* foo */ foo + 1 +foo /* foo */ = foo /* foo */ + 1 +foo = /* foo */ foo /* foo */ + 1 + +{ +@attr // foo +let _ = 1 +@attr("foo", /* doo */ "foo" +// foo +// goo +) // bar +let _ = 1 +} + +Foo( // yo + foo, // bar + baz // heh +) +Foo{ + // comment + foo, + // comment + bar +} + +{ + // foo + foo + foo + + foo + + foo // same line foo + // next line foo + + // trailer +} + +{ + let /* let */ rec /* rec */ mut /* mut */ foo = 5 + + let foo = 5 // foo + and foo = 6 + + let foo = 5 + // foo + and foo = 6 + + let foo = 5 + + // foo + and foo = 6 +} + +!/* foo */true + +fail /* fail */ "fail" + +foo( // yo + foo, // bar + baz // heh +) + +x /* foo */ => /* foo */ 5 +x /* foo */ => // foo + 5 + +( // yo + foo, // bar + baz // heh +) /* lambda */ => 5 + +( // yo + foo, // bar + baz // heh +) + +foo/* arr */[1] +foo/* arr */[1] /* arr */ = 1 + +{ + // comment + foo, + // comment + bar +} +{foo, +// comment + bar /*2*/, baz /*3*/, qux /*4*/, qux /*4*/, qux /*4*/, qux /*4*/, qux /*4*/, // trailer + } +{ + ...old, + // comment + foo, + // comment + bar +} +{ // old + ...old, + // comment + foo, + // 
comment + bar +} +{ /* old */ + ...old, + // comment + foo, + // comment + bar +} + +foo./* bar */bar +foo.// bar +bar + +foo /* foo */ := /* foo */ foo + +return /* return */ foo + +from /* from */ Mod /* Mod */ use /* use */ * +from /* from */ Mod /* Mod */ use /* use */ {foo} + +while /* true */ (true) {void} +while (/* true */ true) {void} +while (true /* true */) {void} +while (true) /* true */ {void} + +for /* for */ (/* init */ let mut i = 1; /*cond*/i < 1; /*inc*/ i += 1 /* more */ )/* and more */ {void} +for /* for */ (/* init */ ; /*cond*/i < 1; /*inc*/ i += 1 /* more */ )/* and more */ {void} +for /* for */ (/* init */ let mut i = 1; /*cond*/; /*inc*/ i += 1 /* more */ )/* and more */ {void} +for /* for */ (/* init */ let mut i = 1; /*cond*/i < 1; /*inc*/ /* more */ )/* and more */ {void} +for /* for */ (/* init */ ; /*cond*/; /*inc*/ /* more */ )/* and more */ {void} + +match /* match */ (/*foo*/ foo /* post foo */) /* post foo 2 */ {/* mr branch */_=> /* void */void/* post void */} + +foo /* foo*/ : Number +foo: /* foo*/ Number + +let foo /* foo */ = 5 +let foo =/* foo */ 5 + +let foo: (a/* a */: Number) => a = void +let foo: (a: /* a */ Number) => a = void + +let foo: Box/* Box */< /* a */a /* after a */> = void + +foo: (/*Number*/Number /*post Number*/, /* String */ String /*post String */) + +foo: Number /* Number */ => Void +foo: Number => /* Number */ Void +foo: (Number /* Number */, String /* String */) /* More String */ => /* Void */ Void + +record Rec { foo /* foo */: /* more foo */Void, mut /*pre-bar*/ bar /* bar */: Void} + +enum Enum { Foo /* Foo */ (/* Number */ Number, /* String */ String, /*post-string*/)} + +enum Enum { Foo /* Foo */ {/* Number */ foo:Number, /* String */ bar:String, /*post-string*/}} + +exception /* except */ Foo +exception /* except */ Foo/*Foo*/(Number) + +type /* type */ Foo /* Foo */ = /* Number */ Number +type /* type */ Foo /* Foo */ < /* a */ a, /* post-a */ b, /* post-b */> /* Foo */ = /* Number */ Number + +primitive /* prim */ foo /* foo */ = /* hello */ "@hello" + +include /* include */ "foo" +include /* include */ "foo" /* foo */ as /* alias */ Foo + +module /* module */ Foo /* Foo */ { + foo; /* foo */ bar +} + +module /* module */ Foo /* Foo */ { + foo; /* foo */ + + // bar + + bar +} + +foreign wasm foo /* foo1 */: /* foo2 */ Number /* type */ from /* from */ "" +foreign wasm foo /* foo1 */: /* foo2 */ Number /* pre-alias */ as /* as */ bar /* bar */ from /* from */ "" + +provide { foo /* foo */ as /* as */ bar, type Foo /* Foo */ as /* as */ Bar, module Foo /* Foo */ as /* as */ Bar } + +@attr // foo +let _ = 1 +@attr("foo", /* doo */ "foo" +// foo +// goo +) // bar +let _ = 1 + +provide /* provide */ enum Foo { Foo } +abstract /* abstract */ enum Foo { Foo } + +let foo = bar and baz = qux +let foo = bar + +and baz = qux +let foo = bar /*and1*/ and /*and2*/ baz = qux +let foo = bar + +// and +and baz = qux + +provide /* provide */ primitive /* prim */ foo /* foo */ = /* hello */ "@hello" + +provide { /*header*/ foo, /* foo */ type Foo /*trailer*/} + +[> ] +[> /*foo */] +let [> ] = 5 +let [> /*foo */] = 5 +[ ] +[ /*foo */] +let [ ] = 5 +let [ /*foo */] = 5 diff --git a/compiler/test/grainfmt/empty.expected.gr b/compiler/test/grainfmt/empty.expected.gr new file mode 100644 index 0000000000..32c92da8ca --- /dev/null +++ b/compiler/test/grainfmt/empty.expected.gr @@ -0,0 +1 @@ +module Empty // Empty module diff --git a/compiler/test/grainfmt/empty.input.gr b/compiler/test/grainfmt/empty.input.gr new file mode 100644 index 
0000000000..32c92da8ca --- /dev/null +++ b/compiler/test/grainfmt/empty.input.gr @@ -0,0 +1 @@ +module Empty // Empty module diff --git a/compiler/test/grainfmt/enums.expected.gr b/compiler/test/grainfmt/enums.expected.gr index f5c736f300..f50f3fac28 100644 --- a/compiler/test/grainfmt/enums.expected.gr +++ b/compiler/test/grainfmt/enums.expected.gr @@ -31,18 +31,11 @@ enum ParsedRegularExpression { REWordBoundary, RENotWordBoundary, RELiteral(Char), - RELiteralString( - String - ), // <- sequences of literals are flattened into a string + RELiteralString(String), // <- sequences of literals are flattened into a string REAlts(ParsedRegularExpression, ParsedRegularExpression), RESequence(List, Bool), // seq elts, needs backtrack REGroup(ParsedRegularExpression, Number), // regex, group ID - RERepeat( - ParsedRegularExpression, - Number, - Option, - Bool - ), // regex, min, max (None for infinity), true=non-greedy + RERepeat(ParsedRegularExpression, Number, Option, Bool), // regex, min, max (None for infinity), true=non-greedy REMaybe(ParsedRegularExpression, Bool), // regex, true=non-greedy REConditional( ParsedRegularExpression, @@ -52,12 +45,7 @@ enum ParsedRegularExpression { Number, Bool ), // test, if-true, if-false, n-start, num-n, needs-backtrack - RELookahead( - ParsedRegularExpression, - Bool, - Number, - Number - ), // regex, is-match, n-start, num-n + RELookahead(ParsedRegularExpression, Bool, Number, Number), // regex, is-match, n-start, num-n RELookbehind( ParsedRegularExpression, Bool, @@ -66,12 +54,7 @@ enum ParsedRegularExpression { Number, Number ), // regex, is-match, lb-min, lb-max, n-start, num-n (lb-xx values patched in later) - RECut( - ParsedRegularExpression, - Number, - Number, - Bool - ), // regex, n-start, num-n, needs-backtrack + RECut(ParsedRegularExpression, Number, Number, Bool), // regex, n-start, num-n, needs-backtrack REReference(Number, Bool), // n, case-sensitive RERange(List<(Number, Number)>), REUnicodeCategories(Bool), // symlist, true=match/false=does-not-match diff --git a/compiler/test/grainfmt/function_params.expected.gr b/compiler/test/grainfmt/function_params.expected.gr index 10c1e5ba67..559b5a71e1 100644 --- a/compiler/test/grainfmt/function_params.expected.gr +++ b/compiler/test/grainfmt/function_params.expected.gr @@ -6,24 +6,22 @@ let unit_arg = () => 3 let two_args = (x, y) => 4 -provide let fake_write: (int, int, int, string) => string = - ( - fd, - iovs, - iovs_len, - nwritten, - ) => { +provide let fake_write: (int, int, int, string) => string = ( + fd, + iovs, + iovs_len, + nwritten, +) => { "ok" } -let lots_of_args = - ( - verylonglong1, - verylonglong2, - verylonglong3, - verylonglong4, - verylonglong1, - ) => { +let lots_of_args = ( + verylonglong1, + verylonglong2, + verylonglong3, + verylonglong4, + verylonglong1, +) => { print("lots of args") } @@ -37,7 +35,7 @@ let stringTailMatcher = (toMatch, len) => state, stack, ) => { - true -} + true + } let f: Number => (Number, Number) => Number = a => (b, c) => a + b + c diff --git a/compiler/test/grainfmt/ifthenelse.expected.gr b/compiler/test/grainfmt/ifthenelse.expected.gr index 1528de3287..89553ad07b 100644 --- a/compiler/test/grainfmt/ifthenelse.expected.gr +++ b/compiler/test/grainfmt/ifthenelse.expected.gr @@ -8,13 +8,15 @@ if (true) { "short" } -if (SomeModule.ne(mval, 10)) ( - if (SomeModule.ltS(yval, 11)) SomeModule.sub(12, mres) else mres -) else 14 +if (SomeModule.ne(mval, 10)) + (if (SomeModule.ltS(yval, 11)) SomeModule.sub(12, mres) else mres) +else + 14 -if 
(SomeModule.ne(mval, 10000)) ( - if (SomeModule.ltS(yval, 111111)) SomeModule.sub(1323232, mres) else mres -) else 1232324 +if (SomeModule.ne(mval, 10000)) + (if (SomeModule.ltS(yval, 111111)) SomeModule.sub(1323232, mres) else mres) +else + 1232324 if (1 == 2) { print("false") diff --git a/compiler/test/grainfmt/includes.expected.gr b/compiler/test/grainfmt/includes.expected.gr index ba7e6cb2c3..9f03554516 100644 --- a/compiler/test/grainfmt/includes.expected.gr +++ b/compiler/test/grainfmt/includes.expected.gr @@ -5,9 +5,8 @@ include "runtime/unsafe/tags" include "list" include "option" as Opt include "option" as Opt -// TODO(#1627): This comment disappears -include "array" -include "array" as Foo +include /* special include */ "array" +include "array" as /* special include */ Foo from List use { length, map, forEach as each } from Opt use { module MutableOpt, @@ -15,23 +14,21 @@ from Opt use { type Opt, type Opt as OptAlias, } -from Opt use { /* comment1 */ /* comment2 */ /* comment3 */ /* comment4 */ /* comment5 */ /* comment6 */ /* comment7 */ /* comment8 */ /* comment9 */ - module MutableOpt, - module ImmutableOpt as Imm, - type Opt, - type Opt as OptAlias, - exception Exc as E, - exception Exc2, +from Opt use { + module MutableOpt, /* comment1 */ + module ImmutableOpt as /* comment2 */ /* comment3 */ Imm, /* comment4 */ + /* comment5 */ + type /* comment6 */ Opt, + type Opt as /* comment7 */ OptAlias, + exception Exc as /* comment8 */ E, + exception Exc2, /* comment9 */ } include "runtime/unsafe/wasmi32" from WasmI32 use { add as (+), mul as (*), xor as (^), shl as (<<) } -provide foreign wasm storage_read: ( - WasmI64, - WasmI64, - WasmI64, -) => WasmI64 as storageRead from "env" +provide foreign wasm storage_read: + (WasmI64, WasmI64, WasmI64) => WasmI64 as storageRead from "env" include "runtime/unsafe/wasmi32" from WasmI32 use { @@ -42,6 +39,7 @@ from WasmI32 use { // no signed imports, as care should be taken to use signed or unsigned operators } -provide foreign wasm promise_results_count: () => WasmI64 as promiseResultsCount from "env" +provide foreign wasm promise_results_count: + () => WasmI64 as promiseResultsCount from "env" provide { exception MyExc as E2 } diff --git a/compiler/test/grainfmt/includes.input.gr b/compiler/test/grainfmt/includes.input.gr index d8abf040bc..354f44b157 100644 --- a/compiler/test/grainfmt/includes.input.gr +++ b/compiler/test/grainfmt/includes.input.gr @@ -6,7 +6,6 @@ include "list" include "option" as Opt include "option" as Opt -// TODO(#1627): This comment disappears include /* special include */ "array" include "array" as /* special include */ Foo from List use { length, map, forEach as each } diff --git a/compiler/test/grainfmt/lambda.expected.gr b/compiler/test/grainfmt/lambda.expected.gr index a314eac4d1..1953857e98 100644 --- a/compiler/test/grainfmt/lambda.expected.gr +++ b/compiler/test/grainfmt/lambda.expected.gr @@ -9,9 +9,9 @@ y => { let z = 2 } -let testfna = forEachCodePoint(c /* arg */ => void) +let testfna = forEachCodePoint(c => /* arg */ void) -let testfn = forEachCodePoint(c /* arg */ => { +let testfn = forEachCodePoint(c => /* arg */ { // a void }, "") @@ -19,19 +19,19 @@ let testfn = forEachCodePoint(c /* arg */ => { let testfn3 = forEachCodePoint(( c, /* arg 1 */ d, // eol2 - e /* arg */ + e, /* arg */ // eol ) => { // a void }, "") -let testfn4 = forEachCodePoint(/* pre */ c /* arg */ => { +let testfn4 = forEachCodePoint(/* pre */ c => /* arg */ { // a void }, "") -let testfn5 = forEachCodePoint(c /* arg */ => { +let 
testfn5 = forEachCodePoint(c => /* arg */ { // a void }, "") diff --git a/compiler/test/grainfmt/lets.expected.gr b/compiler/test/grainfmt/lets.expected.gr index a6043652a3..82e1ad225f 100644 --- a/compiler/test/grainfmt/lets.expected.gr +++ b/compiler/test/grainfmt/lets.expected.gr @@ -1,11 +1,7 @@ module Lets -let rec myFun1 = x => - x + - 1 -and myFun2 = x => - x + - 1 +let rec myFun1 = x => x + 1 +and myFun2 = x => x + 1 let myBlock = { "some string" @@ -31,14 +27,15 @@ let myFunction4 = () => { } let rotate = (count, list) => { - let (beginning, end) = - if (count >= 0) part(count, list) else part(length(list) + count, list) + let (beginning, end) = if (count >= 0) + part(count, list) + else + part(length(list) + count, list) append(end, beginning) } let qsize = (if (WasmI32.eqz(WasmI32.and_(m + 1n, 1n))) { - m + 1n - } else { - m + 2n - }) >> - 1n + m + 1n +} else { + m + 2n +}) >> 1n diff --git a/compiler/test/grainfmt/list_sugar.expected.gr b/compiler/test/grainfmt/list_sugar.expected.gr index 4cd30ae87a..75e26565e0 100644 --- a/compiler/test/grainfmt/list_sugar.expected.gr +++ b/compiler/test/grainfmt/list_sugar.expected.gr @@ -64,8 +64,4 @@ let t = [ 3, ] -let cons = (a, b) => - [ - a, - ...b - ] // <- some long comment some long comment some long comment some long comment +let cons = (a, b) => [a, ...b] // <- some long comment some long comment some long comment some long comment diff --git a/compiler/test/grainfmt/matches.expected.gr b/compiler/test/grainfmt/matches.expected.gr index e4158429d5..e664720c1f 100644 --- a/compiler/test/grainfmt/matches.expected.gr +++ b/compiler/test/grainfmt/matches.expected.gr @@ -20,10 +20,7 @@ provide let pop = queue => { { forwards: [], backwards: [] } => queue, { forwards: [head], backwards: [] } => { forwards: [], backwards: [] }, { forwards: [head], backwards } => - { - forwards: List.reverse(backwards), - backwards: [], - }, + { forwards: List.reverse(backwards), backwards: [] }, { forwards: [head, ...ftail], backwards } => { forwards: ftail, backwards }, } } diff --git a/compiler/test/grainfmt/patterns.expected.gr b/compiler/test/grainfmt/patterns.expected.gr index e51943651d..fe412a131d 100644 --- a/compiler/test/grainfmt/patterns.expected.gr +++ b/compiler/test/grainfmt/patterns.expected.gr @@ -18,17 +18,21 @@ match (list) { } match (list) { - [_, alongidentalongident123456, _] | - [_, _, alongidentalongident123456, _, _] => + [_, alongidentalongident123456, _] | [_, _, alongidentalongident123456, _, _] => Some(alongidentalongident123456), _ => None, } -enum Rec { /* first */ /* second */ /* third */ /* fourth */ /* fifth */ - Rec{ - x: Number, - y: Number, - }, +enum Rec { + Rec/* first */{ /* second */ x: Number, /* third */ y: Number /* fourth */ }, /* fifth */ + Tup(Number, Number), +} +enum Rec { + Rec/* first */{ + /* second */ x: Number, /* third */ + y: Number, /* fourth */ + /* more */ + }, /* fifth */ Tup(Number, Number), } @@ -61,7 +65,7 @@ let x = match (n) { 18, 19, 20, - ...rest, + ...rest ] => void, _ => void, diff --git a/compiler/test/grainfmt/patterns.input.gr b/compiler/test/grainfmt/patterns.input.gr index adf92efdab..1a7da69c22 100644 --- a/compiler/test/grainfmt/patterns.input.gr +++ b/compiler/test/grainfmt/patterns.input.gr @@ -30,6 +30,7 @@ match (list) { } enum Rec { Rec /* first */ { /* second */ x: Number, /* third */ y: Number /* fourth */ } /* fifth */, Tup(Number, Number) } +enum Rec { Rec /* first */ { /* second */ x: Number, /* third */ y: Number /* fourth */ /* more */ } /* fifth */, Tup(Number, 
Number) } match (Rec { x: 1, y: 2 }) { Rec {x: 3, _} => 3, diff --git a/compiler/test/grainfmt/records.expected.gr b/compiler/test/grainfmt/records.expected.gr index 3a12191987..5f15219d1a 100644 --- a/compiler/test/grainfmt/records.expected.gr +++ b/compiler/test/grainfmt/records.expected.gr @@ -57,6 +57,7 @@ record GraphData { // comment 1 record GraphData2 { edge: Set.Set<(a, a)>, // comment 3 + nodes: Set.Set, } @@ -76,19 +77,22 @@ record Commented { // brace comment // trailing } -let x: Commented = { // comment 1 +let x: Commented = { + // comment 1 longlonglongnamenamename1: "A", longlonglongnamenamename2: "B", longlonglongnamenamename3: 42, } -let { // a comment 2 +let { + // a comment 2 longlonglongnamenamename1, longlonglongnamenamename2, longlonglongnamenamename3, } = x -let { // a comment 3 +let { + // a comment 3 l1, l2, l3, @@ -106,15 +110,16 @@ let s = { str: "", } -let s = { // comment 1 +let s = { + // comment 1 // comment 2 num: 1, var: A, // end line comment str: "", } -let y = { /* comment 1 */ - x, /* comment 2 */ +let y = { + /* comment 1 */ x, /* comment 2 */ longlonglongnamenamename2: 12345, /* comment 3 */ longlonglongnamenamename3: 12345, /* comment 4 */ } // end line comment @@ -129,8 +134,8 @@ let y = { longlonglongnamenamename3: 12345, } -let y = { /* comment 1 */ - ...x, /* comment 2 */ +let y = { + /* comment 1 */ ...x, /* comment 2 */ longlonglongnamenamename2: 12345, /* comment 3 */ longlonglongnamenamename3: 12345, /* comment 4 */ } // end line comment diff --git a/compiler/test/grainfmt/spreads.expected.gr b/compiler/test/grainfmt/spreads.expected.gr index ee4112842f..21fc5fbcf3 100644 --- a/compiler/test/grainfmt/spreads.expected.gr +++ b/compiler/test/grainfmt/spreads.expected.gr @@ -13,8 +13,10 @@ let filteri = (fn, list) => { match (list) { [] => [], [first, ...rest] => - if (fn(first, index22)) [first, ...iter(fn, rest, index22 + 1)] - else iter(fn, rest, index22 + 1), + if (fn(first, index22)) + [first, ...iter(fn, rest, index22 + 1)] + else + iter(fn, rest, index22 + 1), } } iter(fn, list, 0) diff --git a/compiler/test/grainfmt/variants.expected.gr b/compiler/test/grainfmt/variants.expected.gr index 901ac476fc..6c1a836d03 100644 --- a/compiler/test/grainfmt/variants.expected.gr +++ b/compiler/test/grainfmt/variants.expected.gr @@ -30,14 +30,12 @@ enum TrailingComment { } enum InlineRec { - Rec{ - x: Number, - y: Number, - }, + Rec{ x: Number, y: Number }, Tup(Number, Number), } -let r = Rec{ /* first comment */ +let r = Rec{ + /* first comment */ x: 1, /* second comment */ - y: 2, // third comment + y: 2 // third comment } /* fourth comment */ diff --git a/compiler/test/runner.re b/compiler/test/runner.re index 07c1831a93..254812f991 100644 --- a/compiler/test/runner.re +++ b/compiler/test/runner.re @@ -39,18 +39,6 @@ let graindoc_out_file = name => let gaindoc_in_file = name => Filepath.to_string(Fp.At.(test_gaindoc_dir / (name ++ ".input.gr"))); -let read_channel = channel => { - let buf = Buffer.create(2048); - try( - while (true) { - Buffer.add_channel(buf, channel, 2048); - } - ) { - | End_of_file => () - }; - Buffer.contents(buf); -}; - let compile = (~num_pages=?, ~config_fn=?, ~hook=?, name, prog) => { Config.preserve_all_configs(() => { Config.with_config( @@ -125,24 +113,29 @@ let open_process = args => { Unix.environment(), ); - let pid = Unix.process_full_pid((stdout, stdin, stderr)); - let (status, timed_out) = - try({ - let (_, status) = Test_utils.waitpid_timeout(15., pid); - (status, false); - }) { - | Test_utils.Timeout => - // 
Windows only supports the `sigkill` signal - Unix.kill(pid, Sys.sigkill); - (Unix.WEXITED(-1), true); + let current_time = Unix.time(); + + let out_eof = ref(false); + let err_eof = ref(false); + + let out_buf = Buffer.create(1024); + let err_buf = Buffer.create(1024); + + // Windows buffers output, so read channels as the subprocess is running + while ((! out_eof^ || ! err_eof^) && Unix.time() < current_time +. 15.) { + try(Buffer.add_channel(out_buf, stdout, 1024)) { + | End_of_file => out_eof := true }; + try(Buffer.add_channel(err_buf, stderr, 1024)) { + | End_of_file => err_eof := true + }; + }; - let out = read_channel(stdout); - let err = read_channel(stderr); + let timed_out = Unix.time() > current_time +. 15.; - close_in(stdout); - close_in(stderr); - close_out(stdin); + let status = Unix.close_process_full((stdout, stdin, stderr)); + let out = Buffer.contents(out_buf); + let err = Buffer.contents(err_buf); let code = switch (status) { diff --git a/compiler/test/stdlib/array.test.gr b/compiler/test/stdlib/array.test.gr index ebfa22d022..a8bd4298db 100644 --- a/compiler/test/stdlib/array.test.gr +++ b/compiler/test/stdlib/array.test.gr @@ -618,8 +618,7 @@ module Immutable { assert Array.reverse(fromList([])) == fromList([]) // Empty let testArr = fromList([1, 2, 3, 4]) - assert Array.reverse(testArr) == - fromList([4, 3, 2, 1]) // array is reversed properly + assert Array.reverse(testArr) == fromList([4, 3, 2, 1]) // array is reversed properly assert testArr == fromList([1, 2, 3, 4]) // original array is not modified // Array.toList @@ -631,8 +630,10 @@ module Immutable { // Array.map - assert Array.map(x => - fail "Map iterator should not be called on 0-length array", fromList([])) == + assert Array.map( + x => fail "Map iterator should not be called on 0-length array", + fromList([]) + ) == fromList([]) assert Array.map(incr, fromList([2, 3, 4])) == fromList([3, 4, 5]) assert Array.map(incr, bltArr) == fromList(List.map(incr, branchingLongTail)) @@ -722,27 +723,27 @@ module Immutable { let arr = fromList([1, 2, 3]) assert Array.count(x => x > 0, arr) == 3 - assert Array.count(x => - fail "count iterator should not be called on empty array", fromList([])) == + assert Array.count( + x => fail "count iterator should not be called on empty array", + fromList([]) + ) == 0 assert Array.count(x => x == 3, arr) == 1 // Array.reduce assert Array.reduce((acc, x) => acc + x, 0, arr) == 6 - assert Array.reduce((acc, x) => fail "Shouldn't be called", - 0, - fromList([]) - ) == + assert Array.reduce((acc, x) => fail "Shouldn't be called", 0, fromList([])) == 0 assert Array.reduce((+), 0, bltArr) == List.reduce((+), 0, branchingLongTail) // Array.reduceRight assert Array.reduceRight((cur, acc) => cur - acc, 0, arr) == 2 - assert Array.reduceRight((x, acc) => fail "Shouldn't be called", - 0, - fromList([]) + assert Array.reduceRight( + (x, acc) => fail "Shouldn't be called", + 0, + fromList([]) ) == 0 assert Array.reduceRight((x, acc) => [x, ...acc], [], bltArr) == diff --git a/compiler/test/stdlib/bigint.test.gr b/compiler/test/stdlib/bigint.test.gr index a3ad033622..49d791d1c4 100644 --- a/compiler/test/stdlib/bigint.test.gr +++ b/compiler/test/stdlib/bigint.test.gr @@ -181,14 +181,14 @@ assert (341237t ^ -60597t) == -383042t assert (-341237t ^ 60597t) == -383042t assert (-341237t ^ -60597t) == 383040t -assert (0xe8c4713afbb863c16a214fb9fbee69d0610d131f2c55328b641d61eff9037848t & - 0x16A214FB9FBEE69D061E8C4713AFBB863C16At) == +assert ( + 
0xe8c4713afbb863c16a214fb9fbee69d0610d131f2c55328b641d61eff9037848t & + 0x16A214FB9FBEE69D061E8C4713AFBB863C16At +) == 9059764183839950850606742203634691510321224t assert lnot(5t) == -6t -assert lnot( - 0xe8c4713afbb863c16a214fb9fbee69d0610d131f2c55328b641d61eff9037848t -) == +assert lnot(0xe8c4713afbb863c16a214fb9fbee69d0610d131f2c55328b641d61eff9037848t) == -0xe8c4713afbb863c16a214fb9fbee69d0610d131f2c55328b641d61eff9037849t // Regression #1339 diff --git a/compiler/test/stdlib/hash.test.gr b/compiler/test/stdlib/hash.test.gr index 7077ea2aa0..d3ae81dabc 100644 --- a/compiler/test/stdlib/hash.test.gr +++ b/compiler/test/stdlib/hash.test.gr @@ -101,9 +101,7 @@ assert Hash.hash("") == Hash.hash("") assert Hash.hash("grain > ore > wool > lumber > brick") == Hash.hash("grain > ore > wool > lumber > brick") -let chars = String.explode( - "!@#$%^&*()1234567890-qwertyuiop🌾💯🔥😈😤💪🏼" -) +let chars = String.explode("!@#$%^&*()1234567890-qwertyuiop🌾💯🔥😈😤💪🏼") let charList = Array.toList(chars) assert uniq(List.map(Hash.hash, charList)) diff --git a/compiler/test/stdlib/map.test.gr b/compiler/test/stdlib/map.test.gr index 76233e1336..b35eae5e42 100644 --- a/compiler/test/stdlib/map.test.gr +++ b/compiler/test/stdlib/map.test.gr @@ -374,36 +374,24 @@ assert Map.size(rejectTestMap) == 3 let toUpdate = Map.fromList([("a", 1), ("b", 2), ("c", 3)]) -Map.update( - "b", - old => { - assert old == Some(2) - Some(4) - }, - toUpdate -) +Map.update("b", old => { + assert old == Some(2) + Some(4) +}, toUpdate) assert Map.get("b", toUpdate) == Some(4) -Map.update( - "d", - old => { - assert old == None - Some(10) - }, - toUpdate -) +Map.update("d", old => { + assert old == None + Some(10) +}, toUpdate) assert Map.get("d", toUpdate) == Some(10) -Map.update( - "c", - old => { - assert old == Some(3) - None - }, - toUpdate -) +Map.update("c", old => { + assert old == Some(3) + None +}, toUpdate) assert Map.contains("c", toUpdate) == false @@ -412,9 +400,7 @@ module Immutable { let strKeys = Map.fromList([("🌾", 1), ("🐑", 2), ("🧱", 3)]) let numKeys = Map.fromList([(1, "🌾"), (2, "🐑"), (3, "🧱")]) - let varKeys = Map.fromList( - [(Grain, "🌾"), (Sheep, "🐑"), (Brick, "🧱")] - ) + let varKeys = Map.fromList([(Grain, "🌾"), (Sheep, "🐑"), (Brick, "🧱")]) let recordKeys = Map.fromList( [ ({ name: "Grain", emoji: "🌾" }, 1), @@ -621,8 +607,10 @@ module Immutable { let mut filterTestImmutableMap = makeFilterTestImmutableMap() - filterTestImmutableMap = Map.filter((key, value) => - key == Sheep, filterTestImmutableMap) + filterTestImmutableMap = Map.filter( + (key, value) => key == Sheep, + filterTestImmutableMap + ) assert !Map.contains(Grain, filterTestImmutableMap) assert Map.contains(Sheep, filterTestImmutableMap) @@ -630,8 +618,10 @@ module Immutable { let mut filterTestImmutableMap = makeFilterTestImmutableMap() - filterTestImmutableMap = Map.filter((key, value) => - value == "b" || value == "s", filterTestImmutableMap) + filterTestImmutableMap = Map.filter( + (key, value) => value == "b" || value == "s", + filterTestImmutableMap + ) assert !Map.contains(Grain, filterTestImmutableMap) assert Map.contains(Sheep, filterTestImmutableMap) @@ -639,15 +629,19 @@ module Immutable { let mut filterTestImmutableMap = makeFilterTestImmutableMap() - filterTestImmutableMap = Map.filter((key, value) => - value == "invalid", filterTestImmutableMap) + filterTestImmutableMap = Map.filter( + (key, value) => value == "invalid", + filterTestImmutableMap + ) assert Map.size(filterTestImmutableMap) == 0 let mut filterTestImmutableMap = 
makeFilterTestImmutableMap() - filterTestImmutableMap = Map.filter((key, value) => - true, filterTestImmutableMap) + filterTestImmutableMap = Map.filter( + (key, value) => true, + filterTestImmutableMap + ) assert Map.size(filterTestImmutableMap) == 3 @@ -655,8 +649,10 @@ module Immutable { let mut rejectTestImmutableMap = makeFilterTestImmutableMap() - rejectTestImmutableMap = Map.reject((key, value) => - key == Sheep, rejectTestImmutableMap) + rejectTestImmutableMap = Map.reject( + (key, value) => key == Sheep, + rejectTestImmutableMap + ) assert Map.contains(Grain, rejectTestImmutableMap) assert !Map.contains(Sheep, rejectTestImmutableMap) @@ -664,8 +660,10 @@ module Immutable { let mut rejectTestImmutableMap = makeFilterTestImmutableMap() - rejectTestImmutableMap = Map.reject((key, value) => - value == "b" || value == "s", rejectTestImmutableMap) + rejectTestImmutableMap = Map.reject( + (key, value) => value == "b" || value == "s", + rejectTestImmutableMap + ) assert Map.contains(Grain, rejectTestImmutableMap) assert !Map.contains(Sheep, rejectTestImmutableMap) @@ -673,15 +671,19 @@ module Immutable { let mut rejectTestImmutableMap = makeFilterTestImmutableMap() - rejectTestImmutableMap = Map.reject((key, value) => - true, rejectTestImmutableMap) + rejectTestImmutableMap = Map.reject( + (key, value) => true, + rejectTestImmutableMap + ) assert Map.size(rejectTestImmutableMap) == 0 let mut rejectTestImmutableMap = makeFilterTestImmutableMap() - rejectTestImmutableMap = Map.reject((key, value) => - false, rejectTestImmutableMap) + rejectTestImmutableMap = Map.reject( + (key, value) => false, + rejectTestImmutableMap + ) assert Map.size(rejectTestImmutableMap) == 3 @@ -689,36 +691,24 @@ module Immutable { let mut toUpdate = Map.fromList([("a", 1), ("b", 2), ("c", 3)]) - toUpdate = Map.update( - "b", - old => { - assert old == Some(2) - Some(4) - }, - toUpdate - ) + toUpdate = Map.update("b", old => { + assert old == Some(2) + Some(4) + }, toUpdate) assert Map.get("b", toUpdate) == Some(4) - toUpdate = Map.update( - "d", - old => { - assert old == None - Some(10) - }, - toUpdate - ) + toUpdate = Map.update("d", old => { + assert old == None + Some(10) + }, toUpdate) assert Map.get("d", toUpdate) == Some(10) - toUpdate = Map.update( - "c", - old => { - assert old == Some(3) - None - }, - toUpdate - ) + toUpdate = Map.update("c", old => { + assert old == Some(3) + None + }, toUpdate) assert Map.contains("c", toUpdate) == false } diff --git a/compiler/test/stdlib/marshal.test.gr b/compiler/test/stdlib/marshal.test.gr index 440354f070..dd2db1d4ab 100644 --- a/compiler/test/stdlib/marshal.test.gr +++ b/compiler/test/stdlib/marshal.test.gr @@ -46,10 +46,7 @@ assert roundtripOk(Quux(Qux("qux", 42l))) record Bing { bang: Void, bop: a, - swish: ( - String, - a - ), + swish: (String, a), pow: Foo, } diff --git a/compiler/test/stdlib/number.test.gr b/compiler/test/stdlib/number.test.gr index 1bea34639e..31a4d79667 100644 --- a/compiler/test/stdlib/number.test.gr +++ b/compiler/test/stdlib/number.test.gr @@ -54,14 +54,14 @@ assert Infinity * Infinity == Infinity assert Number.isNaN(Infinity * NaN) assert Number.isNaN(NaN * NaN) // Rational mul tests -assert 2/3 * 4 == 8/3 -assert 2/3 * Int32.toNumber(4l) == 8/3 -assert 2/3 * Int64.toNumber(4L) == 8/3 +assert (2/3) * 4 == 8/3 +assert (2/3) * Int32.toNumber(4l) == 8/3 +assert (2/3) * Int64.toNumber(4L) == 8/3 assert 4 * (2/3) == 8/3 assert Int32.toNumber(4l) * (2/3) == 8/3 assert Int64.toNumber(4L) * (2/3) == 8/3 -assert 2/3 * 4.0 < 2.666666666666667 
-assert 2.6666666666666 < 2/3 * 4.0 +assert (2/3) * 4.0 < 2.666666666666667 +assert 2.6666666666666 < (2/3) * 4.0 // div assert div(25, 5) == 5 assert div(9223372036854775809, 9) == 1024819115206086201 @@ -596,16 +596,12 @@ assert Number.parseInt("_0___42___", 10) == Ok(42) assert Number.parseInt("-42", 10) == Ok(-42) assert Number.parseInt("-042", 10) == Ok(-42) assert Number.parseInt("-_0___42___", 10) == Ok(-42) -assert Number.parseInt("1073741823", 10) == - Ok(1073741823) // grain simple number max -assert Number.parseInt("-1073741824", 10) == - Ok(-1073741824) // grain simple number min +assert Number.parseInt("1073741823", 10) == Ok(1073741823) // grain simple number max +assert Number.parseInt("-1073741824", 10) == Ok(-1073741824) // grain simple number min assert Number.parseInt("2147483647", 10) == Ok(2147483647) // i32 max assert Number.parseInt("-2147483648", 10) == Ok(-2147483648) // i32 min -assert Number.parseInt("9223372036854775807", 10) == - Ok(9223372036854775807) // i64 max -assert Number.parseInt("-9223372036854775808", 10) == - Ok(-9223372036854775808) // i64 min +assert Number.parseInt("9223372036854775807", 10) == Ok(9223372036854775807) // i64 max +assert Number.parseInt("-9223372036854775808", 10) == Ok(-9223372036854775808) // i64 min assert Number.parseInt("0xabcdef", 10) == Ok(0xabcdef) assert Number.parseInt("0Xabcdef", 10) == Ok(0xabcdef) assert Number.parseInt("abcdef", 16) == Ok(0xabcdef) diff --git a/compiler/test/stdlib/option.test.gr b/compiler/test/stdlib/option.test.gr index 85a2fbf765..332aaa5ce1 100644 --- a/compiler/test/stdlib/option.test.gr +++ b/compiler/test/stdlib/option.test.gr @@ -47,43 +47,34 @@ assert Option.unwrapWithDefault("🐑", None) == "🐑" // Option.map assert Option.map(x => x * 2, Some(2)) == Some(4) -assert Option.map(x => String.concat("hello ", x), Some("🌾")) == - Some("hello 🌾") +assert Option.map(x => String.concat("hello ", x), Some("🌾")) == Some("hello 🌾") assert Option.map(x => fail "Shouldn't be called", None) == None // Option.mapWithDefault assert Option.mapWithDefault(x => x * 2, 1, Some(2)) == 4 -assert Option.mapWithDefault(x => String.concat("hello ", x), -"🐑", -Some("🌾") -) == +assert Option.mapWithDefault(x => String.concat("hello ", x), "🐑", Some("🌾")) == "hello 🌾" assert Option.mapWithDefault(x => x * 2, 1, None) == 1 -assert Option.mapWithDefault(x => String.concat("hello ", x), "🐑", None) == - "🐑" +assert Option.mapWithDefault(x => String.concat("hello ", x), "🐑", None) == "🐑" // Option.mapWithDefaultFn -assert Option.mapWithDefaultFn(x => x * 2, -() => fail "Shouldn't be called", -Some(2) +assert Option.mapWithDefaultFn( + x => x * 2, + () => fail "Shouldn't be called", + Some(2) ) == 4 -assert Option.mapWithDefaultFn(x => String.concat("hello ", x), -() => fail "Shouldn't be called", -Some("🌾") +assert Option.mapWithDefaultFn( + x => String.concat("hello ", x), + () => fail "Shouldn't be called", + Some("🌾") ) == "hello 🌾" -assert Option.mapWithDefaultFn(x => fail "Shouldn't be called", -() => 1, -None -) == +assert Option.mapWithDefaultFn(x => fail "Shouldn't be called", () => 1, None) == 1 -assert Option.mapWithDefaultFn(x => fail "Shouldn't be called", -() => "🐑", -None -) == +assert Option.mapWithDefaultFn(x => fail "Shouldn't be called", () => "🐑", None) == "🐑" // Option.flatMap @@ -110,14 +101,10 @@ assert Option.zip(None, Some("🌾")) == None // Option.zipWith assert Option.zipWith((a, b) => a + b, Some(1), Some(2)) == Some(3) -assert Option.zipWith((a, b) => (a, b), Some(1), Some("🌾")) == - Some((1, "🌾")) 
+assert Option.zipWith((a, b) => (a, b), Some(1), Some("🌾")) == Some((1, "🌾")) assert Option.zipWith((a, b) => fail "Shouldn't be called", Some(1), None) == None -assert Option.zipWith((a, b) => fail "Shouldn't be called", -None, -Some("🌾") -) == +assert Option.zipWith((a, b) => fail "Shouldn't be called", None, Some("🌾")) == None // Option.flatten diff --git a/compiler/test/stdlib/path.test.gr b/compiler/test/stdlib/path.test.gr index e38d55d0be..ea7b0cead9 100644 --- a/compiler/test/stdlib/path.test.gr +++ b/compiler/test/stdlib/path.test.gr @@ -92,14 +92,17 @@ let parseFileTests = [ }, ] -List.forEach(({ pathStr, expParent, expStr, expName, expStem, expExt }) => { - let path = fs(pathStr) - assert Path.toString(path) == expStr - assert fs(expParent) == Path.parent(path) - assert expName == Path.basename(path) - assert Ok(expStem) == Path.stem(path) - assert Ok(expExt) == Path.extension(path) -}, parseFileTests) +List.forEach( + ({ pathStr, expParent, expStr, expName, expStem, expExt }) => { + let path = fs(pathStr) + assert Path.toString(path) == expStr + assert fs(expParent) == Path.parent(path) + assert expName == Path.basename(path) + assert Ok(expStem) == Path.stem(path) + assert Ok(expExt) == Path.extension(path) + }, + parseFileTests +) record ParseDirTestData { pathStr: String, @@ -110,12 +113,7 @@ record ParseDirTestData { let parseDirTests = [ { pathStr: "dir/../../", expParent: "../..", expStr: "../", expName: None }, - { - pathStr: ".git/", - expParent: ".", - expStr: "./.git/", - expName: Some(".git"), - }, + { pathStr: ".git/", expParent: ".", expStr: "./.git/", expName: Some(".git") }, { pathStr: ".", expParent: "..", expStr: "./", expName: None }, { pathStr: ".////", expParent: "..", expStr: "./", expName: None }, { pathStr: "../", expParent: "../..", expStr: "../", expName: None }, @@ -132,12 +130,15 @@ let parseDirTests = [ { pathStr: "c:/.././..", expParent: "c:/", expStr: "c:/", expName: None }, ] -List.forEach(({ pathStr, expParent, expStr, expName }: ParseDirTestData) => { - let path = fs(pathStr) - assert Path.toString(path) == expStr - assert fs(expParent) == Path.parent(path) - assert expName == Path.basename(path) -}, parseDirTests) +List.forEach( + ({ pathStr, expParent, expStr, expName }: ParseDirTestData) => { + let path = fs(pathStr) + assert Path.toString(path) == expStr + assert fs(expParent) == Path.parent(path) + assert expName == Path.basename(path) + }, + parseDirTests +) // miscellaneous parsing tests assert fs("") == fs(".") diff --git a/compiler/test/stdlib/priorityqueue.test.gr b/compiler/test/stdlib/priorityqueue.test.gr index 61d8ccb9d6..1ca33880bf 100644 --- a/compiler/test/stdlib/priorityqueue.test.gr +++ b/compiler/test/stdlib/priorityqueue.test.gr @@ -163,9 +163,7 @@ module Immutable { let sortedList = Array.toList(sortedVals) assert PriorityQueue.drain(pqWithAll) == sortedList assert PriorityQueue.drain(maxPqWithAll) == List.reverse(sortedList) - assert PriorityQueue.drain( - PriorityQueue.fromList(Array.toList(lotsOfVals)) - ) == + assert PriorityQueue.drain(PriorityQueue.fromList(Array.toList(lotsOfVals))) == sortedList assert PriorityQueue.drain( PriorityQueue.fromList(Array.toList(lotsOfVals), compare=(a, b) => b - a) diff --git a/compiler/test/stdlib/regex.test.gr b/compiler/test/stdlib/regex.test.gr index 650e27e527..a80e42d860 100644 --- a/compiler/test/stdlib/regex.test.gr +++ b/compiler/test/stdlib/regex.test.gr @@ -263,8 +263,8 @@ assert testRegex("(((((((((a)))))))))", "a") == Some("a"), Some("a"), ], - ) - ) + ), + ), ) assert 
testRegex("multiple words of text", "uh-uh") == Ok(None) assert testRegex("multiple words", "multiple words, yeah") == @@ -299,17 +299,14 @@ assert testRegex("(a)+x", "aaax") == Ok(Some(("aaax", [> Some("a")]))) assert testRegex("([ac])+x", "aacx") == Ok(Some(("aacx", [> Some("c")]))) assert testRegex("([^/]*/)*sub1/", "d:msgs/tdir/sub1/trial/away.cpp") == Ok(Some(("d:msgs/tdir/sub1/", [> Some("tdir/")]))) -assert testRegex( - "([^.]*)\\.([^:]*):[T ]+(.*)", - "track1.title:TBlah blah blah" -) == +assert testRegex("([^.]*)\\.([^:]*):[T ]+(.*)", "track1.title:TBlah blah blah") == Ok( Some( ( "track1.title:TBlah blah blah", [> Some("track1"), Some("title"), Some("Blah blah blah")], - ) - ) + ), + ), ) assert testRegex("([^N]*N)+", "abNNxyzN") == Ok(Some(("abNNxyzN", [> Some("xyzN")]))) @@ -461,8 +458,8 @@ assert testRegex("((((((((((a))))))))))", "a") == Some("a"), Some("a"), ], - ) - ) + ), + ), ) assert testRegex("((((((((((a))))))))))\\10", "aa") == Ok( @@ -481,8 +478,8 @@ assert testRegex("((((((((((a))))))))))\\10", "aa") == Some("a"), Some("a"), ], - ) - ) + ), + ), ) assert Result.isErr(testRegex("((((((((((a))))))))))\\41", "")) // NYI (case-insensitive): @@ -503,8 +500,8 @@ assert testRegex("(((((((((a)))))))))", "a") == Some("a"), Some("a"), ], - ) - ) + ), + ), ) assert testRegex("multiple words of text", "uh-uh") == Ok(None) assert testRegex("multiple words", "multiple words, yeah") == @@ -747,15 +744,15 @@ assert replace(unwrapResult(make("b(ar)")), "foo bar bar", "baza$1$$") == "foo bazaar$ bar" // *All variants -assert List.map(mr => flattenResult(mr), findAll( - unwrapResult(make("x.")), - "12x4x6" -)) == +assert List.map( + mr => flattenResult(mr), + findAll(unwrapResult(make("x.")), "12x4x6") +) == [("x4", [>]), ("x6", [>])] -assert List.map(mr => flattenResultPositions(mr), findAll( - unwrapResult(make("x.")), - "12x4x6" -)) == +assert List.map( + mr => flattenResultPositions(mr), + findAll(unwrapResult(make("x.")), "12x4x6") +) == [((2, 4), [>]), ((4, 6), [>])] assert replaceAll(unwrapResult(make("b(ar)")), "foo bar bar", "baza$1") == @@ -797,10 +794,7 @@ assert splitAll(unwrapResult(make(".")), "abcd") == ["", "", "", "", ""] assert splitAll(unwrapResult(make("d$")), "abcd") == ["abc", ""] assert splitAll(unwrapResult(make("b(ar)(ar)")), "foo barar test") == ["foo ", "ar", "ar", " test"] -assert splitAll( - unwrapResult(make("b(ar)(ar)")), - "foo barar test barar test2" -) == +assert splitAll(unwrapResult(make("b(ar)(ar)")), "foo barar test barar test2") == ["foo ", "ar", "ar", " test ", "ar", "ar", " test2"] assert splitAll(unwrapResult(make("b((a)r)")), "bar") == ["", "ar", "a", ""] assert splitAll(unwrapResult(make("b(((((a))))r)")), "bar") == diff --git a/compiler/test/stdlib/result.test.gr b/compiler/test/stdlib/result.test.gr index 79cbc2610a..ea50b2bf21 100644 --- a/compiler/test/stdlib/result.test.gr +++ b/compiler/test/stdlib/result.test.gr @@ -41,9 +41,10 @@ assert Result.mapWithDefault(toString, "Nope!", Err(1)) == "Nope!" 
// mapWithDefaultFn assert Result.mapWithDefaultFn(toString, x => fail "Shouldn't run", Ok(1)) == "1" -assert Result.mapWithDefaultFn(x => fail "Shouldn't run", -x => "7", -Err("Awesome") +assert Result.mapWithDefaultFn( + x => fail "Shouldn't run", + x => "7", + Err("Awesome") ) == "7" diff --git a/compiler/test/stdlib/set.test.gr b/compiler/test/stdlib/set.test.gr index afd97a14b5..7f8bd33ca8 100644 --- a/compiler/test/stdlib/set.test.gr +++ b/compiler/test/stdlib/set.test.gr @@ -349,9 +349,10 @@ module Immutable { let reduceTestSet = Set.fromList([1, 3, 2, 5, 4]) - let result = Set.reduce((acc, key) => fail "Shouldn't be called", - 0, - Set.empty + let result = Set.reduce( + (acc, key) => fail "Shouldn't be called", + 0, + Set.empty ) assert result == 0 diff --git a/compiler/test/stdlib/string.test.gr b/compiler/test/stdlib/string.test.gr index f32a60969f..35cdc80318 100644 --- a/compiler/test/stdlib/string.test.gr +++ b/compiler/test/stdlib/string.test.gr @@ -6,7 +6,8 @@ include "array" include "bytes" let fox = "The quick brown fox jumps over the lazy dog." -let emojis = "we found a path🚀 to greatness🏅but the grind never quits😤💪🏼 keep milling💯🔥😈" +let emojis = + "we found a path🚀 to greatness🏅but the grind never quits😤💪🏼 keep milling💯🔥😈" let emoji = "🌾" let short = "fox" let empty = "" @@ -211,8 +212,7 @@ assert String.replaceFirst("🌾", "Hello", "Hello Grain") == "Hello Grain" assert String.replaceFirst("Grain", "🌾", "Hello Grain") == "Hello 🌾" assert String.replaceFirst("ello", "i", "Hello Grain") == "Hi Grain" assert String.replaceFirst("i", "ello", "Hi Grain") == "Hello Grain" -assert String.replaceFirst("🌾", "🚀", "🚀🚀🚀🌾🌾🌾") == - "🚀🚀🚀🚀🌾🌾" +assert String.replaceFirst("🌾", "🚀", "🚀🚀🚀🌾🌾🌾") == "🚀🚀🚀🚀🌾🌾" // Replace Last assert String.replaceLast("Hello", "Hi", "Hey Hello World") == "Hey Hi World" assert String.replaceLast("Hello", "Hi", "Hello World") == "Hi World" @@ -226,8 +226,7 @@ assert String.replaceLast("🌾", "Hello", "Hello Grain") == "Hello Grain" assert String.replaceLast("Grain", "🌾", "Hello Grain") == "Hello 🌾" assert String.replaceLast("ello", "i", "Grain Hello") == "Grain Hi" assert String.replaceLast("i", "ello", "Grain Hi") == "Grain Hello" -assert String.replaceLast("🚀", "🌾", "🚀🚀🚀🌾🌾🌾") == - "🚀🚀🌾🌾🌾🌾" +assert String.replaceLast("🚀", "🌾", "🚀🚀🚀🌾🌾🌾") == "🚀🚀🌾🌾🌾🌾" // Replace All assert String.replaceAll("Hello", "Hi", "Hey Hello World") == "Hey Hi World" assert String.replaceAll("Hello", "Hi", "Hello World") == "Hi World" @@ -239,8 +238,7 @@ assert String.replaceAll("Hello", "Hi", "Hel") == "Hel" assert String.replaceAll("🌾", "Grain", "Hello") == "Hello" assert String.replaceAll("🌾", "Hello", "Hello Grain") == "Hello Grain" assert String.replaceAll("Grain", "🌾", "Hello Grain") == "Hello 🌾" -assert String.replaceAll("🚀", "🌾", "🚀🚀🚀🌾🌾🌾") == - "🌾🌾🌾🌾🌾🌾" +assert String.replaceAll("🚀", "🌾", "🚀🚀🚀🌾🌾🌾") == "🌾🌾🌾🌾🌾🌾" assert String.replaceAll("/", "\/", "/test/test/test/") == "\/test\/test\/test\/" assert String.replaceAll(",", "|", "test,test,test") == "test|test|test" @@ -404,13 +402,7 @@ assert String.decodeRange( emojis // but, when we include it, it should preserve it: assert String.decodeRange( - String.encodeAt( - emojis, - String.UTF16_LE, - Bytes.make(500), - 51, - includeBom=true - ), + String.encodeAt(emojis, String.UTF16_LE, Bytes.make(500), 51, includeBom=true), String.UTF16_LE, 51, 166, @@ -426,52 +418,28 @@ assert String.decodeRange( ) == emojis assert String.decodeRange( - String.encodeAt( - emojis, - String.UTF16_LE, - Bytes.make(500), - 51, - includeBom=true - ), + 
String.encodeAt(emojis, String.UTF16_LE, Bytes.make(500), 51, includeBom=true), String.UTF16_LE, 51, 166 ) == emojis assert String.decodeRange( - String.encodeAt( - emojis, - String.UTF16_BE, - Bytes.make(500), - 51, - includeBom=true - ), + String.encodeAt(emojis, String.UTF16_BE, Bytes.make(500), 51, includeBom=true), String.UTF16_BE, 51, 166 ) == emojis assert String.decodeRange( - String.encodeAt( - emojis, - String.UTF32_BE, - Bytes.make(500), - 51, - includeBom=true - ), + String.encodeAt(emojis, String.UTF32_BE, Bytes.make(500), 51, includeBom=true), String.UTF32_BE, 51, 300 ) == emojis assert String.decodeRange( - String.encodeAt( - emojis, - String.UTF32_LE, - Bytes.make(500), - 51, - includeBom=true - ), + String.encodeAt(emojis, String.UTF32_LE, Bytes.make(500), 51, includeBom=true), String.UTF32_LE, 51, 300 diff --git a/compiler/test/suites/arrays.re b/compiler/test/suites/arrays.re index 3a57e99df1..cd10e129b9 100644 --- a/compiler/test/suites/arrays.re +++ b/compiler/test/suites/arrays.re @@ -132,20 +132,25 @@ describe("arrays", ({test, testSkip}) => { statements: [ Toplevel.expr( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.array_set( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc( Identifier.IdentName(Location.mknoloc("state")), ), ), Expression.constant( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Constant.number(PConstNumberInt("0")), ), Expression.constant( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Constant.number(PConstNumberInt("5")), ), ), diff --git a/compiler/test/suites/basic_functionality.re b/compiler/test/suites/basic_functionality.re index ee617b4513..b9cd154152 100644 --- a/compiler/test/suites/basic_functionality.re +++ b/compiler/test/suites/basic_functionality.re @@ -268,6 +268,7 @@ describe("basic functionality", ({test, testSkip}) => { statements: [ Toplevel.data( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, [ ( Asttypes.NotProvided, @@ -290,11 +291,13 @@ describe("basic functionality", ({test, testSkip}) => { ), ], ), + Location.dummy_loc, ), ], ), Toplevel.let_( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Asttypes.NotProvided, Asttypes.Nonrecursive, Asttypes.Immutable, @@ -307,6 +310,7 @@ describe("basic functionality", ({test, testSkip}) => { ), Expression.constant( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Constant.string("pikachu"), ), ), @@ -314,6 +318,7 @@ describe("basic functionality", ({test, testSkip}) => { ), Toplevel.data( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, [ ( Asttypes.NotProvided, @@ -331,6 +336,7 @@ describe("basic functionality", ({test, testSkip}) => { ), ), ), + Location.dummy_loc, ), ], ), diff --git a/compiler/test/suites/blocks.re b/compiler/test/suites/blocks.re index 535e6e38f9..611000b835 100644 --- a/compiler/test/suites/blocks.re +++ b/compiler/test/suites/blocks.re @@ -16,11 +16,14 @@ describe("blocks", ({test}) => { statements: [ Toplevel.expr( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.block( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, [ Expression.singleton_construct( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc( Identifier.IdentName(Location.mknoloc("Foo")), ), diff --git a/compiler/test/suites/chars.re b/compiler/test/suites/chars.re index 7b7b474ef7..1e43e42926 100644 --- a/compiler/test/suites/chars.re +++ b/compiler/test/suites/chars.re @@ 
-35,7 +35,8 @@ describe("chars", ({test, testSkip}) => { }; let char = (~loc=?, s) => { let loc = Option.value(~default=Location.dummy_loc, loc); - Toplevel.expr(~loc) @@ Expression.constant(~loc, Constant.char(s)); + Toplevel.expr(~loc) @@ + Expression.constant(~loc, ~core_loc=loc, Constant.char(s)); }; assertRun("char1", "print('A')", "A\n"); @@ -117,6 +118,7 @@ Did you mean to create the string "\{\\"test\\": 1\}" instead?|}, statements: [ char( ~loc=mk_loc("char_loc_simple", (2, 12, 12), (2, 15, 12)), + ~core_loc=mk_loc("char_loc_simple", (2, 12, 12), (2, 15, 12)), "a", ), ], @@ -136,6 +138,7 @@ Did you mean to create the string "\{\\"test\\": 1\}" instead?|}, statements: [ char( ~loc=mk_loc("char_loc_code", (2, 12, 12), (2, 23, 12)), + ~core_loc=mk_loc("char_loc_code", (2, 12, 12), (2, 23, 12)), "🏴", ), ], @@ -155,6 +158,7 @@ Did you mean to create the string "\{\\"test\\": 1\}" instead?|}, statements: [ char( ~loc=mk_loc("char_loc_emoji", (2, 12, 12), (2, 15, 12)), + ~core_loc=mk_loc("char_loc_emoji", (2, 12, 12), (2, 15, 12)), "💯", ), ], diff --git a/compiler/test/suites/formatter.re b/compiler/test/suites/formatter.re index 28c6f50a6a..318aeb74bf 100644 --- a/compiler/test/suites/formatter.re +++ b/compiler/test/suites/formatter.re @@ -55,4 +55,5 @@ describe("formatter", ({test, testSkip}) => { assertFormatOutput("chained", "chained"); assertFormatOutput("grouped_expr", "grouped_expr"); assertFormatOutput("early_return", "early_return"); + assertFormatOutput("empty", "empty"); }); diff --git a/compiler/test/suites/parsing.re b/compiler/test/suites/parsing.re index 5530e9a2d1..ffeff6e3fe 100644 --- a/compiler/test/suites/parsing.re +++ b/compiler/test/suites/parsing.re @@ -20,16 +20,19 @@ describe("parsing", ({test, testSkip}) => { let a = Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc(Identifier.IdentName(Location.mknoloc("a"))), ); let b = Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc(Identifier.IdentName(Location.mknoloc("b"))), ); let c = Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc(Identifier.IdentName(Location.mknoloc("c"))), ); let unlabled_expr = expr => { @@ -46,10 +49,13 @@ describe("parsing", ({test, testSkip}) => { statements: [ Toplevel.expr( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.apply( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc( Identifier.IdentName(Location.mknoloc(op)), ), @@ -118,10 +124,13 @@ describe("parsing", ({test, testSkip}) => { statements: [ Toplevel.expr( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.apply( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc( Identifier.IdentName(Location.mknoloc("+++")), ), @@ -131,8 +140,10 @@ describe("parsing", ({test, testSkip}) => { unlabled_expr( Expression.apply( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc( Identifier.IdentName(Location.mknoloc("***")), ), @@ -156,10 +167,13 @@ describe("parsing", ({test, testSkip}) => { statements: [ Toplevel.expr( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.apply( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.ident( ~loc=Location.dummy_loc, + 
~core_loc=Location.dummy_loc, Location.mknoloc( Identifier.IdentName(Location.mknoloc("&&--")), ), @@ -169,8 +183,10 @@ describe("parsing", ({test, testSkip}) => { unlabled_expr( Expression.apply( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc( Identifier.IdentName(Location.mknoloc("&--")), ), @@ -194,10 +210,13 @@ describe("parsing", ({test, testSkip}) => { statements: [ Toplevel.expr( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.apply( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc( Identifier.IdentName(Location.mknoloc("||--")), ), @@ -207,8 +226,10 @@ describe("parsing", ({test, testSkip}) => { unlabled_expr( Expression.apply( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc( Identifier.IdentName(Location.mknoloc("|--")), ), @@ -232,10 +253,13 @@ describe("parsing", ({test, testSkip}) => { statements: [ Toplevel.expr( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.apply( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc( Identifier.IdentName(Location.mknoloc(">>")), ), @@ -244,8 +268,10 @@ describe("parsing", ({test, testSkip}) => { unlabled_expr( Expression.apply( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.ident( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Location.mknoloc( Identifier.IdentName(Location.mknoloc("<<")), ), @@ -270,11 +296,14 @@ describe("parsing", ({test, testSkip}) => { statements: [ Toplevel.expr( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Expression.return( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, Some( Expression.constant( ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, PConstNumber(PConstNumberInt("-1")), ), ), @@ -354,8 +383,16 @@ describe("parsing", ({test, testSkip}) => { { module_name: Location.mknoloc("Test"), statements: [ - Toplevel.expr(~loc=Location.dummy_loc, a), - Toplevel.expr(~loc=Location.dummy_loc, b), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + a, + ), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + b, + ), ], comments: [], prog_loc: Location.dummy_loc, @@ -367,8 +404,16 @@ describe("parsing", ({test, testSkip}) => { { module_name: Location.mknoloc("Test"), statements: [ - Toplevel.expr(~loc=Location.dummy_loc, a), - Toplevel.expr(~loc=Location.dummy_loc, b), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + a, + ), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + b, + ), ], comments: [], prog_loc: Location.dummy_loc, @@ -380,8 +425,16 @@ describe("parsing", ({test, testSkip}) => { { module_name: Location.mknoloc("Test"), statements: [ - Toplevel.expr(~loc=Location.dummy_loc, a), - Toplevel.expr(~loc=Location.dummy_loc, b), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + a, + ), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + b, + ), ], comments: [], prog_loc: Location.dummy_loc, @@ -393,8 +446,16 @@ describe("parsing", ({test, testSkip}) => { { module_name: Location.mknoloc("Test"), statements: [ - Toplevel.expr(~loc=Location.dummy_loc, a), - 
Toplevel.expr(~loc=Location.dummy_loc, b), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + a, + ), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + b, + ), ], comments: [], prog_loc: Location.dummy_loc, @@ -406,8 +467,16 @@ describe("parsing", ({test, testSkip}) => { { module_name: Location.mknoloc("Test"), statements: [ - Toplevel.expr(~loc=Location.dummy_loc, a), - Toplevel.expr(~loc=Location.dummy_loc, b), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + a, + ), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + b, + ), ], comments: [], prog_loc: Location.dummy_loc, @@ -419,8 +488,16 @@ describe("parsing", ({test, testSkip}) => { { module_name: Location.mknoloc("Test"), statements: [ - Toplevel.expr(~loc=Location.dummy_loc, a), - Toplevel.expr(~loc=Location.dummy_loc, b), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + a, + ), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + b, + ), ], comments: [], prog_loc: Location.dummy_loc, @@ -432,8 +509,16 @@ describe("parsing", ({test, testSkip}) => { { module_name: Location.mknoloc("Test"), statements: [ - Toplevel.expr(~loc=Location.dummy_loc, a), - Toplevel.expr(~loc=Location.dummy_loc, b), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + a, + ), + Toplevel.expr( + ~loc=Location.dummy_loc, + ~core_loc=Location.dummy_loc, + b, + ), ], comments: [], prog_loc: Location.dummy_loc, diff --git a/compiler/test/suites/strings.re b/compiler/test/suites/strings.re index c5393a96bf..7e2633afb8 100644 --- a/compiler/test/suites/strings.re +++ b/compiler/test/suites/strings.re @@ -37,7 +37,8 @@ describe("strings", ({test, testSkip}) => { }; let str = (~loc=?, s) => { let loc = Option.value(~default=Location.dummy_loc, loc); - Toplevel.expr(~loc) @@ Expression.constant(~loc, Constant.string(s)); + Toplevel.expr(~loc, ~core_loc=loc) @@ + Expression.constant(~loc, ~core_loc=loc, Constant.string(s)); }; assertParse( "string_parse_dqs1", diff --git a/stdlib/array.gr b/stdlib/array.gr index 390ccb2e11..bfde2b7e58 100644 --- a/stdlib/array.gr +++ b/stdlib/array.gr @@ -10,7 +10,6 @@ * @history v0.1.0: Originally named `arrays` * @history v0.2.0: Renamed to `array` */ - module Array include "number" @@ -867,8 +866,10 @@ provide let filteri = (fn, array) => { */ provide let unique = array => { // TODO(#1651): improve performance - filteri((el, index) => - findIndex(value => value == el, array) == Some(index), array) + filteri( + (el, index) => findIndex(value => value == el, array) == Some(index), + array + ) } /** @@ -1272,7 +1273,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let empty = { let empty = { length: 0, shift: branchingBits, root: [>], tail: [>] } empty @@ -1294,7 +1294,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let isEmpty = array => array.length == 0 /** @@ -1310,7 +1309,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let length = array => array.length let tailIndex = length => { @@ -1375,7 +1373,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let get = (index, array) => { let index = wrapNegativeIndex(array.length, index) @@ -1418,7 +1415,6 @@ provide module 
Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let set = (index, value, array) => { let index = wrapNegativeIndex(array.length, index) @@ -1596,7 +1592,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let append = (array1, array2) => { // Magic number of 4 determined best from benchmarks according to Elm's // Array implementation @@ -1639,7 +1634,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let concat = arrays => { List.reduce(append, empty, arrays) } @@ -1663,7 +1657,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let init = (length, fn) => { let tailLen = length % branchingFactor let btail = mutInit(tailLen, i => fn(length - tailLen + i)) @@ -1694,7 +1687,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let make = (length, value) => { init(length, (_) => value) } @@ -1715,7 +1707,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let forEach = (fn, array) => { let rec forEachFn = node => { match (node) { @@ -1745,7 +1736,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let cycle = (fn, n, array) => { for (let mut i = 0; i < n; i += 1) { forEach(fn, array) @@ -1769,7 +1759,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let map = (fn, array) => { let rec mapFn = node => { match (node) { @@ -1805,7 +1794,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let reduce = (fn, initial, array) => { let rec reduceFn = (acc, node) => { match (node) { @@ -1840,7 +1828,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let reduceRight = (fn, initial, array) => { let rec reduceFn = (node, acc) => { match (node) { @@ -1872,7 +1859,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let flatMap = (fn, array) => { reduce((acc, x) => append(acc, fn(x)), empty, array) } @@ -1891,7 +1877,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let fromList = list => { let rec fromListInner = (list, nodes, numNodes) => { let node = mutFromList(List.take(branchingFactor, list)) @@ -1921,7 +1906,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let toList = array => { reduceRight((val, list) => [val, ...list], [], array) } @@ -1944,7 +1928,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let filter = (fn, array) => { fromList( reduceRight((x, arr) => if (fn(x)) [x, ...arr] else arr, [], array) @@ -1971,7 +1954,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let every = (fn, array) => { reduce((acc, x) => acc && fn(x), true, array) } @@ -1996,7 +1978,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let some = (fn, array) => 
{ reduce((acc, x) => acc || fn(x), false, array) } @@ -2016,7 +1997,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let reverse = array => { fromList(reduce((acc, x) => [x, ...acc], [], array)) } @@ -2041,7 +2021,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let contains = (search, array) => { reduce((acc, x) => acc || x == search, false, array) } @@ -2065,7 +2044,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let find = (fn, array) => { reduce((acc, x) => if (acc == None && fn(x)) Some(x) else acc, None, array) } @@ -2089,7 +2067,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let findIndex = (fn, array) => { let mut i = -1 reduce((acc, x) => { @@ -2115,13 +2092,10 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let product = (array1, array2) => { - fromList( - reduceRight((x1, list) => { - reduceRight((x2, list) => [(x1, x2), ...list], list, array2) - }, [], array1) - ) + fromList(reduceRight((x1, list) => { + reduceRight((x2, list) => [(x1, x2), ...list], list, array2) + }, [], array1)) } /** @@ -2139,7 +2113,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let count = (fn, array) => { reduce((acc, x) => if (fn(x)) acc + 1 else acc, 0, array) } @@ -2159,7 +2132,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let unique = array => { // TODO(#1651): improve performance fromList(List.unique(toList(array))) @@ -2186,7 +2158,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let zip = (array1, array2) => { fromList(List.zip(toList(array1), toList(array2))) } @@ -2220,7 +2191,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let zipWith = (fn, array1, array2) => { fromList(List.zipWith(fn, toList(array1), toList(array2))) } @@ -2241,7 +2211,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let unzip = array => { let (list1, list2) = List.unzip(toList(array)) (fromList(list1), fromList(list2)) @@ -2262,7 +2231,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let join = (separator, array) => { // TODO(#728): Improve performance here with buffer approach let iter = (acc, str) => { @@ -2306,17 +2274,14 @@ provide module Immutable { * @history v0.5.4: Originally in `"immutablearray"` module * @history v0.6.0: Default `end` to the Array length */ - provide let slice = (start, end=length(array), array) => { let begin = clampIndex(array.length, start) let end = clampIndex(array.length, end) let mut i = array.length - fromList( - reduceRight((x, acc) => { - i -= 1 - if (i >= begin && i < end) [x, ...acc] else acc - }, [], array) - ) + fromList(reduceRight((x, acc) => { + i -= 1 + if (i >= begin && i < end) [x, ...acc] else acc + }, [], array)) } /** @@ -2335,7 +2300,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module, with `compare` being a required argument */ - provide let 
sort = (compare=compare, array) => { fromList(List.sort(compare=compare, toList(array))) } @@ -2362,7 +2326,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablearray"` module */ - provide let rotate = (n, array) => { let sliceI = if (array.length == 0) 0 else n % array.length let before = slice(0, end=sliceI, array) diff --git a/stdlib/bigint.gr b/stdlib/bigint.gr index 4ac1fcdbcd..4c51d40fc7 100644 --- a/stdlib/bigint.gr +++ b/stdlib/bigint.gr @@ -9,7 +9,6 @@ * * @since v0.5.0 */ - module BigInt include "runtime/unsafe/wasmi32" @@ -105,9 +104,8 @@ provide let abs = (num: BigInt) => { */ @unsafe provide let (+) = (num1: BigInt, num2: BigInt) => { - WasmI32.toGrain( - BI.add(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2)) - ): BigInt + WasmI32.toGrain(BI.add(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2))): + BigInt } /** @@ -126,9 +124,8 @@ provide let (+) = (num1: BigInt, num2: BigInt) => { */ @unsafe provide let (-) = (num1: BigInt, num2: BigInt) => { - WasmI32.toGrain( - BI.sub(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2)) - ): BigInt + WasmI32.toGrain(BI.sub(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2))): + BigInt } /** @@ -147,9 +144,8 @@ provide let (-) = (num1: BigInt, num2: BigInt) => { */ @unsafe provide let (*) = (num1: BigInt, num2: BigInt) => { - WasmI32.toGrain( - BI.mul(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2)) - ): BigInt + WasmI32.toGrain(BI.mul(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2))): + BigInt } // For further reading on Truncated vs. Floored division: https://en.wikipedia.org/wiki/Modulo_operation @@ -171,9 +167,8 @@ provide let (*) = (num1: BigInt, num2: BigInt) => { */ @unsafe provide let (/) = (num1: BigInt, num2: BigInt) => { - WasmI32.toGrain( - BI.quot(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2)) - ): BigInt + WasmI32.toGrain(BI.quot(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2))): + BigInt } /** @@ -190,9 +185,8 @@ provide let (/) = (num1: BigInt, num2: BigInt) => { */ @unsafe provide let rem = (num1: BigInt, num2: BigInt) => { - WasmI32.toGrain( - BI.rem(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2)) - ): BigInt + WasmI32.toGrain(BI.rem(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2))): + BigInt } /** @@ -229,9 +223,8 @@ provide let quotRem = (num1: BigInt, num2: BigInt) => { */ @unsafe provide let gcd = (num1: BigInt, num2: BigInt) => { - WasmI32.toGrain( - BI.gcd(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2)) - ): BigInt + WasmI32.toGrain(BI.gcd(WasmI32.fromGrain(num1), WasmI32.fromGrain(num2))): + BigInt } /** @@ -541,9 +534,8 @@ provide let (^) = (num1: BigInt, num2: BigInt) => { */ @unsafe provide let clz = (num: BigInt) => { - WasmI32.toGrain( - DS.newInt32(BI.countLeadingZeros(WasmI32.fromGrain(num))) - ): Int32 + WasmI32.toGrain(DS.newInt32(BI.countLeadingZeros(WasmI32.fromGrain(num)))): + Int32 } /** @@ -558,9 +550,8 @@ provide let clz = (num: BigInt) => { */ @unsafe provide let ctz = (num: BigInt) => { - WasmI32.toGrain( - DS.newInt64(BI.countTrailingZeros(WasmI32.fromGrain(num))) - ): Int64 + WasmI32.toGrain(DS.newInt64(BI.countTrailingZeros(WasmI32.fromGrain(num)))): + Int64 } /** diff --git a/stdlib/buffer.gr b/stdlib/buffer.gr index 6aa4641955..d991bdc9ee 100644 --- a/stdlib/buffer.gr +++ b/stdlib/buffer.gr @@ -9,7 +9,6 @@ * * @since v0.4.0 */ - module Buffer include "runtime/unsafe/memory" @@ -430,11 +429,11 @@ provide let addChar = (char, buffer) => { autogrow(bytelen, buffer) let off = coerceNumberToWasmI32(buffer.len) let dst = WasmI32.fromGrain(buffer.data) + 
_VALUE_OFFSET - WasmI32.store8(dst, (usv >>> 6n * count) + offset, off) + WasmI32.store8(dst, (usv >>> (6n * count)) + offset, off) let mut n = 0n while (count > 0n) { n += 1n - let temp = usv >>> 6n * (count - 1n) + let temp = usv >>> (6n * (count - 1n)) WasmI32.store8(dst + n, 0x80n | temp & 0x3Fn, off) count -= 1n } @@ -514,13 +513,12 @@ provide let addStringSlice = (start: Number, end, string, buffer) => { * @since v0.4.0 */ @unsafe -provide let addBytesSlice = - ( - start: Number, - length: Number, - bytes: Bytes, - buffer: Buffer, - ) => { +provide let addBytesSlice = ( + start: Number, + length: Number, + bytes: Bytes, + buffer: Buffer, +) => { if (length != 0) { from WasmI32 use { (-), (<), (>), (>=) } diff --git a/stdlib/bytes.gr b/stdlib/bytes.gr index 4dd57a384e..ab0c7317eb 100644 --- a/stdlib/bytes.gr +++ b/stdlib/bytes.gr @@ -8,7 +8,6 @@ * * @since v0.3.2 */ - module Bytes include "runtime/unsafe/memory" @@ -203,7 +202,7 @@ provide let slice = (start: Number, length: Number, bytes: Bytes) => { let length = coerceNumberToWasmI32(length) if (start + length > size) { throw Exception.InvalidArgument( - "The given index and length do not specify a valid range of bytes" + "The given index and length do not specify a valid range of bytes", ) } let dst = allocateBytes(length) @@ -288,14 +287,13 @@ provide let resize = (left: Number, right: Number, bytes: Bytes) => { * @since v0.3.2 */ @unsafe -provide let move = - ( - srcIndex: Number, - dstIndex: Number, - length: Number, - src: Bytes, - dst: Bytes, - ) => { +provide let move = ( + srcIndex: Number, + dstIndex: Number, + length: Number, + src: Bytes, + dst: Bytes, +) => { from WasmI32 use { (+), (>) } let srcIndexOrig = srcIndex let dstIndexOrig = dstIndex diff --git a/stdlib/char.gr b/stdlib/char.gr index 3b23dab12f..d7174df494 100644 --- a/stdlib/char.gr +++ b/stdlib/char.gr @@ -11,7 +11,6 @@ * * @since v0.3.0 */ - module Char include "runtime/unsafe/wasmi32" @@ -197,12 +196,12 @@ provide let toString = (char: Char) => { offset = 0xF0n } let string = allocateString(count + 1n) - WasmI32.store8(string, (usv >>> 6n * count) + offset, 8n) + WasmI32.store8(string, (usv >>> (6n * count)) + offset, 8n) let mut n = 0n while (count > 0n) { n += 1n - let temp = usv >>> 6n * (count - 1n) + let temp = usv >>> (6n * (count - 1n)) WasmI32.store8(string + n, 0x80n | temp & 0x3Fn, 8n) count -= 1n } @@ -340,8 +339,7 @@ provide let isAsciiDigit = char => char >= '0' && char <= '9' * @since v0.6.0 */ provide let isAsciiAlpha = char => - char >= 'a' && char <= 'z' || - char >= 'A' && char <= 'Z' + char >= 'a' && char <= 'z' || char >= 'A' && char <= 'Z' /** * Converts the character to ASCII lowercase if it is an ASCII uppercase character. 
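The buffer.gr and char.gr hunks above show the new formatter printing explicit parentheses around the shift amount in the UTF-8 encoding helpers, e.g. `usv >>> (6n * count)`. The rewrite is purely cosmetic: `*` binds tighter than the shift operators, so both spellings parse the same. A minimal sketch of that equivalence, using plain Number operands from Pervasives rather than the WasmI32 code in the diff (names are illustrative only):

// Sketch only, not part of this diff; assumes Pervasives' Number shift operators.
let usv = 0x1F600
let count = 3
let implicitParens = usv >> 6 * count
let explicitParens = usv >> (6 * count)
assert implicitParens == explicitParens
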
diff --git a/stdlib/exception.gr b/stdlib/exception.gr index effe3f7b3f..b551e7f495 100644 --- a/stdlib/exception.gr +++ b/stdlib/exception.gr @@ -10,7 +10,6 @@ * * @since v0.3.0 */ - module Exception include "runtime/unsafe/wasmi32" diff --git a/stdlib/float32.gr b/stdlib/float32.gr index 834450416a..98204c5603 100644 --- a/stdlib/float32.gr +++ b/stdlib/float32.gr @@ -5,7 +5,6 @@ * * @since v0.2.0 */ - module Float32 include "runtime/unsafe/wasmi32" diff --git a/stdlib/float64.gr b/stdlib/float64.gr index 502da340bf..14cd506a65 100644 --- a/stdlib/float64.gr +++ b/stdlib/float64.gr @@ -5,7 +5,6 @@ * * @since v0.2.0 */ - module Float64 include "runtime/unsafe/wasmi32" diff --git a/stdlib/hash.gr b/stdlib/hash.gr index 202da328ca..75690f8d1a 100644 --- a/stdlib/hash.gr +++ b/stdlib/hash.gr @@ -5,14 +5,12 @@ * * @since v0.1.0 */ - module Hash /** This module implements MurmurHash3 for Grain data types. https://en.wikipedia.org/wiki/MurmurHash */ - include "runtime/unsafe/wasmi32" from WasmI32 use { (+), @@ -114,13 +112,12 @@ let rec hashOne = (val, depth) => { } else if ((val & Tags._GRAIN_NUMBER_TAG_MASK) != 0n) { hash32(val) } else if ( - (val & Tags._GRAIN_GENERIC_TAG_MASK) == Tags._GRAIN_GENERIC_HEAP_TAG_TYPE + (val & Tags._GRAIN_GENERIC_TAG_MASK) == + Tags._GRAIN_GENERIC_HEAP_TAG_TYPE ) { let heapPtr = val match (WasmI32.load(heapPtr, 0n)) { - t when ( - t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG - ) => { + t when t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG => { let length = WasmI32.load(heapPtr, 4n) let extra = length % 4n let l = length - extra @@ -215,11 +212,9 @@ let rec hashOne = (val, depth) => { }, } }, - t when ( - t == Tags._GRAIN_INT32_HEAP_TAG || + t when t == Tags._GRAIN_INT32_HEAP_TAG || t == Tags._GRAIN_FLOAT32_HEAP_TAG || - t == Tags._GRAIN_UINT32_HEAP_TAG - ) => { + t == Tags._GRAIN_UINT32_HEAP_TAG => { hash32(WasmI32.load(heapPtr, 4n)) }, t when t == Tags._GRAIN_UINT64_HEAP_TAG => { diff --git a/stdlib/int16.gr b/stdlib/int16.gr index 551fd89141..d9cd8df0d7 100644 --- a/stdlib/int16.gr +++ b/stdlib/int16.gr @@ -4,7 +4,6 @@ * * @since v0.6.0 */ - module Int16 include "runtime/unsafe/wasmi32" diff --git a/stdlib/int32.gr b/stdlib/int32.gr index fde13e7c5f..69d5c8568f 100644 --- a/stdlib/int32.gr +++ b/stdlib/int32.gr @@ -5,7 +5,6 @@ * * @since v0.2.0 */ - module Int32 include "runtime/unsafe/wasmi32" @@ -542,6 +541,8 @@ let rec expBySquaring = (y, x, n) => { * @since v0.6.0 */ provide let (**) = (base, power) => { - if (power < 0l) return expBySquaring(1l, 1l / base, power * -1l) - else return expBySquaring(1l, base, power) + if (power < 0l) + return expBySquaring(1l, 1l / base, power * -1l) + else + return expBySquaring(1l, base, power) } diff --git a/stdlib/int64.gr b/stdlib/int64.gr index d063dfb37d..ea9cf5aca7 100644 --- a/stdlib/int64.gr +++ b/stdlib/int64.gr @@ -5,7 +5,6 @@ * * @since v0.2.0 */ - module Int64 include "runtime/unsafe/wasmi32" @@ -529,6 +528,8 @@ let rec expBySquaring = (y, x, n) => { * @since v0.6.0 */ provide let (**) = (base, power) => { - if (power < 0L) return expBySquaring(1L, 1L / base, power * -1L) - else return expBySquaring(1L, base, power) + if (power < 0L) + return expBySquaring(1L, 1L / base, power * -1L) + else + return expBySquaring(1L, base, power) } diff --git a/stdlib/int8.gr b/stdlib/int8.gr index 05a7053d37..d61e5ae42e 100644 --- a/stdlib/int8.gr +++ b/stdlib/int8.gr @@ -7,7 +7,6 @@ * * @since v0.6.0 */ - module Int8 include "runtime/unsafe/wasmi32" diff --git a/stdlib/list.gr 
b/stdlib/list.gr index 96b42ccf88..ea0dee44c9 100644 --- a/stdlib/list.gr +++ b/stdlib/list.gr @@ -7,7 +7,6 @@ * @history v0.1.0: Originally named `lists` * @history v0.2.0: Renamed to `list` */ - module List /** @@ -344,8 +343,10 @@ provide let filteri = (fn, list) => { match (list) { [] => [], [first, ...rest] => - if (fn(first, index)) [first, ...iter(fn, rest, index + 1)] - else iter(fn, rest, index + 1), + if (fn(first, index)) + [first, ...iter(fn, rest, index + 1)] + else + iter(fn, rest, index + 1), } } iter(fn, list, 0) @@ -469,8 +470,10 @@ provide let rec insert = (index, value, list) => { match (list) { [] => if (index == 0) [value] else fail "insert index is out-of-bounds", [first, ...rest] => - if (index == 0) [value, ...list] - else [first, ...insert(index - 1, value, rest)], + if (index == 0) + [value, ...list] + else + [first, ...insert(index - 1, value, rest)], } } } @@ -516,8 +519,10 @@ provide let part = (count, list) => { [] => if (count > 0) fail "part count is out-of-bounds" else (list1, list2), [first, ...rest] => - if (count > 0) iter([first, ...list1], rest, count - 1) - else (list1, list2), + if (count > 0) + iter([first, ...list1], rest, count - 1) + else + (list1, list2), } } let (pt1, pt2) = iter([], list, count) @@ -556,8 +561,10 @@ provide let rotate = (n, list) => { match (list2) { [] => if (n > 0) None else Some((list1, list2)), [first, ...rest] => - if (n > 0) iter([first, ...list1], rest, n - 1) - else Some((list1, list2)), + if (n > 0) + iter([first, ...list1], rest, n - 1) + else + Some((list1, list2)), } } let res = iter([], list, n) @@ -587,8 +594,10 @@ provide let unique = list => { match (list) { [] => reverse(acc), [first, ...rest] => - if (contains(first, acc)) iter(rest, acc) - else iter(rest, [first, ...acc]), + if (contains(first, acc)) + iter(rest, acc) + else + iter(rest, [first, ...acc]), } } iter(list, []) @@ -662,9 +671,13 @@ provide let zipWith = (fn, list1, list2) => { * @since v0.5.3 */ provide let unzip = list => { - reduceRight(((first, second), (firstUnzipped, secondUnzipped)) => { - ([first, ...firstUnzipped], [second, ...secondUnzipped]) - }, ([], []), list) + reduceRight( + ((first, second), (firstUnzipped, secondUnzipped)) => { + ([first, ...firstUnzipped], [second, ...secondUnzipped]) + }, + ([], []), + list + ) } /** diff --git a/stdlib/map.gr b/stdlib/map.gr index 2da720e366..d94125e1b9 100644 --- a/stdlib/map.gr +++ b/stdlib/map.gr @@ -6,7 +6,6 @@ * * @since v0.2.0 */ - module Map include "list" @@ -51,10 +50,7 @@ provide record InternalMapStats { * @since v0.2.0 * @history v0.6.0: Merged with `makeSized`; modified signature to accept size */ -provide let make = - ( - size=16, - ) => { // TODO: This could take an `eq` function to custom comparisons +provide let make = (size=16) => { // TODO: This could take an `eq` function to custom comparisons let buckets = Array.make(size, None) { size: 0, buckets } } @@ -479,11 +475,15 @@ provide let fromArray = array => { * @since v0.2.0 */ provide let filter = (fn, map) => { - let keysToRemove = reduce((list, key, value) => if (!fn(key, value)) { - [key, ...list] - } else { - list - }, [], map) + let keysToRemove = reduce( + (list, key, value) => if (!fn(key, value)) { + [key, ...list] + } else { + list + }, + [], + map + ) List.forEach(key => { remove(key, map) }, keysToRemove) @@ -546,7 +546,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let empty = Empty // returns the key-value pair of the minimum key in a tree 
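The map.gr hunk above reformats `filter` without changing its shape: it first reduces over the map to collect the keys to drop and only afterwards removes them, so the bucket array is never mutated while it is being traversed. A small usage-level sketch of the same collect-then-remove pattern against the public Map API (the `removeWhere` name is hypothetical and not part of this diff):

include "map"
include "list"

// Sketch only: gather the keys first, then mutate the map.
let removeWhere = (shouldDrop, map) => {
  let keysToRemove = Map.reduce(
    (list, key, value) => if (shouldDrop(key, value)) [key, ...list] else list,
    [],
    map
  )
  List.forEach(key => Map.remove(key, map), keysToRemove)
}
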
@@ -567,7 +566,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let size = map => { match (map) { Empty => 0, @@ -584,7 +582,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let isEmpty = map => { match (map) { Empty => true, @@ -680,7 +677,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let rec set = (key, value, map) => { match (map) { Empty => Tree({ key, val: value, size: 1, left: Empty, right: Empty }), @@ -706,7 +702,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let rec get = (key, map) => { match (map) { Empty => None, @@ -730,7 +725,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let rec contains = (key, map) => { Option.isSome(get(key, map)) } @@ -768,7 +762,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let rec remove = (key, map) => { match (map) { Empty => Empty, @@ -799,7 +792,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let update = (key, fn, map) => { let val = get(key, map) match (fn(val)) { @@ -817,7 +809,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let forEach = (fn, map) => { let rec forEachInner = node => { match (node) { @@ -843,7 +834,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let reduce = (fn, init, map) => { let rec reduceInner = (acc, node) => { match (node) { @@ -863,20 +853,10 @@ provide module Immutable { match ((left, right)) { (Empty, node) | (node, Empty) => set(key, val, node), (Tree(left) as leftOpt, Tree(right) as rightOpt) => { - let { - size: lSize, - key: lKey, - left: lLeft, - right: lRight, - val: lVal, - } = left - let { - size: rSize, - key: rKey, - left: rLeft, - right: rRight, - val: rVal, - } = right + let { size: lSize, key: lKey, left: lLeft, right: lRight, val: lVal } = + left + let { size: rSize, key: rKey, left: rLeft, right: rRight, val: rVal } = + right if (weight * lSize < rSize) { balancedNode(rKey, rVal, concat3(key, val, leftOpt, rLeft), rRight) } else if (weight * rSize < lSize) { @@ -921,7 +901,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let keys = map => { reduceRight((list, key, _) => [key, ...list], [], map) } @@ -935,7 +914,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let values = map => { reduceRight((list, _, value) => [value, ...list], [], map) } @@ -950,7 +928,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let filter = (fn, map) => { let rec filterInner = node => { match (node) { @@ -977,7 +954,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let reject = (fn, map) => { filter((key, val) => !fn(key, val), map) } @@ -991,7 +967,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let fromList = list => { List.reduce((map, (key, val)) => set(key, val, 
map), empty, list) } @@ -1005,7 +980,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let toList = map => { reduceRight((list, key, val) => [(key, val), ...list], [], map) } @@ -1019,7 +993,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let fromArray = array => { Array.reduce((map, (key, val)) => set(key, val, map), empty, array) } @@ -1033,7 +1006,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablemap"` module */ - provide let toArray = map => { Array.fromList(toList(map)) } diff --git a/stdlib/marshal.gr b/stdlib/marshal.gr index fe0c99f8a0..c6f14d4403 100644 --- a/stdlib/marshal.gr +++ b/stdlib/marshal.gr @@ -5,7 +5,6 @@ * * @since v0.5.3 */ - module Marshal /* @@ -53,8 +52,7 @@ let roundTo8 = n => { @unsafe let isHeapPtr = value => - (value & Tags._GRAIN_GENERIC_TAG_MASK) == - Tags._GRAIN_GENERIC_HEAP_TAG_TYPE + (value & Tags._GRAIN_GENERIC_TAG_MASK) == Tags._GRAIN_GENERIC_HEAP_TAG_TYPE @unsafe let rec size = (value, acc, valuesSeen, toplevel) => { @@ -74,9 +72,8 @@ let rec size = (value, acc, valuesSeen, toplevel) => { } let heapPtr = value match (load(heapPtr, 0n)) { - t when ( - t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG - ) => { + t when t == Tags._GRAIN_STRING_HEAP_TAG || + t == Tags._GRAIN_BYTES_HEAP_TAG => { acc + roundTo8(8n + load(heapPtr, 4n)) }, t when t == Tags._GRAIN_ADT_HEAP_TAG => { @@ -142,10 +139,8 @@ let rec size = (value, acc, valuesSeen, toplevel) => { t when t == Tags._GRAIN_BOXED_NUM_HEAP_TAG => { let tag = load(heapPtr, 4n) match (tag) { - t when ( - t == Tags._GRAIN_INT64_BOXED_NUM_TAG || - t == Tags._GRAIN_FLOAT64_BOXED_NUM_TAG - ) => { + t when t == Tags._GRAIN_INT64_BOXED_NUM_TAG || + t == Tags._GRAIN_FLOAT64_BOXED_NUM_TAG => { acc + 16n }, t when t == Tags._GRAIN_BIGINT_BOXED_NUM_TAG => { @@ -162,11 +157,9 @@ let rec size = (value, acc, valuesSeen, toplevel) => { }, } }, - t when ( - t == Tags._GRAIN_INT32_HEAP_TAG || + t when t == Tags._GRAIN_INT32_HEAP_TAG || t == Tags._GRAIN_FLOAT32_HEAP_TAG || - t == Tags._GRAIN_UINT32_HEAP_TAG - ) => { + t == Tags._GRAIN_UINT32_HEAP_TAG => { acc + 8n }, t when t == Tags._GRAIN_UINT64_HEAP_TAG => { @@ -200,9 +193,7 @@ let rec marshalHeap = (heapPtr, buf, offset, valuesSeen) => { Map.set(asInt32, offsetAsInt32, valuesSeen) match (load(heapPtr, 0n)) { - t when ( - t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG - ) => { + t when t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG => { let size = 8n + load(heapPtr, 4n) Memory.copy(buf + offset, heapPtr, size) roundTo8(offset + size) @@ -392,11 +383,9 @@ let rec marshalHeap = (heapPtr, buf, offset, valuesSeen) => { }, } }, - t when ( - t == Tags._GRAIN_INT32_HEAP_TAG || + t when t == Tags._GRAIN_INT32_HEAP_TAG || t == Tags._GRAIN_FLOAT32_HEAP_TAG || - t == Tags._GRAIN_UINT32_HEAP_TAG - ) => { + t == Tags._GRAIN_UINT32_HEAP_TAG => { Memory.copy(buf + offset, heapPtr, 8n) offset + 8n }, @@ -451,13 +440,11 @@ let reportError = (message, offset) => { @unsafe let validateStack = (value, offset) => { match (value) { - _ when ( - value == fromGrain(true) || + _ when value == fromGrain(true) || value == fromGrain(false) || value == fromGrain(void) || (value & Tags._GRAIN_NUMBER_TAG_MASK) == Tags._GRAIN_NUMBER_TAG_TYPE || - (value & Tags._GRAIN_GENERIC_TAG_MASK) == Tags._GRAIN_SHORTVAL_TAG_TYPE - ) => + (value & Tags._GRAIN_GENERIC_TAG_MASK) == 
Tags._GRAIN_SHORTVAL_TAG_TYPE => None, _ => reportError("Unknown value", offset), } @@ -472,9 +459,7 @@ let rec validateHeap = (buf, bufSize, offset, valuesChecked) => { let valuePtr = buf + offset match (load(valuePtr, 0n)) { - t when ( - t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG - ) => { + t when t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG => { let size = 8n + load(valuePtr, 4n) if (offset + size > bufSize) { reportError("String/Bytes length exceeds buffer size", offset) @@ -670,7 +655,7 @@ let rec validateHeap = (buf, bufSize, offset, valuesChecked) => { let numeratorError = if ( load(buf, numeratorOffset) != Tags._GRAIN_BOXED_NUM_HEAP_TAG && load(buf, numeratorOffset + 4n) != - Tags._GRAIN_BIGINT_BOXED_NUM_TAG + Tags._GRAIN_BIGINT_BOXED_NUM_TAG ) { reportError( "Rational/Number numerator was not in the expected format", @@ -680,10 +665,9 @@ let rec validateHeap = (buf, bufSize, offset, valuesChecked) => { None } let denominatorError = if ( - load(buf, denominatorOffset) != - Tags._GRAIN_BOXED_NUM_HEAP_TAG && + load(buf, denominatorOffset) != Tags._GRAIN_BOXED_NUM_HEAP_TAG && load(buf, denominatorOffset + 4n) != - Tags._GRAIN_BIGINT_BOXED_NUM_TAG + Tags._GRAIN_BIGINT_BOXED_NUM_TAG ) { reportError( "Rational/Number denominator was not in the expected format", @@ -748,15 +732,12 @@ let validate = (buf, bufSize) => { } else { // Handle non-heap values: booleans, chars, void, etc. match (value) { - _ when ( - value == fromGrain(true) || + _ when value == fromGrain(true) || value == fromGrain(false) || value == fromGrain(void) || - (value & Tags._GRAIN_NUMBER_TAG_MASK) == - Tags._GRAIN_NUMBER_TAG_TYPE || + (value & Tags._GRAIN_NUMBER_TAG_MASK) == Tags._GRAIN_NUMBER_TAG_TYPE || (value & Tags._GRAIN_GENERIC_TAG_MASK) == - Tags._GRAIN_SHORTVAL_TAG_TYPE - ) => + Tags._GRAIN_SHORTVAL_TAG_TYPE => None, _ => reportError("Unknown value", 0n), } @@ -770,9 +751,7 @@ let rec unmarshalHeap = (buf, offset, valuesUnmarshaled) => { let valuePtr = buf + offset match (load(valuePtr, 0n)) { - t when ( - t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG - ) => { + t when t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG => { let size = 8n + load(valuePtr, 4n) let value = Memory.malloc(size) Memory.copy(value, valuePtr, size) @@ -1006,11 +985,9 @@ let rec unmarshalHeap = (buf, offset, valuesUnmarshaled) => { }, } }, - t when ( - t == Tags._GRAIN_INT32_HEAP_TAG || + t when t == Tags._GRAIN_INT32_HEAP_TAG || t == Tags._GRAIN_FLOAT32_HEAP_TAG || - t == Tags._GRAIN_UINT32_HEAP_TAG - ) => { + t == Tags._GRAIN_UINT32_HEAP_TAG => { let value = Memory.malloc(8n) Memory.copy(value, valuePtr, 8n) diff --git a/stdlib/number.gr b/stdlib/number.gr index 19f10e531e..87db6dbeb9 100644 --- a/stdlib/number.gr +++ b/stdlib/number.gr @@ -18,7 +18,6 @@ * * @since v0.4.0 */ - module Number include "runtime/unsafe/wasmi32" @@ -486,49 +485,48 @@ provide let parseFloat = Atof.parseFloat provide let parse = input => { match (parseInt(input, 10)) { Ok(number) => Ok(number), - Err(msg) => - match (parseFloat(input)) { - Ok(number) => Ok(number), - Err(_) => { - // Split the input on a `/` and attempt to parse a rational - from WasmI32 use { (+), (-), ltU as (<), (==) } + Err(msg) => match (parseFloat(input)) { + Ok(number) => Ok(number), + Err(_) => { + // Split the input on a `/` and attempt to parse a rational + from WasmI32 use { (+), (-), ltU as (<), (==) } - // Search for `/` - let input = WasmI32.fromGrain(input) - let len = WasmI32.load(input, 
4n) - let mut slashIdx = -1n - for (let mut i = 0n; i < len; i += 1n) { - if (WasmI32.load8U(input + i, 8n) == 0x2fn) { - slashIdx = i - break - } + // Search for `/` + let input = WasmI32.fromGrain(input) + let len = WasmI32.load(input, 4n) + let mut slashIdx = -1n + for (let mut i = 0n; i < len; i += 1n) { + if (WasmI32.load8U(input + i, 8n) == 0x2fn) { + slashIdx = i + break } + } - if (slashIdx == -1n) { - Err(msg) - } else { - let numeratorLen = slashIdx - let denominatorLen = len - slashIdx - 1n + if (slashIdx == -1n) { + Err(msg) + } else { + let numeratorLen = slashIdx + let denominatorLen = len - slashIdx - 1n - let numerator = allocateString(numeratorLen) - Memory.copy(numerator + 8n, input + 8n, numeratorLen) - let numerator = WasmI32.toGrain(numerator): String + let numerator = allocateString(numeratorLen) + Memory.copy(numerator + 8n, input + 8n, numeratorLen) + let numerator = WasmI32.toGrain(numerator): String - let denominator = allocateString(denominatorLen) - Memory.copy( - denominator + 8n, - input + 8n + slashIdx + 1n, - denominatorLen - ) - let denominator = WasmI32.toGrain(denominator): String + let denominator = allocateString(denominatorLen) + Memory.copy( + denominator + 8n, + input + 8n + slashIdx + 1n, + denominatorLen + ) + let denominator = WasmI32.toGrain(denominator): String - match ((parseInt(numerator, 10), parseInt(denominator, 10))) { - (Ok(numerator), Ok(denominator)) => Ok(numerator / denominator), - (Err(msg), _) | (_, Err(msg)) => Err(msg), - } + match ((parseInt(numerator, 10), parseInt(denominator, 10))) { + (Ok(numerator), Ok(denominator)) => Ok(numerator / denominator), + (Err(msg), _) | (_, Err(msg)) => Err(msg), } - }, + } }, + }, } } @@ -666,8 +664,10 @@ provide let asin = angle => { /* |x| < 0.5 */ if (ix < 0x3fe00000n) { /* if 0x1p-1022 <= |x| < 0x1p-26, avoid raising underflow */ - let output = - if (ix < 0x3e500000n && ix >= 0x00100000n) x else x + x * rf(x * x) + let output = if (ix < 0x3e500000n && ix >= 0x00100000n) + x + else + x + x * rf(x * x) return WasmI32.toGrain(newFloat64(output)): Number } /* 1 > |x| >= 0.5 */ @@ -685,11 +685,7 @@ provide let asin = angle => { ) let c = (z - f * f) / (s + f) x = 0.5W * pio2_hi - - (2.0W * s * r - - (pio2_lo - - 2.0W * c) - - (0.5W * pio2_hi - - 2.0W * f)) + (2.0W * s * r - (pio2_lo - 2.0W * c) - (0.5W * pio2_hi - 2.0W * f)) } x = WasmF64.copySign(x, origAngle) return WasmI32.toGrain(newFloat64(x)): Number @@ -735,19 +731,18 @@ provide let acos = angle => { /* acos(1)=0, acos(-1)=pi */ if (hx >> 31n != 0n) return WasmI32.toGrain(newFloat64(2.0W * pio2_hi + 0x1p-120W)): Number - else return 0 + else + return 0 } return WasmI32.toGrain(newFloat64(NaNW)): Number } /* |x| < 0.5 */ if (ix < 0x3fe00000n) { /* |x| < 2**-57 */ - let output = - if (ix <= 0x3c600000n) pio2_hi + 0x1p-120W - else pio2_hi - - (x - - (pio2_lo - - x * rf(x * x))) + let output = if (ix <= 0x3c600000n) + pio2_hi + 0x1p-120W + else + pio2_hi - (x - (pio2_lo - x * rf(x * x))) return WasmI32.toGrain(newFloat64(output)): Number } /* x < -0.5 */ @@ -930,11 +925,11 @@ provide let rec gamma = z => { * @since v0.5.4 */ provide let rec factorial = n => { - if (isInteger(n) && n < 0) gamma(abs(n) + 1) * -1 else if ( - !isInteger(n) && n < 0 - ) { + if (isInteger(n) && n < 0) { + gamma(abs(n) + 1) * -1 + } else if (!isInteger(n) && n < 0) { throw Exception.InvalidArgument( - "Cannot compute the factorial of a negative non-integer" + "Cannot compute the factorial of a negative non-integer", ) } else { gamma(n + 1) @@ -977,8 +972,13 @@ 
provide let clamp = (range, input) => { let rangeEnd = max(range.rangeStart, range.rangeEnd) let rangeStart = min(range.rangeStart, range.rangeEnd) - if (input > rangeEnd) rangeEnd else if (input < rangeStart) rangeStart - else input + if (input > rangeEnd) { + rangeEnd + } else if (input < rangeStart) { + rangeStart + } else { + input + } } } diff --git a/stdlib/option.gr b/stdlib/option.gr index 574601304a..d318180375 100644 --- a/stdlib/option.gr +++ b/stdlib/option.gr @@ -10,7 +10,6 @@ * * @since v0.2.0 */ - module Option /** @@ -190,12 +189,11 @@ provide let flatMap = (fn, option) => { */ provide let filter = (fn, option) => { match (option) { - Some(x) => - if (fn(x)) { - Some(x) - } else { - None - }, + Some(x) => if (fn(x)) { + Some(x) + } else { + None + }, None => None, } } diff --git a/stdlib/path.gr b/stdlib/path.gr index d4b7eeeb23..139ecd8a8d 100644 --- a/stdlib/path.gr +++ b/stdlib/path.gr @@ -20,7 +20,6 @@ * * @since v0.5.5 */ - module Path include "string" @@ -471,15 +470,16 @@ provide let isAbsolute = path => { // should only be used on relative path appended to directory path let rec appendHelper = (path: PathInfo, toAppend: PathInfo) => match (toAppend) { - (Rel(up2), ft, s2) => - match (path) { - (Rel(up1), _, []) => (Rel(up1 + up2), ft, s2), - (Abs(_) as d, _, []) => (d, ft, s2), - (d, pft, [_, ...rest] as s1) => { - if (up2 > 0) appendHelper((d, pft, rest), (Rel(up2 - 1), ft, s2)) - else (d, ft, List.append(s2, s1)) - }, + (Rel(up2), ft, s2) => match (path) { + (Rel(up1), _, []) => (Rel(up1 + up2), ft, s2), + (Abs(_) as d, _, []) => (d, ft, s2), + (d, pft, [_, ...rest] as s1) => { + if (up2 > 0) + appendHelper((d, pft, rest), (Rel(up2 - 1), ft, s2)) + else + (d, ft, List.append(s2, s1)) }, + }, (Abs(_), _, _) => fail "Impossible: relative path encoded as absolute path", }: PathInfo @@ -526,8 +526,10 @@ let relativeToHelper = (source: PathInfo, dest: PathInfo) => { let result = match ((source, dest)) { ((_, File, [name, ..._]), _) when source == dest => Ok((1, [name])), ((Abs(r1), _, s1), (Abs(r2), _, s2)) => - if (r1 != r2) Err(Incompatible(DifferentRoots)) - else relativizeDepth((0, List.reverse(s1)), (0, List.reverse(s2))), + if (r1 != r2) + Err(Incompatible(DifferentRoots)) + else + relativizeDepth((0, List.reverse(s1)), (0, List.reverse(s2))), ((Rel(up1), _, s1), (Rel(up2), _, s2)) => relativizeDepth((up1, List.reverse(s1)), (up2, List.reverse(s2))), _ => fail "Impossible: paths should have both been absolute or relative", @@ -574,14 +576,13 @@ provide let relativeTo = (source, dest) => { } } -let rec segsAncestry = (baseSegs, pathSegs) => - match ((baseSegs, pathSegs)) { - ([], []) => Self, - ([], _) => Descendant, - (_, []) => Ancestor, - ([first1, ..._], [first2, ..._]) when first1 != first2 => NoLineage, - ([_, ...rest1], [_, ...rest2]) => segsAncestry(rest1, rest2), - } +let rec segsAncestry = (baseSegs, pathSegs) => match ((baseSegs, pathSegs)) { + ([], []) => Self, + ([], _) => Descendant, + (_, []) => Ancestor, + ([first1, ..._], [first2, ..._]) when first1 != first2 => NoLineage, + ([_, ...rest1], [_, ...rest2]) => segsAncestry(rest1, rest2), +} // should be used on paths with same absolute/relativeness let ancestryHelper = (base: PathInfo, path: PathInfo) => { @@ -618,12 +619,11 @@ provide let ancestry = (base: Path, path: Path) => { } } -let parentHelper = (path: PathInfo) => - match (path) { - (base, _, [_, ...rest]) => (base, Dir, rest), - (Rel(upDirs), _, []) => (Rel(upDirs + 1), Dir, []), - (Abs(_) as base, _, []) => (base, Dir, []), - }: PathInfo 
+let parentHelper = (path: PathInfo) => match (path) { + (base, _, [_, ...rest]) => (base, Dir, rest), + (Rel(upDirs), _, []) => (Rel(upDirs + 1), Dir, []), + (Abs(_) as base, _, []) => (base, Dir, []), +}: PathInfo /** * Retrieves the path corresponding to the parent directory of the given path. @@ -640,11 +640,10 @@ provide let parent = (path: Path) => { toPath(parentHelper(pathInfo(path))) } -let basenameHelper = (path: PathInfo) => - match (path) { - (_, _, [name, ..._]) => Some(name), - _ => None, - } +let basenameHelper = (path: PathInfo) => match (path) { + (_, _, [name, ..._]) => Some(name), + _ => None, +} /** * Retrieves the basename (named final segment) of a path. @@ -662,21 +661,20 @@ provide let basename = (path: Path) => { } // should only be used on file paths -let stemExtHelper = (path: PathInfo) => - match (path) { - (_, _, [name, ..._]) => { - // trim first character (which is possibly a .) off as trick for - // splitting .a.b.c into .a, .b.c - match (String.indexOf(".", String.slice(1, name))) { - Some(dotI) => { - let dotI = dotI + 1 - (String.slice(0, end=dotI, name), String.slice(dotI, name)) - }, - None => (name, ""), - } - }, - _ => ("", ""), - } +let stemExtHelper = (path: PathInfo) => match (path) { + (_, _, [name, ..._]) => { + // trim first character (which is possibly a .) off as trick for + // splitting .a.b.c into .a, .b.c + match (String.indexOf(".", String.slice(1, name))) { + Some(dotI) => { + let dotI = dotI + 1 + (String.slice(0, end=dotI, name), String.slice(dotI, name)) + }, + None => (name, ""), + } + }, + _ => ("", ""), +} /** * Retrieves the basename of a file path without the extension. @@ -725,11 +723,10 @@ provide let extension = (path: Path) => { } // should only be used on absolute paths -let rootHelper = (path: PathInfo) => - match (path) { - (Abs(root), _, _) => root, - _ => fail "Impossible: malformed absolute path data", - } +let rootHelper = (path: PathInfo) => match (path) { + (Abs(root), _, _) => root, + _ => fail "Impossible: malformed absolute path data", +} /** * Retrieves the root of the absolute path. diff --git a/stdlib/pervasives.gr b/stdlib/pervasives.gr index c26103d532..9e0b393dd7 100644 --- a/stdlib/pervasives.gr +++ b/stdlib/pervasives.gr @@ -7,7 +7,6 @@ * * @since v0.1.0 */ - module Pervasives include "runtime/exception" diff --git a/stdlib/priorityqueue.gr b/stdlib/priorityqueue.gr index f3cae2ee1e..7e4bac9821 100644 --- a/stdlib/priorityqueue.gr +++ b/stdlib/priorityqueue.gr @@ -7,7 +7,6 @@ * * @since v0.5.3 */ - module PriorityQueue include "array" @@ -33,8 +32,8 @@ let swap = (i1, i2, array) => { let get = (array, i) => Option.expect( "Impossible: " ++ - toString(i) ++ - " in PriorityQueue's inner storage array is None", + toString(i) ++ + " in PriorityQueue's inner storage array is None", array[i] ) @@ -270,7 +269,6 @@ provide module Immutable { /** * Immutable data structure which maintains a priority order for its elements. 
*/ - abstract record PriorityQueue { comp: (a, a) => Number, size: Number, @@ -283,7 +281,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablepriorityqueue"` module */ - provide let empty = { let empty = { comp: compare, size: 0, root: None } empty @@ -304,7 +301,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.3: Originally in `"immutablepriorityqueue"` module with `compare` being a required argument */ - provide let make = (compare=compare) => { { comp: compare, size: 0, root: None } } @@ -318,7 +314,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.3: Originally in `"immutablepriorityqueue"` module */ - provide let size = ({ size, _ } as pq: PriorityQueue) => { size } @@ -332,7 +327,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.3: Originally in `"immutablepriorityqueue"` module */ - provide let isEmpty = ({ size, _ } as pq: PriorityQueue) => { size == 0 } @@ -346,7 +340,8 @@ provide module Immutable { children: [newNode, node2, ...node1.children], } } else if ( - comp(node2.val, newNode.val) <= 0 && comp(node2.val, node1.val) <= 0 + comp(node2.val, newNode.val) <= 0 && + comp(node2.val, node1.val) <= 0 ) { { val: node2.val, @@ -380,21 +375,22 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.3: Originally in `"immutablepriorityqueue"` module */ - provide let push = (val, pq) => { let { comp, size, root } = pq match (root) { None => { comp, size: 1, root: Some({ rootVal: val, pq: [] }) }, Some({ rootVal, pq }) => { // make the new value the root if it has higher priority than the highest priority value - let (morePriorityVal, lessPriorityVal) = - if (comp(val, rootVal) <= 0) (val, rootVal) else (rootVal, val) + let (morePriorityVal, lessPriorityVal) = if (comp(val, rootVal) <= 0) + (val, rootVal) + else + (rootVal, val) let newRoot = Some( { rootVal: morePriorityVal, pq: skewInsert(comp, lessPriorityVal, pq), - } + }, ) { comp, size: size + 1, root: newRoot } }, @@ -411,7 +407,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.3: Originally in `"immutablepriorityqueue"` module */ - provide let peek = pq => { match (pq.root) { None => None, @@ -422,8 +417,10 @@ provide module Immutable { let linkNodes = (comp, node1, node2) => { // make the node with higher priority the parent of the node with smaller // priority to presere heap-ordering - let (morePriority, lessPriority) = - if (comp(node1.val, node2.val) <= 0) (node1, node2) else (node2, node1) + let (morePriority, lessPriority) = if (comp(node1.val, node2.val) <= 0) + (node1, node2) + else + (node2, node1) { val: morePriority.val, rank: morePriority.rank + 1, @@ -545,7 +542,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.3: Originally in `"immutablepriorityqueue"` module */ - provide let pop = pq => { let pqWithRoot = pq let { comp, size, root } = pq @@ -559,7 +555,7 @@ provide module Immutable { { rootVal: findHighestPriority(comp, pq), pq: withoutHighestPriority(comp, pq), - } + }, ) } { comp, size: size - 1, root: newRoot } @@ -576,7 +572,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.3: Originally in `"immutablepriorityqueue"` module */ - provide let drain = pq => { let rec drainRec = (acc, pq) => { match (pq.root) { @@ -601,7 +596,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.3: Originally in `"immutablepriorityqueue"` module with `compare` being a required argument */ - provide let fromList = (list, compare=compare) => { List.reduce((pq, val) => push(val, pq), 
make(compare=compare), list) } @@ -620,7 +614,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutablepriorityqueue"` module with `compare` being a required argument */ - provide let fromArray = (array, compare=compare) => { Array.reduce((pq, val) => push(val, pq), make(compare=compare), array) } diff --git a/stdlib/queue.gr b/stdlib/queue.gr index cca825c04b..73ccd2a9f7 100644 --- a/stdlib/queue.gr +++ b/stdlib/queue.gr @@ -9,7 +9,6 @@ * * @since v0.2.0 */ - module Queue include "list" @@ -135,13 +134,15 @@ provide let pop = queue => { * @since v0.6.0 */ provide let toList = queue => { - let lst = List.init(queue.size, i => match (queue.array[(queue.headIndex + - i) % - Array.length(queue.array)]) { - Some(n) => n, - None => - fail "Impossible: Attempted to access non-existent bucket in Queue.toList", - }) + let lst = List.init( + queue.size, + i => + match (queue.array[(queue.headIndex + i) % Array.length(queue.array)]) { + Some(n) => n, + None => + fail "Impossible: Attempted to access non-existent bucket in Queue.toList", + } + ) lst } @@ -280,7 +281,6 @@ provide module Immutable { /** * An immutable FIFO (first-in-first-out) data structure. */ - abstract record ImmutableQueue { forwards: List, backwards: List, @@ -292,7 +292,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally a module root API */ - provide let empty = { let empty = { forwards: [], backwards: [] } empty @@ -307,7 +306,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.2.0: Originally a module root API */ - provide let isEmpty = queue => { match (queue) { { forwards: [], backwards: [] } => true, @@ -327,7 +325,6 @@ provide module Immutable { * @history v0.3.2: Originally a module root API * @history v0.4.0: Removed `head` function */ - provide let peek = queue => { match (queue) { { forwards: [], backwards: [] } => None, @@ -348,7 +345,6 @@ provide module Immutable { * @history v0.3.2: Originally a module root API * @history v0.4.0: Removed `enqueue` function */ - provide let push = (value, queue) => { match (queue) { { forwards: [], backwards: [] } => { forwards: [value], backwards: [] }, @@ -368,21 +364,14 @@ provide module Immutable { * @history v0.3.2: Originally a module root API * @history v0.4.0: Removed `dequeue` function */ - provide let pop = queue => { match (queue) { { forwards: [], backwards: [] } => queue, { forwards: [head], backwards: [] } => { forwards: [], backwards: [] }, { forwards: [head], backwards } => - { - forwards: List.reverse(backwards), - backwards: [], - }, + { forwards: List.reverse(backwards), backwards: [] }, { forwards: [head, ...ftail], backwards } => - { - forwards: ftail, - backwards, - }, + { forwards: ftail, backwards }, } } @@ -395,7 +384,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.3.2: Originally a module root API */ - provide let size = queue => { match (queue) { { forwards: [], backwards: [] } => 0, @@ -413,7 +401,6 @@ provide module Immutable { * * @since v0.6.0 */ - provide let toList = queue => { List.append(queue.forwards, List.reverse(queue.backwards)) } @@ -426,7 +413,6 @@ provide module Immutable { * * @since v0.6.0 */ - provide let fromList = list => { { forwards: list, backwards: [] } } diff --git a/stdlib/random.gr b/stdlib/random.gr index 3409918c87..879ff02a85 100644 --- a/stdlib/random.gr +++ b/stdlib/random.gr @@ -5,7 +5,6 @@ * * @since v0.5.0 */ - module Random include "sys/random" as WasiRandom diff --git a/stdlib/range.gr b/stdlib/range.gr index 56396eb395..ebeec141ae 
100644 --- a/stdlib/range.gr +++ b/stdlib/range.gr @@ -11,7 +11,6 @@ * @since v0.3.0 * @history v0.6.0: Treats all ranges as exclusive */ - module Range /** @@ -29,13 +28,9 @@ module Range */ provide let inRange = (value, range) => { match (range) { - { rangeStart: lower, rangeEnd: upper } when ( - value >= lower && value < upper - ) => + { rangeStart: lower, rangeEnd: upper } when value >= lower && value < upper => true, - { rangeStart: upper, rangeEnd: lower } when ( - value >= lower && value < upper - ) => + { rangeStart: upper, rangeEnd: lower } when value >= lower && value < upper => true, _ => false, } @@ -126,16 +121,13 @@ provide module Inclusive { * @since v0.6.0 * @history v0.3.0: Root APIs originally handled Inclusive & Exclusive variants */ - provide let inRange = (value, range) => { match (range) { - { rangeStart: lower, rangeEnd: upper } when ( - value >= lower && value <= upper - ) => + { rangeStart: lower, rangeEnd: upper } when value >= lower && + value <= upper => true, - { rangeStart: upper, rangeEnd: lower } when ( - value >= lower && value <= upper - ) => + { rangeStart: upper, rangeEnd: lower } when value >= lower && + value <= upper => true, _ => false, } @@ -156,7 +148,6 @@ provide module Inclusive { * @since v0.3.0 * @history v0.3.0: Root APIs originally handled Inclusive & Exclusive variants */ - provide let forEach = (fn: Number => Void, range) => { match (range) { { rangeStart: lower, rangeEnd: upper } when lower <= upper => { @@ -192,7 +183,6 @@ provide module Inclusive { * @since v0.3.2 * @history v0.3.0: Root APIs originally handled Inclusive & Exclusive variants */ - provide let map = (fn, range) => { let mut result = [] match (range) { diff --git a/stdlib/rational.gr b/stdlib/rational.gr index 1bf4a42ad0..4d745b6c23 100644 --- a/stdlib/rational.gr +++ b/stdlib/rational.gr @@ -7,7 +7,6 @@ * * @since v0.6.0 */ - module Rational include "runtime/unsafe/wasmi32" diff --git a/stdlib/regex.gr b/stdlib/regex.gr index 21ac00ebea..5312a2f512 100644 --- a/stdlib/regex.gr +++ b/stdlib/regex.gr @@ -5,7 +5,6 @@ * * @since 0.4.3 */ - module Regex /* @@ -64,11 +63,7 @@ let makeRegExParserConfig = () => { } } -let configWithCaseSensitive = - ( - config: RegExParserConfig, - caseSensitive: Bool, - ) => { +let configWithCaseSensitive = (config: RegExParserConfig, caseSensitive: Bool) => { { isPerlRegExp: config.isPerlRegExp, caseSensitive, @@ -173,12 +168,12 @@ let eat = (buf: RegExBuf, char: Char) => { parseErr( buf, "Expected character '" ++ - Char.toString(char) ++ - ", but found character '" ++ - Char.toString(ret) ++ - "'", + Char.toString(char) ++ + ", but found character '" ++ + Char.toString(ret) ++ + "'", 0 - ) + ), ) } } @@ -249,9 +244,8 @@ and rangeUnion = (rng1, rng2) => { match ((rng1, rng2)) { ([], _) => rng2, (_, []) => rng1, - ([(r1start, r1end), ...r1tl], [(r2start, r2end), ...r2tl]) when ( - r1start <= r2start - ) => { + ([(r1start, r1end), ...r1tl], [(r2start, r2end), ...r2tl]) when r1start <= + r2start => { if (r1end + 1 >= r2start) { if (r1end <= r2end) { rangeUnion([(r1start, r2end), ...r2tl], r1tl) @@ -318,7 +312,7 @@ let rangeAddCaseAware = (rng: CharRange, c, config) => { Ok(rng) */ Err( - "NYI: Case-insensitive matching is not supported until grain-lang/grain#661 is resolved." 
+ "NYI: Case-insensitive matching is not supported until grain-lang/grain#661 is resolved.", ) } }, @@ -417,18 +411,11 @@ enum rec ParsedRegularExpression { REWordBoundary, RENotWordBoundary, RELiteral(Char), - RELiteralString( - String - ), // <- sequences of literals are flattened into a string + RELiteralString(String), // <- sequences of literals are flattened into a string REAlts(ParsedRegularExpression, ParsedRegularExpression), RESequence(List, Bool), // seq elts, needs backtrack REGroup(ParsedRegularExpression, Number), // regex, group ID - RERepeat( - ParsedRegularExpression, - Number, - Option, - Bool - ), // regex, min, max (None for infinity), true=non-greedy + RERepeat(ParsedRegularExpression, Number, Option, Bool), // regex, min, max (None for infinity), true=non-greedy REMaybe(ParsedRegularExpression, Bool), // regex, true=non-greedy REConditional( ParsedRegularExpression, @@ -438,12 +425,7 @@ enum rec ParsedRegularExpression { Number, Bool ), // test, if-true, if-false, n-start, num-n, needs-backtrack - RELookahead( - ParsedRegularExpression, - Bool, - Number, - Number - ), // regex, is-match, n-start, num-n + RELookahead(ParsedRegularExpression, Bool, Number, Number), // regex, is-match, n-start, num-n RELookbehind( ParsedRegularExpression, Bool, @@ -452,23 +434,18 @@ enum rec ParsedRegularExpression { Number, Number ), // regex, is-match, lb-min, lb-max, n-start, num-n (lb-xx values patched in later) - RECut( - ParsedRegularExpression, - Number, - Number, - Bool - ), // regex, n-start, num-n, needs-backtrack + RECut(ParsedRegularExpression, Number, Number, Bool), // regex, n-start, num-n, needs-backtrack REReference(Number, Bool), // n, case-sensitive RERange(RERange), - REUnicodeCategories( - List, - Bool - ), // symlist, true=match/false=does-not-match + REUnicodeCategories(List, Bool), // symlist, true=match/false=does-not-match } let needsBacktrack = (rx: ParsedRegularExpression) => { match (rx) { - REAlts(_, _) | REGroup(_, _) | RERepeat(_, _, _, _) | REMaybe(_, _) | + REAlts(_, _) | + REGroup(_, _) | + RERepeat(_, _, _, _) | + REMaybe(_, _) | REUnicodeCategories(_, _) => true, RESequence(_, nb) | REConditional(_, _, _, _, _, nb) | RECut(_, _, _, nb) => @@ -527,8 +504,7 @@ let mergeAdjacent = lst => { }, } }, - [] => - fail "impossible (mergeAdjacent)", // avoid warning (can delete once TODO is resolved) + [] => fail "impossible (mergeAdjacent)", // avoid warning (can delete once TODO is resolved) [RELiteralString(x), ...tl] when Option.isSome(mode) => loop(mode, [x, ...accum], tl), [RELiteral(c), ...tl] when Option.isSome(mode) => @@ -574,11 +550,10 @@ let makeRECut = (rx, nStart, numN) => { } let makeREConditional = (tst, pces1, pces2, nStart, numN) => { - let nb = needsBacktrack(pces1) || - match (pces2) { - None => false, - Some(p2) => needsBacktrack(p2), - } + let nb = needsBacktrack(pces1) || match (pces2) { + None => false, + Some(p2) => needsBacktrack(p2), + } REConditional(tst, pces1, pces2, nStart, numN, nb) } @@ -663,9 +638,7 @@ and parseRange = (buf: RegExBuf) => { } and parseClass = (buf: RegExBuf) => { if (!more(buf)) { - Err( - "no chars" - ) // caught in handler (we use a Result to cleanly mesh with the Result type below) + Err("no chars") // caught in handler (we use a Result to cleanly mesh with the Result type below) } else { match (peek(buf)) { Err(e) => Err(e), @@ -715,15 +688,14 @@ and parsePosixCharClass = (buf: RegExBuf) => { Ok(_) => Ok(List.join("", List.reverse(acc))), } }, - Ok(c) when ( - Char.code('a') <= Char.code(c) && Char.code(c) <= 
Char.code('z') - ) => { + Ok(c) when Char.code('a') <= Char.code(c) && + Char.code(c) <= Char.code('z') => { ignore(eat(buf, c)) loop([Char.toString(c), ...acc]) }, Ok(_) => Err( - parseErr(buf, "Invalid character in POSIX character class", 0) + parseErr(buf, "Invalid character in POSIX character class", 0), ), } } @@ -737,7 +709,7 @@ and parsePosixCharClass = (buf: RegExBuf) => { rangeAddSpan([], Char.code('a'), Char.code('z')), Char.code('A'), Char.code('Z') - ) + ), ), "upper" => Ok(rangeAddSpan([], Char.code('A'), Char.code('Z'))), "lower" => Ok(rangeAddSpan([], Char.code('a'), Char.code('z'))), @@ -752,7 +724,7 @@ and parsePosixCharClass = (buf: RegExBuf) => { ), Char.code('A'), Char.code('F') - ) + ), ), "alnum" => Ok( @@ -764,7 +736,7 @@ and parsePosixCharClass = (buf: RegExBuf) => { ), Char.code('A'), Char.code('Z') - ) + ), ), "word" => Ok( @@ -775,7 +747,7 @@ and parsePosixCharClass = (buf: RegExBuf) => { Char.code('F') ), Char.code('_') - ) + ), ), "blank" => Ok(rangeAdd(rangeAdd([], 0x20), 0x9)), // space and tab "space" => Ok(range_s()), @@ -785,7 +757,7 @@ and parsePosixCharClass = (buf: RegExBuf) => { buf, "the [:graph:] character class is not currently supported. For more information, see https://github.com/grain-lang/grain/issues/661", 0 - ) + ), ), "print" => Err( @@ -793,7 +765,7 @@ and parsePosixCharClass = (buf: RegExBuf) => { buf, "the [:print:] character class is not currently supported. For more information, see https://github.com/grain-lang/grain/issues/661", 0 - ) + ), ), "cntrl" => Ok(rangeAddSpan([], 0, 31)), "ascii" => Ok(rangeAddSpan([], 0, 127)), @@ -809,18 +781,17 @@ and parsePosixCharClass = (buf: RegExBuf) => { buf, "Expected `:` after `[`. Found: `" ++ Char.toString(c) ++ "`", 0 - ) + ), ), } } } -and parseRangeRest = - ( - buf: RegExBuf, - rng: CharRange, - spanFrom: Option, - mustSpanFrom: Option, - ) => { +and parseRangeRest = ( + buf: RegExBuf, + rng: CharRange, + spanFrom: Option, + mustSpanFrom: Option, +) => { if (!more(buf)) { Err(parseErr(buf, "Missing closing `]`", 0)) } else { @@ -844,7 +815,7 @@ and parseRangeRest = buf, "misplaced hyphen within square brackets in pattern", 1 - ) + ), ), None => { ignore(eat(buf, '-')) @@ -862,7 +833,7 @@ and parseRangeRest = buf, "misplaced hyphen within square brackets in pattern", 1 - ) + ), ), Ok(_) => { ignore(eat(buf, '-')) @@ -882,16 +853,14 @@ and parseRangeRest = buf, "escaping backslash at end pattern (within square brackets)", 0 - ) + ), ) } else { match (peek(buf)) { Err(e) => Err(e), - Ok(c) when ( - Char.code('a') <= Char.code(c) && + Ok(c) when Char.code('a') <= Char.code(c) && Char.code(c) <= Char.code('z') || - Char.code('A') <= Char.code(c) && Char.code(c) <= Char.code('Z') - ) => { + Char.code('A') <= Char.code(c) && Char.code(c) <= Char.code('Z') => { match (mustSpanFrom) { Some(_) => Err( @@ -899,7 +868,7 @@ and parseRangeRest = buf, "misplaced hyphen within square brackets in pattern", 0 - ) + ), ), None => { let curPos = unbox(buf.cursor) @@ -907,8 +876,8 @@ and parseRangeRest = Err(e) => Err( "Invalid Regular Expression: illegal alphebetic escape (position " ++ - toString(curPos) ++ - ")" + toString(curPos) ++ + ")", ), Ok(range1) => { match (rangeAddCaseAware(rng, spanFrom, buf.config)) { @@ -967,14 +936,13 @@ and parseRangeRest = } } } -and parseRangeRestSpan = - ( - buf: RegExBuf, - c, - rng: CharRange, - spanFrom: Option, - mustSpanFrom: Option, - ) => { +and parseRangeRestSpan = ( + buf: RegExBuf, + c, + rng: CharRange, + spanFrom: Option, + mustSpanFrom: Option, +) => { match 
(mustSpanFrom) { Some(n) => { if (n > c) { @@ -1000,221 +968,212 @@ and parseRangeRestSpan = let rec parseAtom = (buf: RegExBuf) => { match (peek(buf)) { Err(e) => Err(e), - Ok(c) => - match (c) { - '(' => { - if (!moreN(buf, 1)) { - Err(parseErr(buf, "Parentheses not closed", 1)) - } else if (peekN(buf, 1) == Ok('?')) { - // fancy group - if (!moreN(buf, 2)) { - Err(parseErr(buf, "Parentheses not closed", 2)) - } else { - match (peekN(buf, 2)) { - Err(e) => Err(e), - Ok('>') => { - // cut - ignore(eat(buf, '(')) - ignore(eat(buf, '?')) - ignore(eat(buf, '>')) - let preNumGroups = unbox(buf.config.groupNumber) - match (parseRegex(buf)) { - Err(e) => Err(e), - Ok(rx) => { - let postNumGroups = unbox(buf.config.groupNumber) - match (eat(buf, ')')) { - Err(e) => Err(e), - Ok(_) => - Ok( - makeRECut( - rx, - preNumGroups, - postNumGroups - preNumGroups - ) + Ok(c) => match (c) { + '(' => { + if (!moreN(buf, 1)) { + Err(parseErr(buf, "Parentheses not closed", 1)) + } else if (peekN(buf, 1) == Ok('?')) { + // fancy group + if (!moreN(buf, 2)) { + Err(parseErr(buf, "Parentheses not closed", 2)) + } else { + match (peekN(buf, 2)) { + Err(e) => Err(e), + Ok('>') => { + // cut + ignore(eat(buf, '(')) + ignore(eat(buf, '?')) + ignore(eat(buf, '>')) + let preNumGroups = unbox(buf.config.groupNumber) + match (parseRegex(buf)) { + Err(e) => Err(e), + Ok(rx) => { + let postNumGroups = unbox(buf.config.groupNumber) + match (eat(buf, ')')) { + Err(e) => Err(e), + Ok(_) => + Ok( + makeRECut( + rx, + preNumGroups, + postNumGroups - preNumGroups ), - } - }, - } - }, - Ok('(') => { - // conditional - ignore(eat(buf, '(')) - ignore(eat(buf, '?')) - ignore(eat(buf, '(')) - let tstPreNumGroups = unbox(buf.config.groupNumber) - match (parseTest(buf)) { - Err(e) => Err(e), - Ok(test) => { - let tstSpanNumGroups = unbox(buf.config.groupNumber) - - tstPreNumGroups - match (parsePCEs(buf, false)) { - Err(e) => Err(e), - Ok(pces) => { - if (!more(buf)) { - Err(parseErr(buf, "Parentheses not closed", 0)) - } else { - match (peek(buf)) { - Err(e) => Err(e), - Ok('|') => { - ignore(eat(buf, '|')) - match (parsePCEs(buf, false)) { - Err(e) => Err(e), - Ok(pces2) => { - match (peek(buf)) { - Err(_) => - Err( - parseErr( - buf, - "Parentheses not closed", - 0 - ) + ), + } + }, + } + }, + Ok('(') => { + // conditional + ignore(eat(buf, '(')) + ignore(eat(buf, '?')) + ignore(eat(buf, '(')) + let tstPreNumGroups = unbox(buf.config.groupNumber) + match (parseTest(buf)) { + Err(e) => Err(e), + Ok(test) => { + let tstSpanNumGroups = unbox(buf.config.groupNumber) - + tstPreNumGroups + match (parsePCEs(buf, false)) { + Err(e) => Err(e), + Ok(pces) => { + if (!more(buf)) { + Err(parseErr(buf, "Parentheses not closed", 0)) + } else { + match (peek(buf)) { + Err(e) => Err(e), + Ok('|') => { + ignore(eat(buf, '|')) + match (parsePCEs(buf, false)) { + Err(e) => Err(e), + Ok(pces2) => { + match (peek(buf)) { + Err(_) => + Err( + parseErr( + buf, + "Parentheses not closed", + 0 ), - Ok(_) => { - ignore(eat(buf, ')')) - Ok( - makeREConditional( - test, - makeRESequence(pces), - Some(makeRESequence(pces2)), - tstPreNumGroups, - tstSpanNumGroups - ) - ) - }, - } - }, - } - }, - Ok(')') => { - ignore(eat(buf, ')')) - Ok( - makeREConditional( - test, - makeRESequence(pces), - None, - tstPreNumGroups, - tstSpanNumGroups - ) - ) - }, - Ok(_) => { - Err( - parseErr(buf, "Failed to parse condition", 0) - ) - }, - } - } - }, - } - }, - } - }, - Ok('i' | 's' | 'm' | '-' | ':') => { - // match with mode - ignore(eat(buf, '(')) - ignore(eat(buf, '?')) 
- match (parseMode(buf)) { - Err(e) => Err(e), - Ok(config) => { - if (!more(buf)) { - Err(parseErr(buf, "Parentheses not closed", 0)) - } else { - match (peek(buf)) { - Err(e) => Err(e), - Ok(':') => { - ignore(eat(buf, ':')) - match (parseRegex(withConfig(buf, config))) { - Err(e) => Err(e), - Ok(rx) => { - match (eat(buf, ')')) { - Err(e) => Err(e), - Ok(_) => Ok(rx), - } - }, - } - }, - Ok(_) => { - Err( - parseErr( - buf, - "expected `:` or another mode after `(?` and a mode sequence; a mode is `i`, `-i`, `m`, `-m`, `s`, or `-s`", - 0 + ), + Ok(_) => { + ignore(eat(buf, ')')) + Ok( + makeREConditional( + test, + makeRESequence(pces), + Some(makeRESequence(pces2)), + tstPreNumGroups, + tstSpanNumGroups + ), + ) + }, + } + }, + } + }, + Ok(')') => { + ignore(eat(buf, ')')) + Ok( + makeREConditional( + test, + makeRESequence(pces), + None, + tstPreNumGroups, + tstSpanNumGroups + ), ) - ) - }, + }, + Ok(_) => { + Err(parseErr(buf, "Failed to parse condition", 0)) + }, + } } - } - }, - } - }, - Ok(_) => { - ignore(eat(buf, '(')) - ignore(eat(buf, '?')) - parseLook(buf) - }, - } - } - } else { - // simple group - ignore(eat(buf, '(')) - let groupNum = unbox(buf.config.groupNumber) - // Note that this inc operation is side-effecting - match (parseRegex( - withConfig(buf, configIncGroupNumber(buf.config)) - )) { - Err(e) => Err(e), - Ok(r) => { - match (eat(buf, ')')) { + }, + } + }, + } + }, + Ok('i' | 's' | 'm' | '-' | ':') => { + // match with mode + ignore(eat(buf, '(')) + ignore(eat(buf, '?')) + match (parseMode(buf)) { Err(e) => Err(e), - Ok(_) => Ok(REGroup(r, groupNum)), + Ok(config) => { + if (!more(buf)) { + Err(parseErr(buf, "Parentheses not closed", 0)) + } else { + match (peek(buf)) { + Err(e) => Err(e), + Ok(':') => { + ignore(eat(buf, ':')) + match (parseRegex(withConfig(buf, config))) { + Err(e) => Err(e), + Ok(rx) => { + match (eat(buf, ')')) { + Err(e) => Err(e), + Ok(_) => Ok(rx), + } + }, + } + }, + Ok(_) => { + Err( + parseErr( + buf, + "expected `:` or another mode after `(?` and a mode sequence; a mode is `i`, `-i`, `m`, `-m`, `s`, or `-s`", + 0 + ), + ) + }, + } + } + }, } }, + Ok(_) => { + ignore(eat(buf, '(')) + ignore(eat(buf, '?')) + parseLook(buf) + }, } } - }, - '[' => { - ignore(eat(buf, '[')) - match (parseRangeNot(buf)) { + } else { + // simple group + ignore(eat(buf, '(')) + let groupNum = unbox(buf.config.groupNumber) + // Note that this inc operation is side-effecting + match (parseRegex(withConfig(buf, configIncGroupNumber(buf.config)))) { Err(e) => Err(e), - Ok(rng) => Ok(makeRERange(rng, rangeLimit)), - } - }, - '.' => { - ignore(eat(buf, '.')) - if (buf.config.multiline) { - // if in multiline mode, '.' matches everything but \n - Ok( - makeRERange( - rangeInvert(rangeAdd([], Char.code('\n')), rangeLimit), - rangeLimit - ) - ) - } else { - Ok(REAny) + Ok(r) => { + match (eat(buf, ')')) { + Err(e) => Err(e), + Ok(_) => Ok(REGroup(r, groupNum)), + } + }, } - }, - '^' => { - ignore(eat(buf, '^')) - Ok( - if (buf.config.multiline) { - RELineStart - } else { - REStart - } - ) - }, - '$' => { - ignore(eat(buf, '$')) + } + }, + '[' => { + ignore(eat(buf, '[')) + match (parseRangeNot(buf)) { + Err(e) => Err(e), + Ok(rng) => Ok(makeRERange(rng, rangeLimit)), + } + }, + '.' => { + ignore(eat(buf, '.')) + if (buf.config.multiline) { + // if in multiline mode, '.' 
matches everything but \n Ok( - if (buf.config.multiline) { - RELineEnd - } else { - REEnd - } + makeRERange( + rangeInvert(rangeAdd([], Char.code('\n')), rangeLimit), + rangeLimit + ), ) - }, - _ => parseLiteral(buf), + } else { + Ok(REAny) + } + }, + '^' => { + ignore(eat(buf, '^')) + Ok(if (buf.config.multiline) { + RELineStart + } else { + REStart + }) + }, + '$' => { + ignore(eat(buf, '$')) + Ok(if (buf.config.multiline) { + RELineEnd + } else { + REEnd + }) }, + _ => parseLiteral(buf), + }, } } and parseLook = (buf: RegExBuf) => { @@ -1273,8 +1232,8 @@ and parseLook = (buf: RegExBuf) => { box(0), box(0), preNumGroups, - spanNumGroups() - ) + spanNumGroups(), + ), ) } }, @@ -1294,9 +1253,8 @@ and parseTest = (buf: RegExBuf) => { ignore(eat(buf, '?')) parseLook(buf) }, - Ok(c) when ( - Char.code(c) >= Char.code('0') && Char.code(c) <= Char.code('9') - ) => { + Ok(c) when Char.code(c) >= Char.code('0') && + Char.code(c) <= Char.code('9') => { buf.config.references := true let curPos = unbox(buf.cursor) match (parseInteger(buf, 0)) { @@ -1304,7 +1262,7 @@ and parseTest = (buf: RegExBuf) => { Ok(n) => { if (unbox(buf.cursor) == curPos) { Err( - parseErr(buf, "expected `)` after `(?(` followed by digits", 0) + parseErr(buf, "expected `)` after `(?(` followed by digits", 0), ) } else { match (eat(buf, ')')) { @@ -1317,7 +1275,7 @@ and parseTest = (buf: RegExBuf) => { }, Ok(_) => Err( - parseErr(buf, "expected `(?=`, `(?!`, `(?<`, or digit after `(?(`", 0) + parseErr(buf, "expected `(?=`, `(?!`, `(?<`, or digit after `(?(`", 0), ), } } @@ -1328,9 +1286,8 @@ and parseInteger = (buf: RegExBuf, n) => { } else { match (peek(buf)) { Err(c) => Err(c), - Ok(c) when ( - Char.code(c) >= Char.code('0') && Char.code(c) <= Char.code('9') - ) => { + Ok(c) when Char.code(c) >= Char.code('0') && + Char.code(c) <= Char.code('9') => { ignore(next(buf)) parseInteger(buf, 10 * n + (Char.code(c) - Char.code('0'))) }, @@ -1443,7 +1400,7 @@ and parseUnicodeCategories = (buf: RegExBuf, pC: String) => { LetterUppercase, LetterTitlecase, LetterModifier, - ] + ], ), "Lo" => Ok([LetterOther]), "L" => @@ -1454,7 +1411,7 @@ and parseUnicodeCategories = (buf: RegExBuf, pC: String) => { LetterTitlecase, LetterModifier, LetterOther, - ] + ], ), "Nd" => Ok([NumberDecimalDigit]), "Nl" => Ok([NumberLetter]), @@ -1477,7 +1434,7 @@ and parseUnicodeCategories = (buf: RegExBuf, pC: String) => { PunctuationConnector, PunctuationDash, PunctuationOther, - ] + ], ), "Mn" => Ok([MarkNonSpacing]), "Mc" => Ok([MarkSpacingCombining]), @@ -1506,7 +1463,7 @@ and parseUnicodeCategories = (buf: RegExBuf, pC: String) => { OtherSurrogate, OtherNotAssigned, OtherPrivateUse, - ] + ], ), "." 
=> Ok( @@ -1541,19 +1498,19 @@ and parseUnicodeCategories = (buf: RegExBuf, pC: String) => { OtherSurrogate, OtherNotAssigned, OtherPrivateUse, - ] + ], ), s => Err( parseErr( buf, "Unrecognized property name in `\\" ++ - pC ++ - "`: `" ++ - s ++ - "`", + pC ++ + "`: `" ++ + s ++ + "`", 0 - ) + ), ), } }, @@ -1585,7 +1542,7 @@ and parseLiteral = (buf: RegExBuf) => { Ok(')') => Err(parseErr(buf, "Unmatched `)` in pattern", 0)), Ok(c) when buf.config.isPerlRegExp && (c == ']' || c == '}') => Err( - parseErr(buf, "unmatched `" ++ Char.toString(c) ++ "` in pattern", 0) + parseErr(buf, "unmatched `" ++ Char.toString(c) ++ "` in pattern", 0), ), // TODO(#691): Enable case-insensitive regular expression matching Ok(c) when buf.config.caseSensitive => { @@ -1610,11 +1567,8 @@ and parseBackslashLiteral = (buf: RegExBuf) => { match (peek(buf)) { Err(e) => Err(e), // pregexp: - Ok(c) when ( - buf.config.isPerlRegExp && - (Char.code(c) >= Char.code('0') && - Char.code(c) <= Char.code('9')) - ) => { + Ok(c) when buf.config.isPerlRegExp && + (Char.code(c) >= Char.code('0') && Char.code(c) <= Char.code('9')) => { buf.config.references := true match (parseInteger(buf, 0)) { Err(e) => Err(e), @@ -1623,11 +1577,11 @@ and parseBackslashLiteral = (buf: RegExBuf) => { }, } }, - Ok(c) when ( - buf.config.isPerlRegExp && - (Char.code(c) >= Char.code('a') && Char.code(c) <= Char.code('z') || - Char.code(c) >= Char.code('A') && Char.code(c) <= Char.code('Z')) - ) => { + Ok(c) when buf.config.isPerlRegExp && + ( + Char.code(c) >= Char.code('a') && Char.code(c) <= Char.code('z') || + Char.code(c) >= Char.code('A') && Char.code(c) <= Char.code('Z') + ) => { match (c) { 'p' => { ignore(eat(buf, 'p')) @@ -1756,7 +1710,7 @@ and parsePCE = (buf: RegExBuf) => { buf, "expected digit or `}` to end repetition specification started with `{`", 0 - ) + ), ), } }, @@ -1776,7 +1730,7 @@ and parsePCE = (buf: RegExBuf) => { buf, "expected digit, `,`, or `}' for repetition specification started with `{`", 0 - ) + ), ), } }, @@ -1936,8 +1890,7 @@ let rec isAnchored = (re: ParsedRegularExpression) => { }, REAlts(a, b) => isAnchored(a) && isAnchored(b), REConditional(_, rx1, rx2, _, _, _) => - isAnchored(rx1) && - Option.mapWithDefault(isAnchored, false, rx2), + isAnchored(rx1) && Option.mapWithDefault(isAnchored, false, rx2), REGroup(rx, _) => isAnchored(rx), RECut(rx, _, _, _) => isAnchored(rx), _ => false, @@ -1980,7 +1933,11 @@ let rec mustString = (re: ParsedRegularExpression) => { let rec zeroSized = re => { match (re) { - REEmpty | REStart | RELineStart | REWordBoundary | RENotWordBoundary | + REEmpty | + REStart | + RELineStart | + REWordBoundary | + RENotWordBoundary | RELookahead(_, _, _, _) | RELookbehind(_, _, _, _, _, _) => true, @@ -2067,17 +2024,17 @@ let rec validate = (re: ParsedRegularExpression, numGroups) => { /** Computes the range of possible UTF-8 byte lengths for the given character range */ - let rangeUtf8EncodingLengths = (rng: CharRange) => { - let (min, max, _) = List.reduce(((min1, max1, n), (segStart, segEnd)) => { - if (rangeOverlaps(rng, segStart, segEnd)) { - (min(min1, n), max(max1, n), n + 1) - } else { - (min1, max1, n + 1) - } - }, - (4, 0, 1), - [(0, 127), (128, 0x7ff), (0x800, 0x7fff), (0x10000, 0x10ffff)] + let (min, max, _) = List.reduce( + ((min1, max1, n), (segStart, segEnd)) => { + if (rangeOverlaps(rng, segStart, segEnd)) { + (min(min1, n), max(max1, n), n + 1) + } else { + (min1, max1, n + 1) + } + }, + (4, 0, 1), + [(0, 127), (128, 0x7ff), (0x800, 0x7fff), (0x10000, 0x10ffff)] ) (min, max) 
} @@ -2229,42 +2186,38 @@ enum StackElt { SESavedGroup(Number, Option<(Number, Number)>), } -let done_m = - ( - buf: MatchBuf, - pos: Number, - start: Number, - limit: Number, - end: Number, - state, - stack, - ) => - Some(pos) -let continue_m = - ( - buf: MatchBuf, - pos: Number, - start: Number, - limit: Number, - end: Number, - state, - stack, - ) => { +let done_m = ( + buf: MatchBuf, + pos: Number, + start: Number, + limit: Number, + end: Number, + state, + stack, +) => Some(pos) +let continue_m = ( + buf: MatchBuf, + pos: Number, + start: Number, + limit: Number, + end: Number, + state, + stack, +) => { match (stack) { [SEPositionProducer(hd), ..._] => hd(pos), _ => fail "Impossible: continue_m", } } -let limit_m = - ( - buf: MatchBuf, - pos: Number, - start: Number, - limit: Number, - end: Number, - state, - stack, - ) => if (pos == limit) Some(pos) else None +let limit_m = ( + buf: MatchBuf, + pos: Number, + start: Number, + limit: Number, + end: Number, + state, + stack, +) => if (pos == limit) Some(pos) else None let iterateMatcher = (m, size, max) => ( @@ -2276,20 +2229,20 @@ let iterateMatcher = (m, size, max) => state, stack, ) => { - let limit = match (max) { - Some(max) => min(limit, pos + max * size), - None => limit, - } - let rec loop = (pos2, n) => { - let pos3 = pos2 + size - if (pos3 > limit || !m(buf, pos2, start, limit, end, state, stack)) { - (pos2, n, size) - } else { - loop(pos3, n + 1) + let limit = match (max) { + Some(max) => min(limit, pos + max * size), + None => limit, + } + let rec loop = (pos2, n) => { + let pos3 = pos2 + size + if (pos3 > limit || !m(buf, pos2, start, limit, end, state, stack)) { + (pos2, n, size) + } else { + loop(pos3, n + 1) + } } + loop(pos, 0) } - loop(pos, 0) -} // single-char matching @@ -2303,16 +2256,13 @@ let charMatcher = (toMatch, next_m) => state, stack, ) => { - if ( - { - pos < limit && - match (matchBufChar(buf, pos)) { - Err(_) => false, - Ok(c) => toMatch == c, - } - } - ) next_m(buf, pos + 1, start, limit, end, state, stack) else None -} + if ({ + pos < limit && match (matchBufChar(buf, pos)) { + Err(_) => false, + Ok(c) => toMatch == c, + } + }) next_m(buf, pos + 1, start, limit, end, state, stack) else None + } let charTailMatcher = toMatch => ( @@ -2324,38 +2274,40 @@ let charTailMatcher = toMatch => state, stack, ) => { - if ( - { - pos < limit && - match (matchBufChar(buf, pos)) { - Err(_) => false, - Ok(c) => toMatch == c, - } - } - ) Some(pos + 1) else None -} + if ({ + pos < limit && match (matchBufChar(buf, pos)) { + Err(_) => false, + Ok(c) => toMatch == c, + } + }) Some(pos + 1) else None + } let charMatcherIterated = (toMatch, max) => - iterateMatcher(( - buf: MatchBuf, - pos: Number, - start: Number, - limit: Number, - end: Number, - state, - stack - ) => { - match (matchBufChar(buf, pos)) { - Err(_) => false, - Ok(c) => toMatch == c, - } - }, 1, max) + iterateMatcher( + ( + buf: MatchBuf, + pos: Number, + start: Number, + limit: Number, + end: Number, + state, + stack, + ) => { + match (matchBufChar(buf, pos)) { + Err(_) => false, + Ok(c) => toMatch == c, + } + }, + 1, + max + ) // string matching let subArraysEqual = (arr1, start1, arr2, start2, length) => { if ( - Array.length(arr1) - start1 < length || Array.length(arr2) - start2 < length + Array.length(arr1) - start1 < length || + Array.length(arr2) - start2 < length ) { return false } @@ -2377,8 +2329,7 @@ let stringMatcher = (toMatch, len, next_m) => state, stack, ) => { - if ( - { + if ({ pos + len <= limit && subArraysEqual( buf.matchInputExploded, 
@@ -2387,9 +2338,8 @@ let stringMatcher = (toMatch, len, next_m) => 0, len ) - } - ) next_m(buf, pos + len, start, limit, end, state, stack) else None -} + }) next_m(buf, pos + len, start, limit, end, state, stack) else None + } let stringTailMatcher = (toMatch, len) => ( @@ -2401,8 +2351,7 @@ let stringTailMatcher = (toMatch, len) => state, stack, ) => { - if ( - { + if ({ pos + len <= limit && subArraysEqual( buf.matchInputExploded, @@ -2411,35 +2360,43 @@ let stringTailMatcher = (toMatch, len) => 0, len ) - } - ) Some(pos + len) else None -} + }) Some(pos + len) else None + } let stringMatcherIterated = (toMatch, len, max) => - iterateMatcher(( - buf: MatchBuf, - pos: Number, - start: Number, - limit: Number, - end: Number, - state, - stack - ) => { - subArraysEqual(buf.matchInputExploded, pos, String.explode(toMatch), 0, len) - }, len, max) + iterateMatcher( + ( + buf: MatchBuf, + pos: Number, + start: Number, + limit: Number, + end: Number, + state, + stack, + ) => { + subArraysEqual( + buf.matchInputExploded, + pos, + String.explode(toMatch), + 0, + len + ) + }, + len, + max + ) // match nothing -let neverMatcher = - ( - buf: MatchBuf, - pos: Number, - start: Number, - limit: Number, - end: Number, - state, - stack, - ) => { +let neverMatcher = ( + buf: MatchBuf, + pos: Number, + start: Number, + limit: Number, + end: Number, + state, + stack, +) => { None } @@ -2455,12 +2412,10 @@ let anyMatcher = next_m => state, stack, ) => { - if ( - { + if ({ pos < limit - } - ) next_m(buf, pos + 1, start, limit, end, state, stack) else None -} + }) next_m(buf, pos + 1, start, limit, end, state, stack) else None + } let anyTailMatcher = () => ( @@ -2472,12 +2427,10 @@ let anyTailMatcher = () => state, stack, ) => { - if ( - { + if ({ pos < limit - } - ) Some(pos + 1) else None -} + }) Some(pos + 1) else None + } let anyMatcherIterated = max => ( @@ -2489,12 +2442,12 @@ let anyMatcherIterated = max => state, stack, ) => { - let n = match (max) { - None => limit - pos, - Some(max) => min(max, limit - pos), + let n = match (max) { + None => limit - pos, + Some(max) => min(max, limit - pos), + } + (pos + n, n, 1) } - (pos + n, n, 1) -} // match byte in set (range) @@ -2508,16 +2461,13 @@ let rangeMatcher = (rng: CharRange, next_m) => state, stack, ) => { - if ( - { - pos < limit && - match (matchBufChar(buf, pos)) { - Err(_) => false, - Ok(c) => rangeContains(rng, Char.code(c)), - } - } - ) next_m(buf, pos + 1, start, limit, end, state, stack) else None -} + if ({ + pos < limit && match (matchBufChar(buf, pos)) { + Err(_) => false, + Ok(c) => rangeContains(rng, Char.code(c)), + } + }) next_m(buf, pos + 1, start, limit, end, state, stack) else None + } let rangeTailMatcher = (rng: CharRange) => ( @@ -2529,32 +2479,33 @@ let rangeTailMatcher = (rng: CharRange) => state, stack, ) => { - if ( - { - pos < limit && - match (matchBufChar(buf, pos)) { - Err(_) => false, - Ok(c) => rangeContains(rng, Char.code(c)), - } - } - ) Some(pos + 1) else None -} + if ({ + pos < limit && match (matchBufChar(buf, pos)) { + Err(_) => false, + Ok(c) => rangeContains(rng, Char.code(c)), + } + }) Some(pos + 1) else None + } let rangeMatcherIterated = (rng: CharRange, max) => - iterateMatcher(( - buf: MatchBuf, - pos: Number, - start: Number, - limit: Number, - end: Number, - state, - stack - ) => { - match (matchBufChar(buf, pos)) { - Err(_) => false, - Ok(c) => rangeContains(rng, Char.code(c)), - } - }, 1, max) + iterateMatcher( + ( + buf: MatchBuf, + pos: Number, + start: Number, + limit: Number, + end: Number, + state, 
+ stack, + ) => { + match (matchBufChar(buf, pos)) { + Err(_) => false, + Ok(c) => rangeContains(rng, Char.code(c)), + } + }, + 1, + max + ) // zero-width matchers @@ -2568,8 +2519,11 @@ let startMatcher = next_m => state, stack, ) => { - if (pos == start) next_m(buf, pos, start, limit, end, state, stack) else None -} + if (pos == start) + next_m(buf, pos, start, limit, end, state, stack) + else + None + } let endMatcher = next_m => ( @@ -2581,8 +2535,8 @@ let endMatcher = next_m => state, stack, ) => { - if (pos == end) next_m(buf, pos, start, limit, end, state, stack) else None -} + if (pos == end) next_m(buf, pos, start, limit, end, state, stack) else None + } let lineStartMatcher = next_m => ( @@ -2594,9 +2548,11 @@ let lineStartMatcher = next_m => state, stack, ) => { - if (pos == start || matchBufChar(buf, pos - 1) == Ok('\n')) - next_m(buf, pos, start, limit, end, state, stack) else None -} + if (pos == start || matchBufChar(buf, pos - 1) == Ok('\n')) + next_m(buf, pos, start, limit, end, state, stack) + else + None + } let lineEndMatcher = next_m => ( @@ -2608,24 +2564,20 @@ let lineEndMatcher = next_m => state, stack, ) => { - if (pos == end || matchBufChar(buf, pos) == Ok('\n')) - next_m(buf, pos, start, limit, end, state, stack) else None -} + if (pos == end || matchBufChar(buf, pos) == Ok('\n')) + next_m(buf, pos, start, limit, end, state, stack) + else + None + } let isWordChar = c => { match (c) { Err(_) => false, - Ok(c) when ( - Char.code('0') <= Char.code(c) && Char.code(c) <= Char.code('9') - ) => + Ok(c) when Char.code('0') <= Char.code(c) && Char.code(c) <= Char.code('9') => true, - Ok(c) when ( - Char.code('a') <= Char.code(c) && Char.code(c) <= Char.code('z') - ) => + Ok(c) when Char.code('a') <= Char.code(c) && Char.code(c) <= Char.code('z') => true, - Ok(c) when ( - Char.code('A') <= Char.code(c) && Char.code(c) <= Char.code('Z') - ) => + Ok(c) when Char.code('A') <= Char.code(c) && Char.code(c) <= Char.code('Z') => true, Ok(c) when Char.code('_') <= Char.code(c) => true, _ => false, @@ -2633,8 +2585,10 @@ let isWordChar = c => { } let isWordBoundary = (buf, pos, start, limit, end) => { - !((pos == start || !isWordChar(matchBufChar(buf, pos - 1))) == - (pos == end || !isWordChar(matchBufChar(buf, pos)))) + !( + (pos == start || !isWordChar(matchBufChar(buf, pos - 1))) == + (pos == end || !isWordChar(matchBufChar(buf, pos))) + ) } let wordBoundaryMatcher = next_m => @@ -2647,9 +2601,11 @@ let wordBoundaryMatcher = next_m => state, stack, ) => { - if (isWordBoundary(buf, pos, start, limit, end)) - next_m(buf, pos, start, limit, end, state, stack) else None -} + if (isWordBoundary(buf, pos, start, limit, end)) + next_m(buf, pos, start, limit, end, state, stack) + else + None + } let notWordBoundaryMatcher = next_m => ( @@ -2661,9 +2617,11 @@ let notWordBoundaryMatcher = next_m => state, stack, ) => { - if (!isWordBoundary(buf, pos, start, limit, end)) - next_m(buf, pos, start, limit, end, state, stack) else None -} + if (!isWordBoundary(buf, pos, start, limit, end)) + next_m(buf, pos, start, limit, end, state, stack) + else + None + } // Alternatives @@ -2677,11 +2635,11 @@ let altsMatcher = (m1, m2) => state, stack, ) => { - match (m1(buf, pos, start, limit, end, state, stack)) { - None => m2(buf, pos, start, limit, end, state, stack), - Some(v) => Some(v), + match (m1(buf, pos, start, limit, end, state, stack)) { + None => m2(buf, pos, start, limit, end, state, stack), + Some(v) => Some(v), + } } -} // repeats, greedy (default) and non-greedy @@ -2695,27 +2653,25 @@ 
let repeatMatcher = (r_m, min, max, next_m) => state, stack, ) => { - let rec rloop = (pos, n) => { - if (n < min) { - let newStack = [SEPositionProducer(pos => rloop(pos, n + 1)), ...stack] - r_m(buf, pos, start, limit, end, state, newStack) - } else if ( - match (max) { + let rec rloop = (pos, n) => { + if (n < min) { + let newStack = [SEPositionProducer(pos => rloop(pos, n + 1)), ...stack] + r_m(buf, pos, start, limit, end, state, newStack) + } else if (match (max) { None => false, Some(max) => max == n, - } - ) { - next_m(buf, pos, start, limit, end, state, stack) - } else { - let newStack = [SEPositionProducer(pos => rloop(pos, n + 1)), ...stack] - match (r_m(buf, pos, start, limit, end, state, newStack)) { - Some(v) => Some(v), - None => next_m(buf, pos, start, limit, end, state, stack), + }) { + next_m(buf, pos, start, limit, end, state, stack) + } else { + let newStack = [SEPositionProducer(pos => rloop(pos, n + 1)), ...stack] + match (r_m(buf, pos, start, limit, end, state, newStack)) { + Some(v) => Some(v), + None => next_m(buf, pos, start, limit, end, state, stack), + } } } + rloop(pos, 0) } - rloop(pos, 0) -} let rStack = [SEPositionProducer(pos => Some(pos))] @@ -2772,44 +2728,45 @@ let repeatSimpleMatcher = (r_m, min, max, groupN, next_m) => state, stack, ) => { - let rec rloop = (pos, n, backAmt) => { - let pos2 = match (max) { - Some(max) when n < max => r_m(buf, pos, start, limit, end, state, rStack), - Some(_) => None, - _ => r_m(buf, pos, start, limit, end, state, rStack), - } - match (pos2) { - Some(pos2) => rloop(pos2, n + 1, pos2 - pos), - None => { - // Perform backtracking - let rec bloop = (pos, n) => { - if (n < min) { - None - } else { - addRepeatedGroup( - groupN, - state, - pos, - n, - backAmt, - groupRevert => { - match (next_m(buf, pos, start, limit, end, state, stack)) { - Some(v) => Some(v), - None => { - groupRevert() - bloop(pos - backAmt, n - 1) - }, + let rec rloop = (pos, n, backAmt) => { + let pos2 = match (max) { + Some(max) when n < max => + r_m(buf, pos, start, limit, end, state, rStack), + Some(_) => None, + _ => r_m(buf, pos, start, limit, end, state, rStack), + } + match (pos2) { + Some(pos2) => rloop(pos2, n + 1, pos2 - pos), + None => { + // Perform backtracking + let rec bloop = (pos, n) => { + if (n < min) { + None + } else { + addRepeatedGroup( + groupN, + state, + pos, + n, + backAmt, + groupRevert => { + match (next_m(buf, pos, start, limit, end, state, stack)) { + Some(v) => Some(v), + None => { + groupRevert() + bloop(pos - backAmt, n - 1) + }, + } } - } - ) + ) + } } - } - bloop(pos, n) - }, + bloop(pos, n) + }, + } } + rloop(pos, 0, 0) } - rloop(pos, 0, 0) -} let repeatSimpleManyMatcher = (r_m, min, max, groupN, next_m) => ( @@ -2821,18 +2778,12 @@ let repeatSimpleManyMatcher = (r_m, min, max, groupN, next_m) => state, stack, ) => { - let (pos2, n, backAmt) = r_m(buf, pos, start, limit, end, state, stack) - let rec bloop = (pos, n) => { - if (n < min) { - None - } else { - addRepeatedGroup( - groupN, - state, - pos, - n, - backAmt, - groupRevert => { + let (pos2, n, backAmt) = r_m(buf, pos, start, limit, end, state, stack) + let rec bloop = (pos, n) => { + if (n < min) { + None + } else { + addRepeatedGroup(groupN, state, pos, n, backAmt, groupRevert => { match (next_m(buf, pos, start, limit, end, state, stack)) { Some(v) => Some(v), None => { @@ -2840,12 +2791,11 @@ let repeatSimpleManyMatcher = (r_m, min, max, groupN, next_m) => bloop(pos - backAmt, n - 1) }, } - } - ) + }) + } } + bloop(pos2, n) } - bloop(pos2, n) -} let 
lazyRepeatMatcher = (r_m, min, max, next_m) => ( @@ -2857,29 +2807,27 @@ let lazyRepeatMatcher = (r_m, min, max, next_m) => state, stack, ) => { - let rec rloop = (pos, n, min) => { - if (n < min) { - let newStack = [ - SEPositionProducer(pos => rloop(pos, n + 1, min)), - ...stack - ] - r_m(buf, pos, start, limit, end, state, newStack) - } else if ( - match (max) { + let rec rloop = (pos, n, min) => { + if (n < min) { + let newStack = [ + SEPositionProducer(pos => rloop(pos, n + 1, min)), + ...stack + ] + r_m(buf, pos, start, limit, end, state, newStack) + } else if (match (max) { None => false, Some(max) => max == n, - } - ) { - next_m(buf, pos, start, limit, end, state, stack) - } else { - match (next_m(buf, pos, start, limit, end, state, stack)) { - Some(p) => Some(p), - None => rloop(pos, n, min + 1), + }) { + next_m(buf, pos, start, limit, end, state, stack) + } else { + match (next_m(buf, pos, start, limit, end, state, stack)) { + Some(p) => Some(p), + None => rloop(pos, n, min + 1), + } } } + rloop(pos, 0, min) } - rloop(pos, 0, min) -} let lazyRepeatSimpleMatcher = (r_m, min, max, next_m) => ( @@ -2891,28 +2839,26 @@ let lazyRepeatSimpleMatcher = (r_m, min, max, next_m) => state, stack, ) => { - let rec rloop = (pos, n, min) => { - if (n < min) { - match (r_m(buf, pos, start, limit, end, state, stack)) { - Some(p) => rloop(p, n + 1, min), - None => None, - } - } else if ( - match (max) { + let rec rloop = (pos, n, min) => { + if (n < min) { + match (r_m(buf, pos, start, limit, end, state, stack)) { + Some(p) => rloop(p, n + 1, min), + None => None, + } + } else if (match (max) { None => false, Some(max) => max == n, - } - ) { - next_m(buf, pos, start, limit, end, state, stack) - } else { - match (next_m(buf, pos, start, limit, end, state, stack)) { - Some(p) => Some(p), - None => rloop(pos, n, min + 1), + }) { + next_m(buf, pos, start, limit, end, state, stack) + } else { + match (next_m(buf, pos, start, limit, end, state, stack)) { + Some(p) => Some(p), + None => rloop(pos, n, min + 1), + } } } + rloop(pos, 0, min) } - rloop(pos, 0, min) -} // Recording and referencing group matches @@ -2926,12 +2872,12 @@ let groupPushMatcher = (n, next_m) => state, stack, ) => { - let newStack = [ - SESavedGroup(pos, if (Array.length(state) > 0) state[n] else None), - ...stack - ] - next_m(buf, pos, start, limit, end, state, newStack) -} + let newStack = [ + SESavedGroup(pos, if (Array.length(state) > 0) state[n] else None), + ...stack + ] + next_m(buf, pos, start, limit, end, state, newStack) + } let groupSetMatcher = (n, next_m) => ( @@ -2943,56 +2889,57 @@ let groupSetMatcher = (n, next_m) => state, stack, ) => { - match (stack) { - [SESavedGroup(oldPos, oldSpan), ...stackTl] => { - if (Array.length(state) > 0) { - state[n] = Some((oldPos, pos)) - } - match (next_m(buf, pos, start, limit, end, state, stackTl)) { - Some(v) => Some(v), - None => { - if (Array.length(state) > 0) { - state[n] = oldSpan - } - None - }, - } - }, - _ => fail "Impossible: groupSetMatcher", + match (stack) { + [SESavedGroup(oldPos, oldSpan), ...stackTl] => { + if (Array.length(state) > 0) { + state[n] = Some((oldPos, pos)) + } + match (next_m(buf, pos, start, limit, end, state, stackTl)) { + Some(v) => Some(v), + None => { + if (Array.length(state) > 0) { + state[n] = oldSpan + } + None + }, + } + }, + _ => fail "Impossible: groupSetMatcher", + } } -} -let makeReferenceMatcher = eq => (n, next_m) => - ( - buf: MatchBuf, - pos: Number, - start: Number, - limit: Number, - end: Number, - state, - stack, - ) => { - match 
(state[n]) { - None => None, - Some((refStart, refEnd)) => { - let len = refEnd - refStart - if ( - pos + len <= limit && - subArraysEqual( - buf.matchInputExploded, - refStart, - buf.matchInputExploded, - pos, - len - ) - ) { - next_m(buf, pos + len, start, limit, end, state, stack) - } else { - None +let makeReferenceMatcher = eq => + (n, next_m) => + ( + buf: MatchBuf, + pos: Number, + start: Number, + limit: Number, + end: Number, + state, + stack, + ) => { + match (state[n]) { + None => None, + Some((refStart, refEnd)) => { + let len = refEnd - refStart + if ( + pos + len <= limit && + subArraysEqual( + buf.matchInputExploded, + refStart, + buf.matchInputExploded, + pos, + len + ) + ) { + next_m(buf, pos + len, start, limit, end, state, stack) + } else { + None + } + }, } - }, - } -} + } let referenceMatcher = makeReferenceMatcher(((a, b)) => a == b) @@ -3004,8 +2951,9 @@ let asciiCharToLower = c => { } } -let referenceMatcherCaseInsensitive = makeReferenceMatcher(((a, b)) => - asciiCharToLower(a) == asciiCharToLower(b)) +let referenceMatcherCaseInsensitive = makeReferenceMatcher( + ((a, b)) => asciiCharToLower(a) == asciiCharToLower(b) +) // Lookahead, Lookbehind, Conditionals, and Cut @@ -3019,29 +2967,29 @@ let lookaheadMatcher = (isMatch, sub_m, nStart, numN, next_m) => state, stack, ) => { - let oldState = saveGroups(state, nStart, numN) - let ret = match (sub_m(buf, pos, start, limit, end, state, stack)) { - Some(_) when isMatch => { - match (next_m(buf, pos, start, limit, end, state, stack)) { - Some(p) => Some(p), - None => { - restoreGroups(state, oldState, nStart, numN) - None - }, - } - }, - Some(_) => { - restoreGroups(state, oldState, nStart, numN) - None - }, - None when isMatch => { - restoreGroups(state, oldState, nStart, numN) - None - }, - _ => next_m(buf, pos, start, limit, end, state, stack), + let oldState = saveGroups(state, nStart, numN) + let ret = match (sub_m(buf, pos, start, limit, end, state, stack)) { + Some(_) when isMatch => { + match (next_m(buf, pos, start, limit, end, state, stack)) { + Some(p) => Some(p), + None => { + restoreGroups(state, oldState, nStart, numN) + None + }, + } + }, + Some(_) => { + restoreGroups(state, oldState, nStart, numN) + None + }, + None when isMatch => { + restoreGroups(state, oldState, nStart, numN) + None + }, + _ => next_m(buf, pos, start, limit, end, state, stack), + } + ret } - ret -} let lookbehindMatcher = (isMatch, lbMin, lbMax, sub_m, nStart, numN, next_m) => ( @@ -3053,39 +3001,39 @@ let lookbehindMatcher = (isMatch, lbMin, lbMax, sub_m, nStart, numN, next_m) => state, stack, ) => { - let lbMinPos = max(start, pos - lbMax) - let rec loop = lbPos => { - if (lbPos < lbMinPos) { - if (isMatch) { - None - } else { - next_m(buf, pos, start, limit, end, state, stack) - } - } else { - let oldState = saveGroups(state, nStart, numN) - match (sub_m(buf, lbPos, start, pos, end, state, stack)) { - Some(_) when isMatch => { - match (next_m(buf, pos, start, limit, end, state, stack)) { - Some(p) => Some(p), - None => { - restoreGroups(state, oldState, nStart, numN) - None - }, - } - }, - _ when isMatch => { - loop(lbPos - 1) - }, - Some(_) => { - restoreGroups(state, oldState, nStart, numN) + let lbMinPos = max(start, pos - lbMax) + let rec loop = lbPos => { + if (lbPos < lbMinPos) { + if (isMatch) { None - }, - _ => next_m(buf, pos, start, limit, end, state, stack), + } else { + next_m(buf, pos, start, limit, end, state, stack) + } + } else { + let oldState = saveGroups(state, nStart, numN) + match (sub_m(buf, lbPos, start, 
pos, end, state, stack)) { + Some(_) when isMatch => { + match (next_m(buf, pos, start, limit, end, state, stack)) { + Some(p) => Some(p), + None => { + restoreGroups(state, oldState, nStart, numN) + None + }, + } + }, + _ when isMatch => { + loop(lbPos - 1) + }, + Some(_) => { + restoreGroups(state, oldState, nStart, numN) + None + }, + _ => next_m(buf, pos, start, limit, end, state, stack), + } } } + loop(pos - lbMin) } - loop(pos - lbMin) -} let conditionalReferenceMatcher = (n, m1, m2) => ( @@ -3097,12 +3045,12 @@ let conditionalReferenceMatcher = (n, m1, m2) => state, stack, ) => { - if (Option.isSome(state[n])) { - m1(buf, pos, start, limit, end, state, stack) - } else { - m2(buf, pos, start, limit, end, state, stack) + if (Option.isSome(state[n])) { + m1(buf, pos, start, limit, end, state, stack) + } else { + m2(buf, pos, start, limit, end, state, stack) + } } -} let conditionalLookMatcher = (tst_m, m1, m2, nStart, numN) => ( @@ -3114,19 +3062,19 @@ let conditionalLookMatcher = (tst_m, m1, m2, nStart, numN) => state, stack, ) => { - let oldState = saveGroups(state, nStart, numN) - let res = match (tst_m(buf, pos, start, limit, end, state, [])) { - Some(_) => m1(buf, pos, start, limit, end, state, stack), - None => m2(buf, pos, start, limit, end, state, stack), - } - match (res) { - Some(p) => Some(p), - None => { - restoreGroups(state, oldState, nStart, numN) - None - }, + let oldState = saveGroups(state, nStart, numN) + let res = match (tst_m(buf, pos, start, limit, end, state, [])) { + Some(_) => m1(buf, pos, start, limit, end, state, stack), + None => m2(buf, pos, start, limit, end, state, stack), + } + match (res) { + Some(p) => Some(p), + None => { + restoreGroups(state, oldState, nStart, numN) + None + }, + } } -} let cutMatcher = (sub_m, nStart, numN, next_m) => ( @@ -3138,20 +3086,20 @@ let cutMatcher = (sub_m, nStart, numN, next_m) => state, stack, ) => { - let oldState = saveGroups(state, nStart, numN) - match (sub_m(buf, pos, start, limit, end, state, [])) { - None => None, - Some(_) => { - match (next_m(buf, pos, start, limit, end, state, stack)) { - None => { - restoreGroups(state, oldState, nStart, numN) - None - }, - Some(p) => Some(p), - } - }, + let oldState = saveGroups(state, nStart, numN) + match (sub_m(buf, pos, start, limit, end, state, [])) { + None => None, + Some(_) => { + match (next_m(buf, pos, start, limit, end, state, stack)) { + None => { + restoreGroups(state, oldState, nStart, numN) + None + }, + Some(p) => Some(p), + } + }, + } } -} // Unicode characters in UTF-8 encoding @@ -3165,8 +3113,8 @@ let unicodeCategoriesMatcher = (cats, isMatch, next_m) => state, stack, ) => { - fail "NYI: unicodeCategoriesMatcher is not supported until grain-lang/grain#661 is resolved." -} + fail "NYI: unicodeCategoriesMatcher is not supported until grain-lang/grain#661 is resolved." 
+ } // ------- // Regex matcher compilation @@ -3218,8 +3166,7 @@ let compileRegexToMatcher = (re: ParsedRegularExpression) => { }, REAlts(re1, re2) => altsMatcher(compile(re1, next_m), compile(re2, next_m)), - REMaybe(re, true) => - altsMatcher(next_m, compile(re, next_m)), // non-greedy + REMaybe(re, true) => altsMatcher(next_m, compile(re, next_m)), // non-greedy REMaybe(re, _) => altsMatcher(compile(re, next_m), next_m), RERepeat(actualRe, min, max, nonGreedy) => { // Special case: group around simple pattern in non-lazy repeat @@ -3229,11 +3176,10 @@ let compileRegexToMatcher = (re: ParsedRegularExpression) => { _ => actualRe, } let simple = !needsBacktrack(re) - let groupN = if (simple) - match (actualRe) { - REGroup(_, n) => Some(n), - _ => None, - } else None + let groupN = if (simple) match (actualRe) { + REGroup(_, n) => Some(n), + _ => None, + } else None match (compileMatcherRepeater(re, min, max)) { Some(matcher) when !nonGreedy => repeatSimpleManyMatcher(matcher, min, max, groupN, next_m), @@ -3482,7 +3428,7 @@ provide let make = (regexString: String) => { reMustString: mustString(parsed), reIsAnchored: isAnchored(parsed), reStartRange: startRange(parsed), - } + }, ) }, } @@ -3504,7 +3450,8 @@ let checkMustString = (ms, buf: MatchBuf, pos, endPos) => { None => true, Some(ms) => { let toCheck = if ( - pos == 0 && endPos == Array.length(buf.matchInputExploded) + pos == 0 && + endPos == Array.length(buf.matchInputExploded) ) { buf.matchInput } else { @@ -3520,15 +3467,14 @@ let checkStartRange = (startRange, buf, pos, endPos) => { rangeContains(startRange, Char.code(buf.matchInputExploded[pos])) } -let searchMatch = - ( - rx: RegularExpression, - buf: MatchBuf, - pos, - startPos, - endPos, - state, - ) => { +let searchMatch = ( + rx: RegularExpression, + buf: MatchBuf, + pos, + startPos, + endPos, + state, +) => { if (!checkMustString(rx.reMustString, buf, pos, endPos)) { None } else { @@ -3540,8 +3486,7 @@ let searchMatch = None } else { match (startRange) { - Some(_) when pos == endPos => - None, // Can't possibly match if chars are required and we are at EOS + Some(_) when pos == endPos => None, // Can't possibly match if chars are required and we are at EOS Some(rng) when !checkStartRange(rng, buf, pos, endPos) => loop(pos + 1), _ => { @@ -3635,12 +3580,14 @@ let makeMatchResult = (origString, start, end, state) => { // Helpers for user-facing match functionality let fastDriveRegexIsMatch = (rx, string, startOffset, endOffset) => { - let state = - if (rx.reReferences) Array.make(rx.reNumGroups, None) - else Array.make(0, None) - let toWrap = - if (startOffset == 0 && endOffset == String.length(string)) string - else String.slice(startOffset, end=endOffset, string) + let state = if (rx.reReferences) + Array.make(rx.reNumGroups, None) + else + Array.make(0, None) + let toWrap = if (startOffset == 0 && endOffset == String.length(string)) + string + else + String.slice(startOffset, end=endOffset, string) let buf = makeMatchBuffer(toWrap) Option.isSome( searchMatch(rx, buf, 0, 0, Array.length(buf.matchInputExploded), state) @@ -3652,18 +3599,14 @@ let rec fastDriveRegexMatchAll = (rx, string, startOffset, endOffset) => { [] } else { let state = Array.make(rx.reNumGroups, None) - let toWrap = - if (startOffset == 0 && endOffset == String.length(string)) string - else String.slice(startOffset, end=endOffset, string) + let toWrap = if (startOffset == 0 && endOffset == String.length(string)) + string + else + String.slice(startOffset, end=endOffset, string) let buf = 
makeMatchBuffer(toWrap) - match (searchMatch( - rx, - buf, - 0, - 0, - Array.length(buf.matchInputExploded), - state - )) { + match ( + searchMatch(rx, buf, 0, 0, Array.length(buf.matchInputExploded), state) + ) { None => [], Some((startPos, endPos)) => [ @@ -3692,18 +3635,14 @@ let rec fastDriveRegexMatchAll = (rx, string, startOffset, endOffset) => { let fastDriveRegexMatch = (rx, string, startOffset, endOffset) => { let state = Array.make(rx.reNumGroups, None) - let toWrap = - if (startOffset == 0 && endOffset == String.length(string)) string - else String.slice(startOffset, end=endOffset, string) + let toWrap = if (startOffset == 0 && endOffset == String.length(string)) + string + else + String.slice(startOffset, end=endOffset, string) let buf = makeMatchBuffer(toWrap) - match (searchMatch( - rx, - buf, - 0, - 0, - Array.length(buf.matchInputExploded), - state - )) { + match ( + searchMatch(rx, buf, 0, 0, Array.length(buf.matchInputExploded), state) + ) { None => None, Some((startPos, endPos)) => { Some( @@ -3718,7 +3657,7 @@ let fastDriveRegexMatch = (rx, string, startOffset, endOffset) => { Some((start + startOffset, end + startOffset)), } }, state) - ) + ), ) }, } @@ -3751,13 +3690,12 @@ provide let isMatch = (rx: RegularExpression, string: String) => { * * @since 0.4.3 */ -provide let isMatchRange = - ( - rx: RegularExpression, - string: String, - start: Number, - end: Number, - ) => { +provide let isMatchRange = ( + rx: RegularExpression, + string: String, + start: Number, + end: Number, +) => { fastDriveRegexIsMatch(rx, string, start, end) } @@ -3788,13 +3726,12 @@ provide let find = (rx: RegularExpression, string: String) => { * * @since 0.4.3 */ -provide let findRange = - ( - rx: RegularExpression, - string: String, - start: Number, - end: Number, - ) => { +provide let findRange = ( + rx: RegularExpression, + string: String, + start: Number, + end: Number, +) => { fastDriveRegexMatch(rx, string, start, end) } @@ -3821,24 +3758,22 @@ provide let findAll = (rx: RegularExpression, string: String) => { * * @since 0.4.3 */ -provide let findAllRange = - ( - rx: RegularExpression, - string: String, - start: Number, - end: Number, - ) => { +provide let findAllRange = ( + rx: RegularExpression, + string: String, + start: Number, + end: Number, +) => { fastDriveRegexMatchAll(rx, string, start, end) } -let computeReplacement = - ( - matchBuf: MatchBuf, - replacementString: String, - start, - end, - state, - ) => { +let computeReplacement = ( + matchBuf: MatchBuf, + replacementString: String, + start, + end, + state, +) => { let replacementExploded = String.explode(replacementString) let len = Array.length(replacementExploded) let mut acc = [] @@ -3857,8 +3792,10 @@ let computeReplacement = } } let consRange = (start, end, lst) => { - if (start == end) lst - else [String.slice(start, end=end, replacementString), ...lst] + if (start == end) + lst + else + [String.slice(start, end=end, replacementString), ...lst] } let rec loop = (pos, since) => { if (pos == len) { @@ -3872,34 +3809,30 @@ let computeReplacement = } else if (c == Some('\'')) { consRange(since, pos, [getAfterMatch(), ...loop(pos + 2, pos + 2)]) } else { - consRange( - since, - pos, - { - if (c == Some('$')) { - loop(pos + 2, pos + 1) - } else if (c == Some('.')) { - loop(pos + 2, pos + 2) - } else { - let rec dLoop = (pos, accum) => { - if (pos == len) { - [getInputSubstr(accum)] + consRange(since, pos, { + if (c == Some('$')) { + loop(pos + 2, pos + 1) + } else if (c == Some('.')) { + loop(pos + 2, pos + 2) + } else { + let 
rec dLoop = (pos, accum) => { + if (pos == len) { + [getInputSubstr(accum)] + } else { + let c = replacementExploded[pos] + if ( + Char.code('0') <= Char.code(c) && + Char.code(c) <= Char.code('9') + ) { + dLoop(pos + 1, 10 * accum + (Char.code(c) - Char.code('0'))) } else { - let c = replacementExploded[pos] - if ( - Char.code('0') <= Char.code(c) && - Char.code(c) <= Char.code('9') - ) { - dLoop(pos + 1, 10 * accum + (Char.code(c) - Char.code('0'))) - } else { - [getInputSubstr(accum), ...loop(pos, pos)] - } + [getInputSubstr(accum), ...loop(pos, pos)] } } - dLoop(pos + 1, 0) } + dLoop(pos + 1, 0) } - ) + }) } } else { loop(pos + 1, since) @@ -3909,13 +3842,12 @@ let computeReplacement = List.reduceRight(String.concat, "", res) } -let regexReplaceHelp = - ( - rx: RegularExpression, - toSearch: String, - replacement: String, - all: Bool, - ) => { +let regexReplaceHelp = ( + rx: RegularExpression, + toSearch: String, + replacement: String, + all: Bool, +) => { let buf = makeMatchBuffer(toSearch) let rec loop = searchPos => { let state = Array.make(rx.reNumGroups, None) @@ -3978,12 +3910,11 @@ let regexReplaceHelp = * * @since 0.4.3 */ -provide let replace = - ( - rx: RegularExpression, - toSearch: String, - replacement: String, - ) => { +provide let replace = ( + rx: RegularExpression, + toSearch: String, + replacement: String, +) => { regexReplaceHelp(rx, toSearch, replacement, false) } @@ -4000,12 +3931,11 @@ provide let replace = * * @since 0.4.3 */ -provide let replaceAll = - ( - rx: RegularExpression, - toSearch: String, - replacement: String, - ) => { +provide let replaceAll = ( + rx: RegularExpression, + toSearch: String, + replacement: String, +) => { regexReplaceHelp(rx, toSearch, replacement, true) } diff --git a/stdlib/result.gr b/stdlib/result.gr index e4bab575b7..06361ea1a4 100644 --- a/stdlib/result.gr +++ b/stdlib/result.gr @@ -11,7 +11,6 @@ * * @since v0.2.0 */ - module Result /** diff --git a/stdlib/runtime/atof/common.gr b/stdlib/runtime/atof/common.gr index 6228609229..cc7034e2b2 100644 --- a/stdlib/runtime/atof/common.gr +++ b/stdlib/runtime/atof/common.gr @@ -115,10 +115,7 @@ provide let _CHAR_CODE_y = 0x79n provide let fpZero = () => { f: 0L, e: 0l } @unsafe provide let fpInf = () => - { - f: 0L, - e: WasmI32.toGrain(newInt32(_INFINITE_POWER)): Int32, - } + { f: 0L, e: WasmI32.toGrain(newInt32(_INFINITE_POWER)): Int32 } provide let fpErr = () => { f: 0L, e: -1l } provide let fpNan = () => { f: 1L, e: -1l } diff --git a/stdlib/runtime/atof/decimal.gr b/stdlib/runtime/atof/decimal.gr index 27128f6f5c..121216e979 100644 --- a/stdlib/runtime/atof/decimal.gr +++ b/stdlib/runtime/atof/decimal.gr @@ -383,7 +383,7 @@ provide let leftShift = (d, shift) => { n = addWasmI64( n, WasmI64.extendI32U(WasmI32.load8U(digits + readIndex, 8n)) << - WasmI64.extendI32U(shift) + WasmI64.extendI32U(shift) ) let quotient = n / 10N let remainder = subWasmI64(n, 10N * quotient) @@ -632,10 +632,10 @@ provide let parseDecimal = (s: String) => { } let mut decimalPoint = WasmI32.load(WasmI32.fromGrain(d.decimalPoint), 4n) decimalPoint += if (negExp) { - 0n - expNum - } else { - expNum - } + 0n - expNum + } else { + expNum + } d.decimalPoint = WasmI32.toGrain(newInt32(decimalPoint)) } let mut numDigits = WasmI32.load(WasmI32.fromGrain(d.numDigits), 4n) diff --git a/stdlib/runtime/atof/lemire.gr b/stdlib/runtime/atof/lemire.gr index 9fff773386..8d396ace32 100644 --- a/stdlib/runtime/atof/lemire.gr +++ b/stdlib/runtime/atof/lemire.gr @@ -164,13 +164,10 @@ provide let computeFloat = (exponent: 
WasmI64, mantissa: WasmI64) => { true => fpErr(), false => { let upperbit = WasmI32.wrapI64(hi >>> 63N) - let mut mantissa = hi >>> - WasmI64.extendI32S( - { - from WasmI32 use { (+), (-) } - upperbit + 64n - _MANTISSA_EXPLICIT_BITS_32 - 3n - } - ) + let mut mantissa = hi >>> WasmI64.extendI32S({ + from WasmI32 use { (+), (-) } + upperbit + 64n - _MANTISSA_EXPLICIT_BITS_32 - 3n + }) let mut power2 = { from WasmI32 use { (+), (-) } let q = WasmI32.wrapI64(q) @@ -221,14 +218,10 @@ provide let computeFloat = (exponent: WasmI64, mantissa: WasmI64) => { q >= _MIN_EXPONENT_ROUND_TO_EVEN && q <= _MAX_EXPONENT_ROUND_TO_EVEN && (mantissa & 3N) == 1N && - mantissa << - WasmI64.extendI32S( - { - from WasmI32 use { (+), (-) } - upperbit + 64n - _MANTISSA_EXPLICIT_BITS_32 - 3n - } - ) == - hi + mantissa << WasmI64.extendI32S({ + from WasmI32 use { (+), (-) } + upperbit + 64n - _MANTISSA_EXPLICIT_BITS_32 - 3n + }) == hi ) { from WasmI64 use { (^) } // Zero the lowest bit, so we don't round up. @@ -255,7 +248,6 @@ provide let computeFloat = (exponent: WasmI64, mantissa: WasmI64) => { // Exponent is above largest normal value, must be infinite. fpInf() } else { - { f: WasmI32.toGrain(newInt64(mantissa)): Int64, e: WasmI32.toGrain(newInt32(power2)): Int32, diff --git a/stdlib/runtime/atof/parse.gr b/stdlib/runtime/atof/parse.gr index 6c10082b40..633af2e4fe 100644 --- a/stdlib/runtime/atof/parse.gr +++ b/stdlib/runtime/atof/parse.gr @@ -262,12 +262,12 @@ let parseFloatToParts = (string: String) => { from WasmI32 use { (<), (>) } let n = c - (_CHAR_CODE_0 - 1n) numDigits -= if (n < 0n) { - 0n - } else if (n > 255n) { - 255n - } else { - n - } + 0n + } else if (n > 255n) { + 255n + } else { + n + } i += 1n } else if (c == _CHAR_CODE_UNDERSCORE) { continue @@ -348,7 +348,7 @@ let parseFloatToParts = (string: String) => { WasmI32.toGrain(newInt64(mantissa)): Int64, negative, manyDigits, - ) + ), ) } else { Ok( @@ -357,7 +357,7 @@ let parseFloatToParts = (string: String) => { WasmI32.toGrain(newInt64(mantissa)): Int64, negative, manyDigits, - ) + ), ) } }, @@ -426,7 +426,9 @@ let parseInfNan = s => { ) { Ok((fpNan(), negative)) } else if ( - c1 == _CHAR_CODE_i && c2 == _CHAR_CODE_n && c3 == _CHAR_CODE_f + c1 == _CHAR_CODE_i && + c2 == _CHAR_CODE_n && + c3 == _CHAR_CODE_f ) { if (len - i == 3n) { Ok((fpInf(), negative)) @@ -475,13 +477,12 @@ let parseInfNan = s => { } @unsafe -provide let isFastPath = - ( - exponent: WasmI32, - mantissa: WasmI64, - negative: Bool, - manyDigits: Bool, - ) => { +provide let isFastPath = ( + exponent: WasmI32, + mantissa: WasmI64, + negative: Bool, + manyDigits: Bool, +) => { from WasmI32 use { (<=) } from WasmI64 use { (<<) } diff --git a/stdlib/runtime/bigint.gr b/stdlib/runtime/bigint.gr index ee4ee6837d..e867630f80 100644 --- a/stdlib/runtime/bigint.gr +++ b/stdlib/runtime/bigint.gr @@ -109,7 +109,7 @@ let init = (limbs: WasmI32) => { // in our multiplication/division algorithms. This means that BigInts // are limited to 16+17179869176 bytes, or just over 16GiB. throw Exception.InvalidArgument( - "Cannot allocate BigInt with >= 2147483648 limbs" + "Cannot allocate BigInt with >= 2147483648 limbs", ) } @@ -193,9 +193,7 @@ let setFlag = (ptr, flag, value) => { let (!) 
= lnot WasmI32.store( ptr, - WasmI32.load(ptr, 12n) & !flag | - flag * - (if (WasmI32.eqz(value)) { + WasmI32.load(ptr, 12n) & !flag | flag * (if (WasmI32.eqz(value)) { 0n } else { 1n @@ -224,7 +222,8 @@ let getHalfSize = ptr => { from WasmI32 use { (-), (<<), gtU as (>) } from WasmI64 use { (>>>) } if ( - size > 0n && WasmI32.eqz(WasmI32.wrapI64(getLimb(ptr, size - 1n) >>> 32N)) + size > 0n && + WasmI32.eqz(WasmI32.wrapI64(getLimb(ptr, size - 1n) >>> 32N)) ) { // last half-limb is trailing zeros (size << 1n) - 1n @@ -815,7 +814,7 @@ provide let bigIntToString = (num: WasmI32, base: WasmI32) => { ] acc = acc >>> bits if (accBits > 64N) { - acc = limb >>> 64N - (accBits - bits) + acc = limb >>> (64N - (accBits - bits)) } accBits -= bits } @@ -854,12 +853,10 @@ provide let bigIntToString = (num: WasmI32, base: WasmI32) => { ] } } - while ( - match (result) { - [c, ...tl] when DS.untagChar(c) == DS.untagChar('0') => true, - _ => false, - } - ) { + while (match (result) { + [c, ...tl] when DS.untagChar(c) == DS.untagChar('0') => true, + _ => false, + }) { match (result) { [c, ...tl] => result = tl, _ => void, // <- impossible @@ -1081,16 +1078,11 @@ let unsignedSubInt = (num1: WasmI32, int: WasmI64) => { from WasmI64 use { (==) as eqWasmI64, (-), ltU as (<) } if (num1Limbs == 0n || num1Limbs == 1n && getLimb(num1, 0n) < int) { let ret = init(1n) - setLimb( - ret, - 0n, - int - - (if (num1Limbs == 0n) { - 0N - } else { - getLimb(num1, 0n) - }) - ) + setLimb(ret, 0n, int - (if (num1Limbs == 0n) { + 0N + } else { + getLimb(num1, 0n) + })) trimNumberInPlace(negateInPlace(ret)) } else if (num1Limbs == 1n && eqWasmI64(getLimb(num1, 0n), int)) { makeZero() @@ -1293,7 +1285,8 @@ provide let shl = (num: WasmI32, places: WasmI32) => { let mut carry = 0N let a = places / 32n let b = places % 32n - let mask = ((1N << WasmI64.extendI32U(b)) - 1N) << 64N - WasmI64.extendI32U(b) + let mask = ((1N << WasmI64.extendI32U(b)) - 1N) << + (64N - WasmI64.extendI32U(b)) let result = init(numLimbs + a) setFlag(result, _IS_NEGATIVE, getFlag(num, _IS_NEGATIVE)) from WasmI32 use { (<<) } @@ -1315,7 +1308,7 @@ provide let shl = (num: WasmI32, places: WasmI32) => { setHalfLimb( ret, numHalfLimbs + a, - WasmI32.wrapI64(carry >>> 32N - WasmI64.extendI32U(b)) + WasmI32.wrapI64(carry >>> (32N - WasmI64.extendI32U(b))) ) ret } else { @@ -1353,8 +1346,7 @@ provide let shrS = (num: WasmI32, places: WasmI32) => { } else { let newHalfLimbs = numHalfLimbs - a let ret = init( - (newHalfLimbs >>> 1n) + - (if (WasmI32.eqz(newHalfLimbs & 1n)) { + (newHalfLimbs >>> 1n) + (if (WasmI32.eqz(newHalfLimbs & 1n)) { 0n } else { 1n @@ -1411,12 +1403,7 @@ let bitwiseNotUnsigned = (num: WasmI32) => { } @unsafe -let bitwiseAndPositive = - ( - num1: WasmI32, - num2: WasmI32, - copyTrailing: WasmI32, - ) => { +let bitwiseAndPositive = (num1: WasmI32, num2: WasmI32, copyTrailing: WasmI32) => { // bitwise and, but both num1 and num2 are assumed to be positive let num1Limbs = getSize(num1) let num2Limbs = getSize(num2) @@ -1581,11 +1568,7 @@ provide let bitwiseOr = (num1: WasmI32, num2: WasmI32) => { let num2Neg = negate(num2) // B let num1Sub1 = decr(num1Neg) // (A-1) let num2Sub1 = decr(num2Neg) // (B-1) - let andResult = bitwiseAndPositive( - num1Sub1, - num2Sub1, - 0n - ) // (A-1) & (B-1) + let andResult = bitwiseAndPositive(num1Sub1, num2Sub1, 0n) // (A-1) & (B-1) let ret = incr(andResult) // ((A-1) & (B-1)) + 1 Memory.decRef(num1Neg) Memory.decRef(num2Neg) @@ -1765,11 +1748,10 @@ let baseCaseDivRem = (a: WasmI32, b: WasmI32, result: WasmI32) => { 
let n = getHalfSize(b) let m = getHalfSize(a) - n let qsize = (if (WasmI32.eqz(m + 1n & 1n)) { - m + 1n - } else { - m + 2n - }) >>> - 1n + m + 1n + } else { + m + 2n + }) >>> 1n let mut q = init(qsize) let mut a = 0n let bTimesBetaM = shl(b, m * 32n) // b * \beta^m == b * (2^32)^m == b*2^(32*m) @@ -1783,18 +1765,12 @@ let baseCaseDivRem = (a: WasmI32, b: WasmI32, result: WasmI32) => { Memory.decRef(bTimesBetaM) for (let mut j = m - 1n; j >= 0n; j -= 1n) { from WasmI64 use { divU as (/), (|), (<<) } - let anjBeta = WasmI64.extendI32U(getHalfLimb(a, n + j)) << - 32N // a_{n+j}\beta + let anjBeta = WasmI64.extendI32U(getHalfLimb(a, n + j)) << 32N // a_{n+j}\beta let anj1 = WasmI64.extendI32U(getHalfLimb(a, n + j - 1n)) // a_{n+j-1} let bn1 = WasmI64.extendI32U(getHalfLimb(b, n - 1n)) // b_{n-1} let qjstar = (anjBeta | anj1) / bn1 // q_j^\ast (quotient selection) - let mut qj = WasmI32.wrapI64( - qjstar - ) // min(q_j^ast, \beta - 1) (equiv. to qjstar & _UMAX_I6432) - let bTimesBetaJ = shl( - b, - j * 32n - ) // b * \beta^j == b * (2^32)^j == b*2^(32*j) + let mut qj = WasmI32.wrapI64(qjstar) // min(q_j^ast, \beta - 1) (equiv. to qjstar & _UMAX_I6432) + let bTimesBetaJ = shl(b, j * 32n) // b * \beta^j == b * (2^32)^j == b*2^(32*j) let qjWrapped = makeWrappedUint32(qj) let qjTimesBTimesBetaJ = mul(bTimesBetaJ, qjWrapped) Memory.decRef(qjWrapped) @@ -1840,13 +1816,12 @@ let baseCaseDivRemUnnormalized = (a: WasmI32, b: WasmI32, result: WasmI32) => { } @unsafe -let division = - ( - num1: WasmI32, - num2: WasmI32, - destContainer: WasmI32, - divMod: Bool, - ) => { +let division = ( + num1: WasmI32, + num2: WasmI32, + destContainer: WasmI32, + divMod: Bool, +) => { from WasmI32 use { (^), (==) } if (eqz(num2)) { throw Exception.DivisionByZero @@ -1908,8 +1883,10 @@ let division = // https://en.wikipedia.org/wiki/Modulo_operation if ( divMod && - (flagIsSet(r, _IS_NEGATIVE) && !flagIsSet(num2, _IS_NEGATIVE) || - !flagIsSet(r, _IS_NEGATIVE) && flagIsSet(num2, _IS_NEGATIVE)) + ( + flagIsSet(r, _IS_NEGATIVE) && !flagIsSet(num2, _IS_NEGATIVE) || + !flagIsSet(r, _IS_NEGATIVE) && flagIsSet(num2, _IS_NEGATIVE) + ) ) { let newr = add(r, num2) Memory.decRef(r) diff --git a/stdlib/runtime/compare.gr b/stdlib/runtime/compare.gr index 6fc0b8de65..223aec32bd 100644 --- a/stdlib/runtime/compare.gr +++ b/stdlib/runtime/compare.gr @@ -97,9 +97,7 @@ let rec heapCompareHelp = (heapTag, xptr, yptr) => { return 0 }, - t when ( - t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG - ) => { + t when t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG => { let xlength = WasmI32.load(xptr, 4n) let ylength = WasmI32.load(yptr, 4n) @@ -136,20 +134,36 @@ let rec heapCompareHelp = (heapTag, xptr, yptr) => { t when t == Tags._GRAIN_INT32_HEAP_TAG => { let xval = WasmI32.load(xptr, 4n) let yval = WasmI32.load(yptr, 4n) - return if (xval < yval) -1 else if (xval > yval) 1 else 0 + return if (xval < yval) { + -1 + } else if (xval > yval) { + 1 + } else { + 0 + } }, // Float32 is handled by compareHelp directly t when t == Tags._GRAIN_UINT32_HEAP_TAG => { let xval = WasmI32.load(xptr, 4n) let yval = WasmI32.load(yptr, 4n) - return if (WasmI32.ltU(xval, yval)) -1 else if (WasmI32.gtU(xval, yval)) 1 - else 0 + return if (WasmI32.ltU(xval, yval)) { + -1 + } else if (WasmI32.gtU(xval, yval)) { + 1 + } else { + 0 + } }, t when t == Tags._GRAIN_UINT64_HEAP_TAG => { let xval = WasmI64.load(xptr, 8n) let yval = WasmI64.load(yptr, 8n) - return if (WasmI64.ltU(xval, yval)) -1 else if (WasmI64.gtU(xval, yval)) 1 
- else 0 + return if (WasmI64.ltU(xval, yval)) { + -1 + } else if (WasmI64.gtU(xval, yval)) { + 1 + } else { + 0 + } }, _ => { // No other implementation @@ -164,20 +178,44 @@ and compareHelp = (x, y) => { // Short circuit for non-pointer values if ((xtag & Tags._GRAIN_NUMBER_TAG_MASK) == Tags._GRAIN_NUMBER_TAG_TYPE) { // Signed comparisons are necessary for numbers - if (x < y) -1 else if (x > y) 1 else 0 + if (x < y) { + -1 + } else if (x > y) { + 1 + } else { + 0 + } } else if (xtag == Tags._GRAIN_SHORTVAL_TAG_TYPE) { let shortValTag = x & Tags._GRAIN_GENERIC_SHORTVAL_TAG_MASK if ( shortValTag == Tags._GRAIN_INT8_TAG_MASK || shortValTag == Tags._GRAIN_INT16_TAG_MASK ) { - if (x < y) -1 else if (x > y) 1 else 0 + if (x < y) { + -1 + } else if (x > y) { + 1 + } else { + 0 + } } else { - if (WasmI32.ltU(x, y)) -1 else if (WasmI32.gtU(x, y)) 1 else 0 + if (WasmI32.ltU(x, y)) { + -1 + } else if (WasmI32.gtU(x, y)) { + 1 + } else { + 0 + } } } else { // Unsigned comparisons are necessary for other stack-allocated values - if (WasmI32.ltU(x, y)) -1 else if (WasmI32.gtU(x, y)) 1 else 0 + if (WasmI32.ltU(x, y)) { + -1 + } else if (WasmI32.gtU(x, y)) { + 1 + } else { + 0 + } } } else if (isNumber(x)) { // Numbers have special comparison rules, e.g. NaN == NaN @@ -196,7 +234,13 @@ and compareHelp = (x, y) => { 1 } else { from WasmF32 use { (<), (>) } - if (xval < yval) -1 else if (xval > yval) 1 else 0 + if (xval < yval) { + -1 + } else if (xval > yval) { + 1 + } else { + 0 + } } } else { // Handle all other heap allocated things diff --git a/stdlib/runtime/debugPrint.gr b/stdlib/runtime/debugPrint.gr index f433418341..f88361141c 100644 --- a/stdlib/runtime/debugPrint.gr +++ b/stdlib/runtime/debugPrint.gr @@ -6,12 +6,8 @@ from WasmI32 use { (+) } include "runtime/unsafe/wasmf64" include "runtime/unsafe/memory" -foreign wasm fd_write: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" +foreign wasm fd_write: + (WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" @unsafe provide let print = (s: String) => { diff --git a/stdlib/runtime/equal.gr b/stdlib/runtime/equal.gr index ef9d3d76d2..a5f4b1ebb2 100644 --- a/stdlib/runtime/equal.gr +++ b/stdlib/runtime/equal.gr @@ -116,9 +116,7 @@ let rec heapEqualHelp = (heapTag, xptr, yptr) => { result } }, - t when ( - t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG - ) => { + t when t == Tags._GRAIN_STRING_HEAP_TAG || t == Tags._GRAIN_BYTES_HEAP_TAG => { let xlength = WasmI32.load(xptr, 4n) let ylength = WasmI32.load(yptr, 4n) @@ -156,9 +154,7 @@ let rec heapEqualHelp = (heapTag, xptr, yptr) => { result } }, - t when ( - t == Tags._GRAIN_UINT32_HEAP_TAG || t == Tags._GRAIN_INT32_HEAP_TAG - ) => { + t when t == Tags._GRAIN_UINT32_HEAP_TAG || t == Tags._GRAIN_INT32_HEAP_TAG => { let xval = WasmI32.load(xptr, 4n) let yval = WasmI32.load(yptr, 4n) xval == yval diff --git a/stdlib/runtime/exception.gr b/stdlib/runtime/exception.gr index dcc793a235..568db6ceee 100644 --- a/stdlib/runtime/exception.gr +++ b/stdlib/runtime/exception.gr @@ -4,12 +4,8 @@ module Exception include "runtime/unsafe/wasmi32" as WasmI32 from WasmI32 use { (==), (+), (-) } -foreign wasm fd_write: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" +foreign wasm fd_write: + (WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" primitive unreachable = "@unreachable" @@ -23,10 +19,8 @@ provide let dangerouslyRegisterBasePrinter = f => { let mut current 
= printers while (true) { // There will be at least one printer registered by the time this is called - let (_, next) = WasmI32.toGrain(current): ( - Exception => Option, - WasmI32 - ) + let (_, next) = WasmI32.toGrain(current): + (Exception => Option, WasmI32) if (next == 0n) { // Using a tuple in runtime mode is typically disallowed as there is no way // to reclaim the memory, but this function is only called once @@ -62,10 +56,8 @@ let exceptionToString = (e: Exception) => { let mut current = printers while (true) { if (current == 0n) return result - let (printer, next) = WasmI32.toGrain(current): ( - Exception => Option, - WasmI32 - ) + let (printer, next) = WasmI32.toGrain(current): + (Exception => Option, WasmI32) // as GC is not available, manually increment the references match (incRef(printer)(incRef(e))) { Some(str) => return str, diff --git a/stdlib/runtime/gc.gr b/stdlib/runtime/gc.gr index 091cf6f5b4..f6f2b22eaf 100644 --- a/stdlib/runtime/gc.gr +++ b/stdlib/runtime/gc.gr @@ -25,12 +25,8 @@ include "runtime/unsafe/wasmi32" as WasmI32 from WasmI32 use { (+), (-), (*), (&), (==), (!=) } // Using foreigns directly here to avoid cyclic dependency -foreign wasm fd_write: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" +foreign wasm fd_write: + (WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" primitive (&&) = "@and" primitive (||) = "@or" @@ -134,9 +130,7 @@ and decRefChildren = (userPtr: WasmI32) => { ignore(decRef(WasmI32.load(userPtr + i, 16n), false)) } }, - t when ( - t == Tags._GRAIN_ARRAY_HEAP_TAG || t == Tags._GRAIN_TUPLE_HEAP_TAG - ) => { + t when t == Tags._GRAIN_ARRAY_HEAP_TAG || t == Tags._GRAIN_TUPLE_HEAP_TAG => { let arity = WasmI32.load(userPtr, 4n) let maxOffset = arity * 4n for (let mut i = 0n; WasmI32.ltU(i, maxOffset); i += 4n) { diff --git a/stdlib/runtime/malloc.gr b/stdlib/runtime/malloc.gr index e1419a3b80..8c01017035 100644 --- a/stdlib/runtime/malloc.gr +++ b/stdlib/runtime/malloc.gr @@ -260,12 +260,10 @@ provide let malloc = (nbytes: WasmI32) => { let mut ret = -1n // Search the freelist for any blocks large enough. - for ( - let mut p = getNext(prevp); ; { - prevp = p - p = getNext(p) - } - ) { + for (let mut p = getNext(prevp);; { + prevp = p + p = getNext(p) + }) { let size = getSize(p) if (size >= nbytes) { // If this block is big enough, allocate from it. 
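The malloc.gr hunk above only re-wraps the free-list loop, but the comment it carries ("Search the freelist for any blocks large enough" / "If this block is big enough, allocate from it") describes a first-fit scan. A minimal Grain sketch of that idea, using hypothetical names (`firstFit`, `blockSizes`) and the stdlib `List.find` over a plain list of sizes rather than the allocator's intrusive free list in linear memory:

module FirstFitSketch

include "list"

// First-fit: return the first free-block size able to satisfy `request`.
// Purely illustrative; the real allocator walks header-linked blocks in
// linear memory and splits the winning block, which this sketch omits.
let firstFit = (request, blockSizes) => {
  List.find(size => size >= request, blockSizes)
}

// For example, the 32-byte block is the first one that fits a 24-byte request.
assert firstFit(24, [16, 8, 32, 64]) == Some(32)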
diff --git a/stdlib/runtime/numberUtils.gr b/stdlib/runtime/numberUtils.gr index 2e3d478e8e..6b6ff4d42c 100644 --- a/stdlib/runtime/numberUtils.gr +++ b/stdlib/runtime/numberUtils.gr @@ -960,7 +960,7 @@ provide let utoa32Buffered = (buf, value, radix) => { from WasmI32 use { (>>>), (<), (>) } if (radix < 2n || radix > 36n) { throw Exception.InvalidArgument( - "toString() radix argument must be between 2 and 36" + "toString() radix argument must be between 2 and 36", ) } if (WasmI32.eqz(value)) { @@ -983,7 +983,7 @@ provide let utoa32 = (value, radix) => { from WasmI32 use { (>>>), (<), (>) } if (radix < 2n || radix > 36n) { throw Exception.InvalidArgument( - "toString() radix argument must be between 2 and 36" + "toString() radix argument must be between 2 and 36", ) } if (WasmI32.eqz(value)) { @@ -1013,7 +1013,7 @@ provide let itoa32 = (value, radix) => { let mut value = value if (radix < 2n || radix > 36n) { throw Exception.InvalidArgument( - "toString() radix argument must be between 2 and 36" + "toString() radix argument must be between 2 and 36", ) } let sign = value >>> 31n @@ -1048,7 +1048,7 @@ provide let utoa64 = (value, radix) => { from WasmI32 use { (>>>), (<), (>) } if (radix < 2n || radix > 36n) { throw Exception.InvalidArgument( - "toString() radix argument must be between 2 and 36" + "toString() radix argument must be between 2 and 36", ) } if (WasmI64.eqz(value)) { @@ -1085,7 +1085,7 @@ provide let itoa64 = (value, radix) => { from WasmI64 use { (>>>) } if (radix < 2n || radix > 36n) { throw Exception.InvalidArgument( - "toString() radix argument must be between 2 and 36" + "toString() radix argument must be between 2 and 36", ) } @@ -1168,8 +1168,10 @@ let grisuRound = (buffer, len, delta, rest, ten_kappa, wp_w) => { while ( WasmI64.ltU(rest, wp_w) && WasmI64.geU(delta - rest, ten_kappa) && - (WasmI64.ltU(rest + ten_kappa, wp_w) || - WasmI64.gtU(wp_w - rest, rest + ten_kappa - wp_w)) + ( + WasmI64.ltU(rest + ten_kappa, wp_w) || + WasmI64.gtU(wp_w - rest, rest + ten_kappa - wp_w) + ) ) { from WasmI32 use { (-) } digit -= 1n @@ -1272,41 +1274,40 @@ let genDigits = (buffer, w_frc, mp_frc, mp_exp, delta, sign) => { WasmI64.extendI32U( WasmI32.load(get_POWERS10() + shlWasmI32(kappa, 2n), 0n) ) << - WasmI64.extendI32U(one_exp), + WasmI64.extendI32U(one_exp), wp_w_frc ) done = true break } } - if (!done) - while (true) { - from WasmI64 use { (!=), (*), (|), (>>>) } - p2 *= 10N - delta *= 10N - - let d = p2 >>> WasmI64.extendI32U(one_exp) - if ((d | WasmI64.extendI32U(len)) != 0N) { - WasmI32.store8( - buffer + len, - _CHAR_CODE_0 + (WasmI32.wrapI64(d) & 0xffn), - 0n - ) - len += 1n - } - - p2 = andWasmI64(p2, mask) - kappa -= 1n - if (WasmI64.ltU(p2, delta)) { - from WasmI32 use { (<<) } - _K += kappa - wp_w_frc *= WasmI64.extendI32U( - WasmI32.load(get_POWERS10() + ((0n - kappa) << 2n), 0n) - ) - grisuRound(buffer, len, delta, p2, one_frc, wp_w_frc) - break - } + if (!done) while (true) { + from WasmI64 use { (!=), (*), (|), (>>>) } + p2 *= 10N + delta *= 10N + + let d = p2 >>> WasmI64.extendI32U(one_exp) + if ((d | WasmI64.extendI32U(len)) != 0N) { + WasmI32.store8( + buffer + len, + _CHAR_CODE_0 + (WasmI32.wrapI64(d) & 0xffn), + 0n + ) + len += 1n } + + p2 = andWasmI64(p2, mask) + kappa -= 1n + if (WasmI64.ltU(p2, delta)) { + from WasmI32 use { (<<) } + _K += kappa + wp_w_frc *= WasmI64.extendI32U( + WasmI32.load(get_POWERS10() + ((0n - kappa) << 2n), 0n) + ) + grisuRound(buffer, len, delta, p2, one_frc, wp_w_frc) + break + } + } len } @@ -1355,17 +1356,11 @@ let grisu2 = 
(value, buffer, sign) => { let _exp = exp_norm // get cached power - let c = WasmF64.reinterpretI64( - 0x3FD34413509F79FEN - ) // 1 / lg(10) = 0.30102999566398114 + let c = WasmF64.reinterpretI64(0x3FD34413509F79FEN) // 1 / lg(10) = 0.30102999566398114 from WasmF64 use { (+) as addWasmF64, (*) as mulWasmF64, (!=) as neWasmF64 } - let dk = addWasmF64( - mulWasmF64(WasmF64.convertI32S(-61n - _exp), c), - 347.0W - ) // dk must be positive, so can do ceiling in positive + let dk = addWasmF64(mulWasmF64(WasmF64.convertI32S(-61n - _exp), c), 347.0W) // dk must be positive, so can do ceiling in positive let mut k = WasmI32.truncF64S(dk) - k += if (neWasmF64(WasmF64.convertI32S(k), dk)) 1n - else 0n // conversion with ceil + k += if (neWasmF64(WasmF64.convertI32S(k), dk)) 1n else 0n // conversion with ceil let index = (k >> 3n) + 1n from WasmI32 use { (<<) } diff --git a/stdlib/runtime/numbers.gr b/stdlib/runtime/numbers.gr index 7cf980feb1..10d32bfd09 100644 --- a/stdlib/runtime/numbers.gr +++ b/stdlib/runtime/numbers.gr @@ -633,7 +633,6 @@ let isSafeIntegerF64 = value => { * NOTE: The preconditions in these functions are important, so do NOT * provide them! */ - @unsafe let numberEqualSimpleHelp = (x, y) => { // PRECONDITION: x is a "simple" number (value tag is 0) and x !== y and isNumber(y) @@ -897,9 +896,10 @@ let numberAddSubSimpleHelp = (x, y, isSub) => { let yDenominator = boxedRationalDenominator(y) let expandedXNumerator = BI.mul(xBig, yDenominator) Memory.decRef(xBig) - let result = - if (isSub) BI.sub(expandedXNumerator, yNumerator) - else BI.add(expandedXNumerator, yNumerator) + let result = if (isSub) + BI.sub(expandedXNumerator, yNumerator) + else + BI.add(expandedXNumerator, yNumerator) let ret = reducedFractionBigInt(result, yDenominator, false) Memory.decRef(expandedXNumerator) Memory.decRef(result) @@ -979,9 +979,10 @@ let numberAddSubInt64Help = (xval, y, isSub) => { let yDenominator = boxedRationalDenominator(y) let expandedXNumerator = BI.mul(xBig, yDenominator) Memory.decRef(xBig) - let result = - if (isSub) BI.sub(expandedXNumerator, yNumerator) - else BI.add(expandedXNumerator, yNumerator) + let result = if (isSub) + BI.sub(expandedXNumerator, yNumerator) + else + BI.add(expandedXNumerator, yNumerator) let ret = reducedFractionBigInt(result, yDenominator, false) Memory.decRef(expandedXNumerator) Memory.decRef(result) @@ -1049,9 +1050,10 @@ let numberAddSubBigIntHelp = (x, y, isSub) => { let yNumerator = boxedRationalNumerator(y) let yDenominator = boxedRationalDenominator(y) let expandedXNumerator = BI.mul(x, yDenominator) - let result = - if (isSub) BI.sub(expandedXNumerator, yNumerator) - else BI.add(expandedXNumerator, yNumerator) + let result = if (isSub) + BI.sub(expandedXNumerator, yNumerator) + else + BI.add(expandedXNumerator, yNumerator) Memory.decRef(expandedXNumerator) let ret = reducedFractionBigInt(result, yDenominator, false) Memory.decRef(result) @@ -1078,18 +1080,20 @@ provide let addSubRational = (x, y, isSub, keepRational) => { let yNumerator = boxedRationalNumerator(y) let yDenominator = boxedRationalDenominator(y) if (BI.eq(xDenominator, yDenominator)) { - let newNumerator = - if (isSub) BI.sub(xNumerator, yNumerator) - else BI.add(xNumerator, yNumerator) + let newNumerator = if (isSub) + BI.sub(xNumerator, yNumerator) + else + BI.add(xNumerator, yNumerator) let ret = reducedFractionBigInt(newNumerator, xDenominator, keepRational) Memory.decRef(newNumerator) ret } else { let numerator1 = BI.mul(xNumerator, yDenominator) let numerator2 = 
BI.mul(yNumerator, xDenominator) - let numerator = - if (isSub) BI.sub(numerator1, numerator2) - else BI.add(numerator1, numerator2) + let numerator = if (isSub) + BI.sub(numerator1, numerator2) + else + BI.add(numerator1, numerator2) let denominator = BI.mul(xDenominator, yDenominator) let ret = reducedFractionBigInt(numerator, denominator, keepRational) Memory.decRef(numerator1) @@ -1107,12 +1111,14 @@ provide let timesDivideRational = (x, y, isDivide, keepRational) => { let yDenominator = boxedRationalDenominator(y) // (a / b) * (c / d) == (a * c) / (b * d) // (a / b) / (c / d) == (a * d) / (b * c) - let numerator = - if (isDivide) BI.mul(xNumerator, yDenominator) - else BI.mul(xNumerator, yNumerator) - let denominator = - if (isDivide) BI.mul(xDenominator, yNumerator) - else BI.mul(xDenominator, yDenominator) + let numerator = if (isDivide) + BI.mul(xNumerator, yDenominator) + else + BI.mul(xNumerator, yNumerator) + let denominator = if (isDivide) + BI.mul(xDenominator, yNumerator) + else + BI.mul(xDenominator, yDenominator) reducedFractionBigInt(numerator, denominator, keepRational) } @@ -1189,9 +1195,10 @@ let numberAddSubRationalHelp = (x, y, isSub) => { let yval = untagSimple(y) let yBig = BI.makeWrappedInt32(yval) let expandedYNumerator = BI.mul(xDenominator, yBig) - let result = - if (isSub) BI.sub(xNumerator, expandedYNumerator) - else BI.add(xNumerator, expandedYNumerator) + let result = if (isSub) + BI.sub(xNumerator, expandedYNumerator) + else + BI.add(xNumerator, expandedYNumerator) Memory.decRef(expandedYNumerator) Memory.decRef(yBig) let ret = reducedFractionBigInt(result, xDenominator, false) @@ -1208,9 +1215,10 @@ let numberAddSubRationalHelp = (x, y, isSub) => { let yBig = BI.makeWrappedInt64(boxedInt64Number(y)) let expandedYNumerator = BI.mul(yBig, xDenominator) Memory.decRef(yBig) - let result = - if (isSub) BI.sub(xNumerator, expandedYNumerator) - else BI.add(xNumerator, expandedYNumerator) + let result = if (isSub) + BI.sub(xNumerator, expandedYNumerator) + else + BI.add(xNumerator, expandedYNumerator) let ret = reducedFractionBigInt(result, xDenominator, false) Memory.decRef(expandedYNumerator) Memory.decRef(result) @@ -1218,9 +1226,10 @@ let numberAddSubRationalHelp = (x, y, isSub) => { }, t when t == Tags._GRAIN_BIGINT_BOXED_NUM_TAG => { let expandedYNumerator = BI.mul(xDenominator, y) - let result = - if (isSub) BI.sub(xNumerator, expandedYNumerator) - else BI.add(xNumerator, expandedYNumerator) + let result = if (isSub) + BI.sub(xNumerator, expandedYNumerator) + else + BI.add(xNumerator, expandedYNumerator) Memory.decRef(expandedYNumerator) let ret = reducedFractionBigInt(result, xDenominator, false) Memory.decRef(result) @@ -1597,9 +1606,7 @@ let numberMod = (x, y) => { let xval = coerceNumberToWasmF64(WasmI32.toGrain(x): Number) let yval = coerceNumberToWasmF64(WasmI32.toGrain(y): Number) let yInfinite = yval == InfinityW || yval == -InfinityW - if ( - yval == 0.0W || yInfinite && (xval == InfinityW || xval == -InfinityW) - ) { + if (yval == 0.0W || yInfinite && (xval == InfinityW || xval == -InfinityW)) { newFloat64(NaNW) } else if (yInfinite) { newFloat64(xval) @@ -1695,7 +1702,13 @@ let cmpFloat = (x: WasmI32, y: WasmI32) => { 1n } } else { - if (xf < yf) -1n else if (xf > yf) 1n else 0n + if (xf < yf) { + -1n + } else if (xf > yf) { + 1n + } else { + 0n + } } } else { let yBoxedNumberTag = boxedNumberTag(y) @@ -1734,7 +1747,13 @@ let cmpFloat = (x: WasmI32, y: WasmI32) => { 1n } } else { - if (xf < yf) -1n else if (xf > yf) 1n else 0n + if (xf < yf) { + 
-1n + } else if (xf > yf) { + 1n + } else { + 0n + } } } } @@ -1747,13 +1766,25 @@ let cmpSmallInt = (x: WasmI32, y: WasmI32) => { let xi = boxedInt64Number(x) if (isSimpleNumber(y)) { let yi = WasmI64.extendI32S(untagSimple(y)) - if (xi < yi) -1n else if (xi > yi) 1n else 0n + if (xi < yi) { + -1n + } else if (xi > yi) { + 1n + } else { + 0n + } } else { let yBoxedNumberTag = boxedNumberTag(y) match (yBoxedNumberTag) { t when t == Tags._GRAIN_INT64_BOXED_NUM_TAG => { let yi = boxedInt64Number(y) - if (xi < yi) -1n else if (xi > yi) 1n else 0n + if (xi < yi) { + -1n + } else if (xi > yi) { + 1n + } else { + 0n + } }, t when t == Tags._GRAIN_BIGINT_BOXED_NUM_TAG => { 0n - cmpBigInt(y, x) @@ -1764,8 +1795,11 @@ let cmpSmallInt = (x: WasmI32, y: WasmI32) => { if ( WasmF64.convertI64S(xi) < BI.toFloat64(boxedRationalNumerator(y)) / - BI.toFloat64(boxedRationalDenominator(y)) - ) -1n else 1n + BI.toFloat64(boxedRationalDenominator(y)) + ) + -1n + else + 1n }, t when t == Tags._GRAIN_FLOAT64_BOXED_NUM_TAG => { 0n - cmpFloat(y, x) @@ -2054,9 +2088,8 @@ provide let (&) = (value1: Number, value2: Number) => { if (isBigInt(xw32) || isBigInt(yw32)) { let xval = coerceNumberToBigInt(value1) let yval = coerceNumberToBigInt(value2) - let ret = WasmI32.toGrain( - reducedBigInteger(BI.bitwiseAnd(xval, yval)) - ): Number + let ret = WasmI32.toGrain(reducedBigInteger(BI.bitwiseAnd(xval, yval))): + Number if (!(xw32 == xval)) { Memory.decRef(xval) void @@ -2092,9 +2125,8 @@ provide let (|) = (value1: Number, value2: Number) => { if (isBigInt(xw32) || isBigInt(yw32)) { let xval = coerceNumberToBigInt(value1) let yval = coerceNumberToBigInt(value2) - let ret = WasmI32.toGrain( - reducedBigInteger(BI.bitwiseOr(xval, yval)) - ): Number + let ret = WasmI32.toGrain(reducedBigInteger(BI.bitwiseOr(xval, yval))): + Number if (!(xw32 == xval)) { Memory.decRef(xval) void @@ -2131,9 +2163,8 @@ provide let (^) = (value1: Number, value2: Number) => { if (isBigInt(xw32) || isBigInt(yw32)) { let xval = coerceNumberToBigInt(value1) let yval = coerceNumberToBigInt(value2) - let ret = WasmI32.toGrain( - reducedBigInteger(BI.bitwiseXor(xval, yval)) - ): Number + let ret = WasmI32.toGrain(reducedBigInteger(BI.bitwiseXor(xval, yval))): + Number if (!(xw32 == xval)) { Memory.decRef(xval) void @@ -2215,15 +2246,7 @@ let coerceNumberToShortUint = (x: Number, max32, max64, is8bit) => { } @unsafe -let coerceNumberToShortInt = - ( - x: Number, - min32, - max32, - min64, - max64, - is8bit, - ) => { +let coerceNumberToShortInt = (x: Number, min32, max32, min64, max64, is8bit) => { from WasmI32 use { (<), (>) } let x = WasmI32.fromGrain(x) let int32 = if (isSimpleNumber(x)) { @@ -2350,7 +2373,8 @@ provide let coerceNumberToInt32 = (number: Number) => { provide let coerceNumberToInt64 = (number: Number) => { let x = WasmI32.fromGrain(number) let result = if ( - !isSimpleNumber(x) && boxedNumberTag(x) == Tags._GRAIN_INT64_BOXED_NUM_TAG + !isSimpleNumber(x) && + boxedNumberTag(x) == Tags._GRAIN_INT64_BOXED_NUM_TAG ) { // avoid extra malloc and prevent x from being freed Memory.incRef(x) @@ -2435,7 +2459,8 @@ provide let coerceNumberToFloat32 = (number: Number) => { provide let coerceNumberToFloat64 = (number: Number) => { let x = WasmI32.fromGrain(number) let result = if ( - !isSimpleNumber(x) && boxedNumberTag(x) == Tags._GRAIN_FLOAT64_BOXED_NUM_TAG + !isSimpleNumber(x) && + boxedNumberTag(x) == Tags._GRAIN_FLOAT64_BOXED_NUM_TAG ) { // avoid extra malloc and prevent x from being freed Memory.incRef(x) @@ -2529,9 +2554,8 @@ provide let 
coerceInt32ToNumber = (value: Int32) => { */ @unsafe provide let coerceInt64ToNumber = (value: Int64) => { - WasmI32.toGrain( - reducedInteger(boxedInt64Number(WasmI32.fromGrain(value))) - ): Number + WasmI32.toGrain(reducedInteger(boxedInt64Number(WasmI32.fromGrain(value)))): + Number } /** @@ -2853,8 +2877,10 @@ provide let (**) = (base, power) => { if (base == 1 && power != 0) { return 1 } else if (isInteger(basePtr) && isInteger(powerPtr)) { - if (power < 0) return expBySquaring(1, 1 / base, power * -1) - else return expBySquaring(1, base, power) + if (power < 0) + return expBySquaring(1, 1 / base, power * -1) + else + return expBySquaring(1, base, power) } else if (isRational(basePtr) && isInteger(powerPtr)) { // Apply expBySquaring to numerator and denominator let numerator = WasmI32.fromGrain(base) @@ -2865,12 +2891,14 @@ provide let (**) = (base, power) => { Memory.incRef(denominator) let denominator = WasmI32.toGrain(denominator): Rational let denominator = rationalDenominator(denominator) - let numerator = - if (power < 0) expBySquaring(1, 1 / numerator, power * -1) - else expBySquaring(1, numerator, power) - let denominator = - if (power < 0) expBySquaring(1, 1 / denominator, power * -1) - else expBySquaring(1, denominator, power) + let numerator = if (power < 0) + expBySquaring(1, 1 / numerator, power * -1) + else + expBySquaring(1, numerator, power) + let denominator = if (power < 0) + expBySquaring(1, 1 / denominator, power * -1) + else + expBySquaring(1, denominator, power) return numerator / denominator } else { // Based on https://git.musl-libc.org/cgit/musl/tree/src/math/pow.c @@ -2883,9 +2911,8 @@ provide let (**) = (base, power) => { return WasmI32.toGrain(newFloat64(x * x)): Number } else if (y == 0.5W) { if (x != InfinityW) { - return WasmI32.toGrain( - newFloat64(WasmF64.abs(WasmF64.sqrt(x))) - ): Number + return WasmI32.toGrain(newFloat64(WasmF64.abs(WasmF64.sqrt(x)))): + Number } else { return Infinity } @@ -2985,15 +3012,21 @@ provide let (**) = (base, power) => { if ((ix - 0x3FF00000n | lx) == 0n) { // C: (-1)**+-inf is 1, JS: NaN return NaN } else if (ix >= 0x3FF00000n) { // (|x|>1)**+-inf = inf,0 - if (hy >= 0n) return WasmI32.toGrain(newFloat64(y)): Number - else return 0.0 + if (hy >= 0n) + return WasmI32.toGrain(newFloat64(y)): Number + else + return 0.0 } else { // (|x|<1)**+-inf = 0,inf - if (hy >= 0n) return 0.0 - else return WasmI32.toGrain(newFloat64(y * -1.0W)): Number + if (hy >= 0n) + return 0.0 + else + return WasmI32.toGrain(newFloat64(y * -1.0W)): Number } } else if (iy == 0x3FF00000n) { - if (hy >= 0n) return WasmI32.toGrain(newFloat64(x)): Number - else return WasmI32.toGrain(newFloat64(1.0W / x)): Number + if (hy >= 0n) + return WasmI32.toGrain(newFloat64(x)): Number + else + return WasmI32.toGrain(newFloat64(1.0W / x)): Number } else if (hy == 0x3FE00000n) { return WasmI32.toGrain(newFloat64(x * x)): Number } else if (hy == 0x3FE00000n) { @@ -3012,7 +3045,9 @@ provide let (**) = (base, power) => { from WasmF64 use { (-) } let d = z - z z = d / d - } else if (yisint == 1n) z *= -1.0W + } else if (yisint == 1n) { + z *= -1.0W + } } return WasmI32.toGrain(newFloat64(z)): Number } @@ -3020,7 +3055,9 @@ provide let (**) = (base, power) => { if (hx < 0n) { if (yisint == 0n) { return NaN - } else if (yisint == 1n) s = -1.0W + } else if (yisint == 1n) { + s = -1.0W + } } let mut t1 = 0.0W and t2 = 0.0W @@ -3112,7 +3149,7 @@ provide let (**) = (base, power) => { WasmI64.extendI32S( (ix >> 1n | 0x20000000n) + 0x00080000n + shlWasmI64(k, 18n) ) << - 32N 
+ 32N ) from WasmF64 use { (+) } t_l = ax - (t_h - bp) @@ -3177,13 +3214,13 @@ provide let (**) = (base, power) => { n = 0n if (i > 0x3FE00000n) { from WasmI64 use { (<<) } - n = j + (0x00100000n >> k + 1n) + n = j + (0x00100000n >> (k + 1n)) k = ((n & 0x7FFFFFFFn) >> 20n) - 0x3FFn t = 0.0W t = WasmF64.reinterpretI64( WasmI64.extendI32S(n & (0x000FFFFFn >> k ^ -1n)) << 32N ) - n = (n & 0x000FFFFFn | 0x00100000n) >> 20n - k + n = (n & 0x000FFFFFn | 0x00100000n) >> (20n - k) if (j < 0n) n *= -1n from WasmF64 use { (-) } p_h -= t diff --git a/stdlib/runtime/string.gr b/stdlib/runtime/string.gr index 34cc489cdb..1f03c94783 100644 --- a/stdlib/runtime/string.gr +++ b/stdlib/runtime/string.gr @@ -31,12 +31,8 @@ include "runtime/numberUtils" as NumberUtils include "runtime/dataStructures" as DataStructures from DataStructures use { allocateString, allocateArray, untagSimpleNumber } -foreign wasm fd_write: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" +foreign wasm fd_write: + (WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" primitive (!) = "@not" primitive (&&) = "@and" @@ -411,12 +407,12 @@ let usvToString = usv => { offset = 0xF0n } let string = allocateString(count + 1n) - WasmI32.store8(string, (usv >>> 6n * count) + offset, 8n) + WasmI32.store8(string, (usv >>> (6n * count)) + offset, 8n) let mut n = 0n while (count > 0n) { n += 1n - let temp = usv >>> 6n * (count - 1n) + let temp = usv >>> (6n * (count - 1n)) WasmI32.store8(string + n, 0x80n | temp & 0x3Fn, 8n) count -= 1n } @@ -782,25 +778,20 @@ and tupleVariantToString = (ptr, variantName, extraIndents, cycles) => { join(strings) } } -and recordToString = - ( - ptr, - recordArity, - fields, - contentOffset, - extraIndents, - cycles, - ) => { +and recordToString = ( + ptr, + recordArity, + fields, + contentOffset, + extraIndents, + cycles, +) => { let prevPadAmount = extraIndents * 2n let prevSpacePadding = if (prevPadAmount == 0n) { "" } else { let v = allocateString(prevPadAmount) - Memory.fill( - v + 8n, - 0x20n, - prevPadAmount - ) // create indentation for closing brace + Memory.fill(v + 8n, 0x20n, prevPadAmount) // create indentation for closing brace WasmI32.toGrain(v): String } let padAmount = (extraIndents + 1n) * 2n diff --git a/stdlib/runtime/utils/printing.gr b/stdlib/runtime/utils/printing.gr index 6a790dd65f..4628e314dc 100644 --- a/stdlib/runtime/utils/printing.gr +++ b/stdlib/runtime/utils/printing.gr @@ -6,12 +6,8 @@ include "runtime/unsafe/wasmi32" include "runtime/unsafe/memory" include "runtime/numberUtils" -foreign wasm fd_write: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" +foreign wasm fd_write: + (WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" @unsafe provide let numberToString = (n: WasmI64) => { diff --git a/stdlib/runtime/wasi.gr b/stdlib/runtime/wasi.gr index 8882857b2d..aa86fb4834 100644 --- a/stdlib/runtime/wasi.gr +++ b/stdlib/runtime/wasi.gr @@ -5,75 +5,50 @@ include "runtime/unsafe/wasmi32" include "exception" // env -provide foreign wasm args_get: ( - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm args_sizes_get: ( - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm environ_get: ( - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm environ_sizes_get: ( - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm args_get: + 
(WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm args_sizes_get: + (WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm environ_get: + (WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm environ_sizes_get: + (WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" // proc provide foreign wasm proc_exit: WasmI32 => Void from "wasi_snapshot_preview1" -provide foreign wasm proc_raise: WasmI32 => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm proc_raise: + WasmI32 => WasmI32 from "wasi_snapshot_preview1" provide foreign wasm sched_yield: () => WasmI32 from "wasi_snapshot_preview1" // random -provide foreign wasm random_get: ( - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm random_get: + (WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" // time -provide foreign wasm clock_time_get: ( - WasmI32, - WasmI64, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm clock_time_get: + (WasmI32, WasmI64, WasmI32) => WasmI32 from "wasi_snapshot_preview1" // file -provide foreign wasm path_open: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, - WasmI32, - WasmI64, - WasmI64, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_read: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_pread: ( - WasmI32, - WasmI32, - WasmI32, - WasmI64, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_prestat_get: ( - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_prestat_dir_name: ( - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm path_open: + ( + WasmI32, + WasmI32, + WasmI32, + WasmI32, + WasmI32, + WasmI64, + WasmI64, + WasmI32, + WasmI32, + ) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_read: + (WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_pread: + (WasmI32, WasmI32, WasmI32, WasmI64, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_prestat_get: + (WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_prestat_dir_name: + (WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" /** * Invokes the `fd_write` system call. 
* @@ -83,138 +58,54 @@ provide foreign wasm fd_prestat_dir_name: ( * @param 3: Where to store the number of bytes written * @returns The number of bytes written */ -provide foreign wasm fd_write: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_pwrite: ( - WasmI32, - WasmI32, - WasmI32, - WasmI64, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_allocate: ( - WasmI32, - WasmI64, - WasmI64, -) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_write: + (WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_pwrite: + (WasmI32, WasmI32, WasmI32, WasmI64, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_allocate: + (WasmI32, WasmI64, WasmI64) => WasmI32 from "wasi_snapshot_preview1" provide foreign wasm fd_close: WasmI32 => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_datasync: WasmI32 => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_datasync: + WasmI32 => WasmI32 from "wasi_snapshot_preview1" provide foreign wasm fd_sync: WasmI32 => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_fdstat_get: ( - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_fdstat_set_flags: ( - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_fdstat_set_rights: ( - WasmI32, - WasmI64, - WasmI64, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_filestat_get: ( - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_filestat_set_size: ( - WasmI32, - WasmI64, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_filestat_set_times: ( - WasmI32, - WasmI64, - WasmI64, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_readdir: ( - WasmI32, - WasmI32, - WasmI32, - WasmI64, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_renumber: ( - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_seek: ( - WasmI32, - WasmI64, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm fd_tell: ( - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm path_create_directory: ( - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm path_filestat_get: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm path_filestat_set_times: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, - WasmI64, - WasmI64, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm path_link: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm path_symlink: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm path_unlink_file: ( - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm path_readlink: ( - WasmI32, - WasmI32, - WasmI32, - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm path_remove_directory: ( - WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" -provide foreign wasm path_rename: ( - WasmI32, - WasmI32, - WasmI32, - 
WasmI32, - WasmI32, - WasmI32, -) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_fdstat_get: + (WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_fdstat_set_flags: + (WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_fdstat_set_rights: + (WasmI32, WasmI64, WasmI64) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_filestat_get: + (WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_filestat_set_size: + (WasmI32, WasmI64) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_filestat_set_times: + (WasmI32, WasmI64, WasmI64, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_readdir: + (WasmI32, WasmI32, WasmI32, WasmI64, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_renumber: + (WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_seek: + (WasmI32, WasmI64, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm fd_tell: + (WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm path_create_directory: + (WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm path_filestat_get: + (WasmI32, WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm path_filestat_set_times: + (WasmI32, WasmI32, WasmI32, WasmI32, WasmI64, WasmI64, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm path_link: + (WasmI32, WasmI32, WasmI32, WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm path_symlink: + (WasmI32, WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm path_unlink_file: + (WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm path_readlink: + (WasmI32, WasmI32, WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm path_remove_directory: + (WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" +provide foreign wasm path_rename: + (WasmI32, WasmI32, WasmI32, WasmI32, WasmI32, WasmI32) => WasmI32 from "wasi_snapshot_preview1" // clocks provide let _CLOCK_REALTIME = 0n diff --git a/stdlib/set.gr b/stdlib/set.gr index 435ca4317f..71055202f5 100644 --- a/stdlib/set.gr +++ b/stdlib/set.gr @@ -6,7 +6,6 @@ * * @since v0.3.0 */ - module Set include "list" @@ -499,7 +498,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let empty = Empty // returns the minimum value in a tree @@ -520,7 +518,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let size = set => { match (set) { Empty => 0, @@ -537,7 +534,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let isEmpty = set => { match (set) { Empty => true, @@ -623,7 +619,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let rec add = (key, set) => { match (set) { Empty => Tree({ key, size: 1, left: Empty, right: Empty }), @@ -647,7 +642,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let rec contains = (key, set) => { match (set) { Empty => false, @@ -693,7 +687,6 @@ provide 
module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let rec remove = (key, set) => { match (set) { Empty => Empty, @@ -716,7 +709,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let forEach = (fn, set) => { let rec forEachInner = node => { match (node) { @@ -742,7 +734,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let reduce = (fn, init, set) => { let rec reduceInner = (acc, node) => { match (node) { @@ -825,7 +816,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let filter = (fn, set) => { let rec filterInner = node => { match (node) { @@ -852,7 +842,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let reject = (fn, set) => { filter(key => !fn(key), set) } @@ -867,7 +856,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let rec union = (set1, set2) => { match ((set1, set2)) { (Empty, node) | (node, Empty) => node, @@ -889,7 +877,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let diff = (set1, set2) => { let rec diffInner = (node1, node2) => { match ((node1, node2)) { @@ -914,7 +901,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let rec intersect = (set1, set2) => { match ((set1, set2)) { (Empty, _) | (_, Empty) => Empty, @@ -939,7 +925,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let fromList = list => { List.reduce((set, key) => add(key, set), empty, list) } @@ -953,7 +938,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let toList = set => { let rec toListInner = (acc, node) => { match (node) { @@ -975,7 +959,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let fromArray = array => { Array.reduce((set, key) => add(key, set), empty, array) } @@ -989,7 +972,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally in `"immutableset"` module */ - provide let toArray = set => { Array.fromList(toList(set)) } diff --git a/stdlib/stack.gr b/stdlib/stack.gr index c8fa18ae1f..98cd327d11 100644 --- a/stdlib/stack.gr +++ b/stdlib/stack.gr @@ -9,7 +9,6 @@ * * @since v0.3.0 */ - module Stack include "list" @@ -142,7 +141,6 @@ provide module Immutable { /** * ImmutableStacks are immutable data structures that store their data in a List. 
*/ - abstract record ImmutableStack { data: List, } @@ -153,7 +151,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.5.4: Originally a module root API */ - provide let empty = { let empty = { data: [], } empty @@ -168,7 +165,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.3.0: Originally a module root API */ - provide let isEmpty = stack => { match (stack) { { data: [] } => true, @@ -186,7 +182,6 @@ provide module Immutable { * @history v0.3.0: Originally a module root API * @history v0.3.1: Rename from `head` to `peek` */ - provide let peek = stack => { match (stack) { { data: [] } => None, @@ -204,7 +199,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.3.0: Originally a module root API */ - provide let push = (value, stack) => { match (stack) { { data: [] } => { data: [value], }, @@ -221,7 +215,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.3.0: Originally a module root API */ - provide let pop = stack => { match (stack) { { data: [] } => stack, @@ -238,7 +231,6 @@ provide module Immutable { * @since v0.6.0 * @history v0.3.2: Originally a module root API */ - provide let size = stack => { match (stack) { { data: [] } => 0, diff --git a/stdlib/string.gr b/stdlib/string.gr index b10c022292..d97fdcdbeb 100644 --- a/stdlib/string.gr +++ b/stdlib/string.gr @@ -7,7 +7,6 @@ * @history v0.1.0: Originally named `strings` * @history v0.2.0: Renamed to `string` */ - module String include "char" @@ -459,11 +458,11 @@ provide let implode = (arr: Array) => { count = 3n marker = 0xF0n } - WasmI32.store8(str + offset, (usv >>> 6n * count) + marker, 0n) + WasmI32.store8(str + offset, (usv >>> (6n * count)) + marker, 0n) offset += 1n while (count > 0n) { - let temp = usv >>> 6n * (count - 1n) + let temp = usv >>> (6n * (count - 1n)) WasmI32.store8(str + offset, 0x80n | temp & 0x3Fn, 0n) count -= 1n offset += 1n @@ -852,12 +851,11 @@ provide let endsWith = (search: String, string: String) => { * @since v0.5.4 */ @unsafe -provide let replaceFirst = - ( - searchPattern: String, - replacement: String, - string: String, - ) => { +provide let replaceFirst = ( + searchPattern: String, + replacement: String, + string: String, +) => { from WasmI32 use { (+), (-), gtU as (>), ltU as (<), (==) } let mut patternPtr = WasmI32.fromGrain(searchPattern) @@ -913,12 +911,11 @@ provide let replaceFirst = * @since v0.5.4 */ @unsafe -provide let replaceLast = - ( - searchPattern: String, - replacement: String, - string: String, - ) => { +provide let replaceLast = ( + searchPattern: String, + replacement: String, + string: String, +) => { from WasmI32 use { (+), (-), gtU as (>), ltU as (<), (==) } let mut patternPtr = WasmI32.fromGrain(searchPattern) @@ -976,12 +973,11 @@ provide let replaceLast = * @since v0.5.4 */ @unsafe -provide let replaceAll = - ( - searchPattern: String, - replacement: String, - string: String, - ) => { +provide let replaceAll = ( + searchPattern: String, + replacement: String, + string: String, +) => { from WasmI32 use { (+), (-), (*), (>>), gtU as (>), ltU as (<), (==) } let mut patternPtr = WasmI32.fromGrain(searchPattern) @@ -1124,14 +1120,13 @@ let mut _BYTES_SIZE_OFFSET = 4n let mut _BYTES_OFFSET = 8n @unsafe -let encodeAtHelp = - ( - string: String, - encoding: Encoding, - includeBom: Bool, - dest: Bytes, - destPos: Number, - ) => { +let encodeAtHelp = ( + string: String, + encoding: Encoding, + includeBom: Bool, + dest: Bytes, + destPos: Number, +) => { from WasmI32 use { (+), (-), (&), (>>>), ltU as (<), (>), leU as (<=), (==) } let byteSize 
= WasmI32.fromGrain(byteLength(string)) >>> 1n let len = WasmI32.fromGrain(length(string)) >>> 1n @@ -1253,10 +1248,8 @@ let encodeAtHelp = throw IndexOutOfBounds } let uPrime = codePoint - 0x10000n - let w1 = ((uPrime & 0b11111111110000000000n) >>> 10n) + - 0xD800n // High surrogate - let w2 = (uPrime & 0b00000000001111111111n) + - 0xDC00n // Low surrogate + let w1 = ((uPrime & 0b11111111110000000000n) >>> 10n) + 0xD800n // High surrogate + let w2 = (uPrime & 0b00000000001111111111n) + 0xDC00n // Low surrogate WasmI32.store8( bytes + bytesIdx, (w1 & 0xff00n) >>> 8n, @@ -1292,10 +1285,8 @@ let encodeAtHelp = throw IndexOutOfBounds } let uPrime = codePoint - 0x10000n - let w1 = ((uPrime & 0b11111111110000000000n) >>> 10n) + - 0xD800n // High surrogate - let w2 = (uPrime & 0b00000000001111111111n) + - 0xDC00n // Low surrogate + let w1 = ((uPrime & 0b11111111110000000000n) >>> 10n) + 0xD800n // High surrogate + let w2 = (uPrime & 0b00000000001111111111n) + 0xDC00n // Low surrogate WasmI32.store8(bytes + bytesIdx, w1 & 0xffn, _BYTES_OFFSET) WasmI32.store8( bytes + bytesIdx + 1n, @@ -1392,18 +1383,17 @@ provide let encodeAt = (string, encoding, dest, destPos, includeBom=false) => { @unsafe let encodeHelp = (string: String, encoding: Encoding, includeBom: Bool) => { - let size = encodedLength(string, encoding) + - (if (includeBom) { - match (encoding) { - UTF8 => 3, - UTF16_LE => 2, - UTF16_BE => 2, - UTF32_LE => 4, - UTF32_BE => 4, - } - } else { - 0 - }) + let size = encodedLength(string, encoding) + (if (includeBom) { + match (encoding) { + UTF8 => 3, + UTF16_LE => 2, + UTF16_BE => 2, + UTF32_LE => 4, + UTF32_BE => 4, + } + } else { + 0 + }) from WasmI32 use { (>>>) } let bytes = WasmI32.toGrain(allocateBytes(WasmI32.fromGrain(size) >>> 1n)) encodeAtHelp(string, encoding, includeBom, bytes, 0) @@ -1517,13 +1507,12 @@ let bytesHaveBom = (bytes: Bytes, encoding: Encoding, start: WasmI32) => { } @unsafe -let decodedLength = - ( - bytes: Bytes, - encoding: Encoding, - start: WasmI32, - size: WasmI32, - ) => { +let decodedLength = ( + bytes: Bytes, + encoding: Encoding, + start: WasmI32, + size: WasmI32, +) => { from WasmI32 use { (+), (-), @@ -1688,14 +1677,13 @@ let decodedLength = } @unsafe -let decodeRangeHelp = - ( - bytes: Bytes, - encoding: Encoding, - skipBom: Bool, - start: Number, - size: Number, - ) => { +let decodeRangeHelp = ( + bytes: Bytes, + encoding: Encoding, + skipBom: Bool, + start: Number, + size: Number, +) => { from WasmI32 use { (+), (-), @@ -1751,8 +1739,10 @@ let decodeRangeHelp = let codeWord = if (w1 >= 0xD800n && w1 <= 0xDBFFn) { // high surrogate. next character is low srurrogate let w1 = (w1 & 0x03FFn) << 10n - let w2 = (WasmI32.load8U(bytesPtr, 2n) << 8n | - WasmI32.load8U(bytesPtr, 3n)) & + let w2 = ( + WasmI32.load8U(bytesPtr, 2n) << 8n | + WasmI32.load8U(bytesPtr, 3n) + ) & 0x03FFn let codeWord = w1 + w2 + 0x10000n // no problems, so go past both code words @@ -1774,8 +1764,10 @@ let decodeRangeHelp = let codeWord = if (w1 >= 0xD800n && w1 <= 0xDBFFn) { // high surrogate. 
next character is low srurrogate let w1 = (w1 & 0x03FFn) << 10n - let w2 = (WasmI32.load8U(bytesPtr, 3n) << 8n | - WasmI32.load8U(bytesPtr, 2n)) & + let w2 = ( + WasmI32.load8U(bytesPtr, 3n) << 8n | + WasmI32.load8U(bytesPtr, 2n) + ) & 0x03FFn //let uPrime = codePoint - 0x10000n //let w1 = ((uPrime & 0b11111111110000000000n) >>> 10n) + 0xD800n // High surrogate @@ -1836,14 +1828,13 @@ let decodeRangeHelp = * @since v0.4.0 * @history v0.6.0: Added `keepBom` default argument */ -provide let decodeRange = - ( - bytes: Bytes, - encoding: Encoding, - start: Number, - size: Number, - keepBom=false, - ) => { +provide let decodeRange = ( + bytes: Bytes, + encoding: Encoding, + start: Number, + size: Number, + keepBom=false, +) => { decodeRangeHelp(bytes, encoding, !keepBom, start, size) } diff --git a/stdlib/sys/file.gr b/stdlib/sys/file.gr index b4a8056e4b..dcfa210bbb 100644 --- a/stdlib/sys/file.gr +++ b/stdlib/sys/file.gr @@ -5,7 +5,6 @@ * * @example include "sys/file" */ - module File include "runtime/unsafe/wasmi32" @@ -526,10 +525,7 @@ provide record DirectoryEntry { * Information about a preopened directory */ provide enum Prestat { - Dir{ - prefix: String, - fd: FileDescriptor, - }, + Dir{ prefix: String, fd: FileDescriptor }, } /** @@ -558,20 +554,21 @@ provide let stderr = FileDescriptor(2) * @returns `Ok(fd)` of the opened file or directory if successful or `Err(exception)` otherwise */ @unsafe -provide let pathOpen = - ( - dirFd: FileDescriptor, - dirFlags: List, - path: String, - openFlags: List, - rights: List, - rightsInheriting: List, - flags: List, - ) => { +provide let pathOpen = ( + dirFd: FileDescriptor, + dirFlags: List, + path: String, + openFlags: List, + rights: List, + rightsInheriting: List, + flags: List, +) => { let dirFdArg = dirFd let pathArg = path let rightsInheritingArg = rightsInheriting - let dirFd = match (dirFd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let dirFd = match (dirFd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let combinedDirFlags = combineLookupFlags(dirFlags) @@ -620,7 +617,9 @@ provide let pathOpen = @unsafe provide let fdRead = (fd: FileDescriptor, size: Number) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let n = WasmI32.fromGrain(size) >> 1n @@ -657,7 +656,9 @@ provide let fdRead = (fd: FileDescriptor, size: Number) => { provide let fdPread = (fd: FileDescriptor, offset: Int64, size: Number) => { let fdArg = fd let offsetArg = offset - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let offset = WasmI64.load(WasmI32.fromGrain(offset), 8n) @@ -695,7 +696,9 @@ provide let fdPread = (fd: FileDescriptor, offset: Int64, size: Number) => { @unsafe provide let fdPrestatGet = (fd: FileDescriptor) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let dir = Memory.malloc(8n) let err = Wasi.fd_prestat_get(fd, dir) @@ -715,7 +718,8 @@ provide let fdPrestatGet = (fd: FileDescriptor) => { } if ( - prefixLen > 0n && WasmI32.eqz(WasmI32.load8U(prefix + prefixLen - 1n, 8n)) + prefixLen > 0n && + WasmI32.eqz(WasmI32.load8U(prefix + prefixLen - 1n, 8n)) ) { // in uvwasi environments the string is null-terminated and the size is reported including it // 
https://github.com/grain-lang/grain/issues/1818 @@ -738,7 +742,9 @@ provide let fdPrestatGet = (fd: FileDescriptor) => { @unsafe provide let fdWrite = (fd: FileDescriptor, data: Bytes) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let iovs = Memory.malloc(3n * 4n) let strPtr = WasmI32.fromGrain(data) @@ -771,7 +777,9 @@ provide let fdWrite = (fd: FileDescriptor, data: Bytes) => { provide let fdPwrite = (fd: FileDescriptor, data: Bytes, offset: Int64) => { let fdArg = fd let offsetArg = offset - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let iovs = Memory.malloc(3n * 4n) let strPtr = WasmI32.fromGrain(data) @@ -807,7 +815,9 @@ provide let fdAllocate = (fd: FileDescriptor, offset: Int64, size: Int64) => { let fdArg = fd let offsetArg = offset let sizeArg = size - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let offset = WasmI64.load(WasmI32.fromGrain(offset), 8n) @@ -830,7 +840,9 @@ provide let fdAllocate = (fd: FileDescriptor, offset: Int64, size: Int64) => { @unsafe provide let fdClose = (fd: FileDescriptor) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let err = Wasi.fd_close(fd) if (err != Wasi._ESUCCESS) { @@ -849,7 +861,9 @@ provide let fdClose = (fd: FileDescriptor) => { @unsafe provide let fdDatasync = (fd: FileDescriptor) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let err = Wasi.fd_datasync(fd) if (err != Wasi._ESUCCESS) { @@ -868,7 +882,9 @@ provide let fdDatasync = (fd: FileDescriptor) => { @unsafe provide let fdSync = (fd: FileDescriptor) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let err = Wasi.fd_sync(fd) if (err != Wasi._ESUCCESS) { @@ -920,7 +936,9 @@ let orderedRights = [ @unsafe provide let fdStats = (fd: FileDescriptor) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let structPtr = Memory.malloc(24n) @@ -961,7 +979,7 @@ provide let fdStats = (fd: FileDescriptor) => { flags: fdflagsList, rights: rightsList, rightsInheriting: rightsInheritingList, - } + }, ) } @@ -976,7 +994,9 @@ provide let fdStats = (fd: FileDescriptor) => { provide let fdSetFlags = (fd: FileDescriptor, flags: List) => { let fdArg = fd let flagsArg = flags - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let flags = combineFdFlags(flags) @@ -997,16 +1017,17 @@ provide let fdSetFlags = (fd: FileDescriptor, flags: List) => { * @returns `Ok(void)` if successful or `Err(exception)` otherwise */ @unsafe -provide let fdSetRights = - ( - fd: FileDescriptor, - rights: List, - rightsInheriting: List, - ) => { +provide let fdSetRights = ( + fd: FileDescriptor, + rights: List, + rightsInheriting: List, +) => { let fdArg = fd let rightsArg = rights let rightsInheritingArg = 
rightsInheriting - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let rights = combineRights(rights) let rightsInheriting = combineRights(rightsInheriting) @@ -1028,7 +1049,9 @@ provide let fdSetRights = @unsafe provide let fdFilestats = (fd: FileDescriptor) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let filestats = Memory.malloc(64n) @@ -1050,7 +1073,7 @@ provide let fdFilestats = (fd: FileDescriptor) => { Memory.free(filestats) return Ok( - { device, inode, filetype, linkcount, size, accessed, modified, changed } + { device, inode, filetype, linkcount, size, accessed, modified, changed }, ) } @@ -1065,7 +1088,9 @@ provide let fdFilestats = (fd: FileDescriptor) => { provide let fdSetSize = (fd: FileDescriptor, size: Int64) => { let fdArg = fd let sizeArg = size - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let size = WasmI64.load(WasmI32.fromGrain(size), 8n) @@ -1087,7 +1112,9 @@ provide let fdSetSize = (fd: FileDescriptor, size: Int64) => { @unsafe provide let fdSetAccessTime = (fd: FileDescriptor, timestamp: Int64) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let time = WasmI64.load(WasmI32.fromGrain(timestamp), 8n) @@ -1108,7 +1135,9 @@ provide let fdSetAccessTime = (fd: FileDescriptor, timestamp: Int64) => { @unsafe provide let fdSetAccessTimeNow = (fd: FileDescriptor) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let err = Wasi.fd_filestat_set_times(fd, 0N, 0N, Wasi._TIME_SET_ATIM_NOW) if (err != Wasi._ESUCCESS) { @@ -1128,7 +1157,9 @@ provide let fdSetAccessTimeNow = (fd: FileDescriptor) => { @unsafe provide let fdSetModifiedTime = (fd: FileDescriptor, timestamp: Int64) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let time = WasmI64.load(WasmI32.fromGrain(timestamp), 8n) @@ -1149,7 +1180,9 @@ provide let fdSetModifiedTime = (fd: FileDescriptor, timestamp: Int64) => { @unsafe provide let fdSetModifiedTimeNow = (fd: FileDescriptor) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let err = Wasi.fd_filestat_set_times(fd, 0N, 0N, Wasi._TIME_SET_MTIM_NOW) if (err != Wasi._ESUCCESS) { @@ -1168,7 +1201,9 @@ provide let fdSetModifiedTimeNow = (fd: FileDescriptor) => { @unsafe provide let fdReaddir = (fd: FileDescriptor) => { let fdArg = fd - let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let fd = match (fd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let structWidth = 24n @@ -1280,7 +1315,9 @@ provide let fdRenumber = (fromFd: FileDescriptor, toFd: FileDescriptor) => { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, } - let toFd = match (toFd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n } + let toFd = match (toFd) { + FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n, + } let err = Wasi.fd_renumber(fromFd, toFd) if 
(err != Wasi._ESUCCESS) {
@@ -1303,7 +1340,9 @@ provide let fdSeek = (fd: FileDescriptor, offset: Int64, whence: Whence) => {
   let fdArg = fd
   let offsetArg = offset
   let whenceArg = whence
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let offset = WasmI64.load(WasmI32.fromGrain(offset), 8n)
@@ -1334,7 +1373,9 @@ provide let fdSeek = (fd: FileDescriptor, offset: Int64, whence: Whence) => {
 @unsafe
 provide let fdTell = (fd: FileDescriptor) => {
   let fdArg = fd
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let offset = allocateInt64()
   let offsetPtr = offset + 8n
@@ -1358,7 +1399,9 @@ provide let fdTell = (fd: FileDescriptor) => {
 @unsafe
 provide let pathCreateDirectory = (fd: FileDescriptor, path: String) => {
   let fdArg = fd
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let stringPtr = WasmI32.fromGrain(path)
@@ -1381,14 +1424,15 @@ provide let pathCreateDirectory = (fd: FileDescriptor, path: String) => {
  * @returns `Ok(info)` of the `Filestats` associated with the file descriptor if successful or `Err(exception)` otherwise
  */
 @unsafe
-provide let pathFilestats =
-  (
-    fd: FileDescriptor,
-    dirFlags: List,
-    path: String,
-  ) => {
+provide let pathFilestats = (
+  fd: FileDescriptor,
+  dirFlags: List,
+  path: String,
+) => {
   let fdArg = fd
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let combinedDirFlags = combineLookupFlags(dirFlags)
@@ -1422,7 +1466,7 @@ provide let pathFilestats =
   Memory.free(filestats)

   return Ok(
-    { device, inode, filetype, linkcount, size, accessed, modified, changed }
+    { device, inode, filetype, linkcount, size, accessed, modified, changed },
   )
 }
@@ -1436,15 +1480,16 @@ provide let pathFilestats =
  * @returns `Ok(void)` if successful or `Err(exception)` otherwise
  */
 @unsafe
-provide let pathSetAccessTime =
-  (
-    fd: FileDescriptor,
-    dirFlags: List,
-    path: String,
-    timestamp: Int64,
-  ) => {
+provide let pathSetAccessTime = (
+  fd: FileDescriptor,
+  dirFlags: List,
+  path: String,
+  timestamp: Int64,
+) => {
   let fdArg = fd
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let combinedDirFlags = combineLookupFlags(dirFlags)
@@ -1479,14 +1524,15 @@ provide let pathSetAccessTime =
  * @returns `Ok(void)` if successful or `Err(exception)` otherwise
  */
 @unsafe
-provide let pathSetAccessTimeNow =
-  (
-    fd: FileDescriptor,
-    dirFlags: List,
-    path: String,
-  ) => {
+provide let pathSetAccessTimeNow = (
+  fd: FileDescriptor,
+  dirFlags: List,
+  path: String,
+) => {
   let fdArg = fd
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let combinedDirFlags = combineLookupFlags(dirFlags)
@@ -1520,15 +1566,16 @@ provide let pathSetAccessTimeNow =
  * @returns `Ok(void)` if successful or `Err(exception)` otherwise
  */
 @unsafe
-provide let pathSetModifiedTime =
-  (
-    fd: FileDescriptor,
-    dirFlags: List,
-    path: String,
-    timestamp: Int64,
-  ) => {
+provide let pathSetModifiedTime = (
+  fd: FileDescriptor,
+  dirFlags: List,
+  path: String,
+  timestamp: Int64,
+) => {
   let fdArg = fd
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let combinedDirFlags = combineLookupFlags(dirFlags)
@@ -1563,14 +1610,15 @@ provide let pathSetModifiedTime =
  * @returns `Ok(void)` if successful or `Err(exception)` otherwise
  */
 @unsafe
-provide let pathSetModifiedTimeNow =
-  (
-    fd: FileDescriptor,
-    dirFlags: List,
-    path: String,
-  ) => {
+provide let pathSetModifiedTimeNow = (
+  fd: FileDescriptor,
+  dirFlags: List,
+  path: String,
+) => {
   let fdArg = fd
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let combinedDirFlags = combineLookupFlags(dirFlags)
@@ -1605,14 +1653,13 @@ provide let pathSetModifiedTimeNow =
  * @returns `Ok(void)` if successful or `Err(exception)` otherwise
  */
 @unsafe
-provide let pathLink =
-  (
-    sourceFd: FileDescriptor,
-    dirFlags: List,
-    sourcePath: String,
-    targetFd: FileDescriptor,
-    targetPath: String,
-  ) => {
+provide let pathLink = (
+  sourceFd: FileDescriptor,
+  dirFlags: List,
+  sourcePath: String,
+  targetFd: FileDescriptor,
+  targetPath: String,
+) => {
   let sourceFdArg = sourceFd
   let targetFdArg = targetFd
   let sourceFd = match (sourceFd) {
@@ -1656,14 +1703,15 @@ provide let pathLink =
  * @returns `Ok(void)` if successful or `Err(exception)` otherwise
  */
 @unsafe
-provide let pathSymlink =
-  (
-    fd: FileDescriptor,
-    sourcePath: String,
-    targetPath: String,
-  ) => {
+provide let pathSymlink = (
+  fd: FileDescriptor,
+  sourcePath: String,
+  targetPath: String,
+) => {
   let fdArg = fd
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let sourcePtr = WasmI32.fromGrain(sourcePath)
   let targetPtr = WasmI32.fromGrain(targetPath)
@@ -1695,7 +1743,9 @@ provide let pathSymlink =
 @unsafe
 provide let pathUnlink = (fd: FileDescriptor, path: String) => {
   let fdArg = fd
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let pathPtr = WasmI32.fromGrain(path)
   let pathSize = WasmI32.load(pathPtr, 4n)
@@ -1720,7 +1770,9 @@ provide let pathUnlink = (fd: FileDescriptor, path: String) => {
 provide let pathReadlink = (fd: FileDescriptor, path: String, size: Number) => {
   let fdArg = fd
   let sizeArg = size
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let pathPtr = WasmI32.fromGrain(path)
   let pathSize = WasmI32.load(pathPtr, 4n)
@@ -1755,7 +1807,9 @@ provide let pathReadlink = (fd: FileDescriptor, path: String, size: Number) => {
 @unsafe
 provide let pathRemoveDirectory = (fd: FileDescriptor, path: String) => {
   let fdArg = fd
-  let fd = match (fd) { FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n }
+  let fd = match (fd) {
+    FileDescriptor(n) => WasmI32.fromGrain(n) >> 1n,
+  }

   let pathPtr = WasmI32.fromGrain(path)
   let pathSize = WasmI32.load(pathPtr, 4n)
@@ -1778,13 +1832,12 @@ provide let pathRemoveDirectory = (fd: FileDescriptor, path: String) => {
  * @returns `Ok(void)` if successful or `Err(exception)` otherwise
  */
 @unsafe
-provide let pathRename =
-  (
-    sourceFd: FileDescriptor,
-    sourcePath: String,
-    targetFd: FileDescriptor,
-    targetPath: String,
-  ) => {
+provide let pathRename = (
+  sourceFd: FileDescriptor,
+  sourcePath: String,
+  targetFd: FileDescriptor,
+  targetPath: String,
+) => {
   let sourceFdArg = sourceFd
   let targetFdArg = targetFd
   let sourceFd = match (sourceFd) {
@@ -1993,8 +2046,8 @@ let makeAbsolute = (path: String) => {
     pathLen == 0n ||
     pathLen == 1n && WasmI32.load8U(pathPtr, 0n) == _CHAR_DOT ||
     pathLen == 2n &&
-      WasmI32.load8U(pathPtr, 0n) == _CHAR_DOT &&
-      WasmI32.load8U(pathPtr, 1n) == _CHAR_SLASH
+    WasmI32.load8U(pathPtr, 0n) == _CHAR_DOT &&
+    WasmI32.load8U(pathPtr, 1n) == _CHAR_SLASH
   ) {
     return cwd
   }
@@ -2041,14 +2094,13 @@ let makeAbsolute = (path: String) => {
  * @since v0.6.0
  */
 @unsafe
-provide let open =
-  (
-    path: String,
-    openFlags: List,
-    rights: List,
-    rightsInheriting: List,
-    flags: List,
-  ) => {
+provide let open = (
+  path: String,
+  openFlags: List,
+  rights: List,
+  rightsInheriting: List,
+  flags: List,
+) => {
   match (findPath(makeAbsolute(path))) {
     Ok((fd, relativePath)) => {
       pathOpen(
diff --git a/stdlib/sys/process.gr b/stdlib/sys/process.gr
index c4f8662061..1174488a29 100644
--- a/stdlib/sys/process.gr
+++ b/stdlib/sys/process.gr
@@ -5,7 +5,6 @@
  *
  * @example include "sys/process"
  */
-
 module Process

 include "runtime/unsafe/wasmi32"
diff --git a/stdlib/sys/random.gr b/stdlib/sys/random.gr
index 5077eac63a..8c0cff3a01 100644
--- a/stdlib/sys/random.gr
+++ b/stdlib/sys/random.gr
@@ -3,7 +3,6 @@
  *
  * @example include "sys/random"
  */
-
 module Random

 include "runtime/unsafe/wasmi32"
diff --git a/stdlib/sys/time.gr b/stdlib/sys/time.gr
index a4f4d5c289..b1d2dc1e4e 100644
--- a/stdlib/sys/time.gr
+++ b/stdlib/sys/time.gr
@@ -3,7 +3,6 @@
  *
  * @example include "sys/time"
  */
-
 module Time

 include "runtime/unsafe/wasmi32"
diff --git a/stdlib/uint16.gr b/stdlib/uint16.gr
index 05cdf51b7d..ccc70f25bb 100644
--- a/stdlib/uint16.gr
+++ b/stdlib/uint16.gr
@@ -4,7 +4,6 @@
  *
  * @since v0.6.0
  */
-
 module Uint16

 include "runtime/unsafe/wasmi32"
diff --git a/stdlib/uint32.gr b/stdlib/uint32.gr
index 71518c4f21..c5b6f39c85 100644
--- a/stdlib/uint32.gr
+++ b/stdlib/uint32.gr
@@ -4,7 +4,6 @@
  *
  * @since v0.6.0
  */
-
 module Uint32

 include "runtime/unsafe/wasmi32"
diff --git a/stdlib/uint64.gr b/stdlib/uint64.gr
index 53185ab8e5..9f3eab16a5 100644
--- a/stdlib/uint64.gr
+++ b/stdlib/uint64.gr
@@ -4,7 +4,6 @@
  *
  * @since v0.6.0
  */
-
 module Uint64

 include "runtime/unsafe/wasmi32"
diff --git a/stdlib/uint8.gr b/stdlib/uint8.gr
index 3531d789fa..7162d5d0af 100644
--- a/stdlib/uint8.gr
+++ b/stdlib/uint8.gr
@@ -4,7 +4,6 @@
  *
  * @since v0.6.0
  */
-
 module Uint8

 include "runtime/unsafe/wasmi32"
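
Note (illustration, not part of the diff): the stdlib churn above is mechanical output from the reworked formatter. Below is a minimal Grain sketch of the two layout rules visible in these hunks: a constructor `match` used as an expression is broken onto one branch per line with a trailing comma, and a parameter list that must wrap is no longer double-indented, so the closing `) => {` returns to the left margin. The module name, the `FileDescriptor` enum, and the `unwrap`/`describe` functions are hypothetical stand-ins, not code from this PR.

module FormatExample

// Hypothetical stand-in for the sys/fs file descriptor type; illustration only.
enum FileDescriptor {
  FileDescriptor(Number),
}

// Match layout: previously this could be printed as
//   let n = match (fd) { FileDescriptor(n) => n }
// The hunks above show the branch broken out with a trailing comma:
let unwrap = (fd: FileDescriptor) => {
  let n = match (fd) {
    FileDescriptor(n) => n,
  }
  n
}

// Signature layout: when a parameter list is too long for one line, the old
// output indented the whole list an extra level and indented `) => {`; the
// new output keeps parameters at one indent and closes at column 0 (shown
// broken out here only to illustrate the shape; short signatures stay on
// one line).
provide let describe = (
  fd: FileDescriptor,
  label: String,
) => {
  label ++ ": " ++ toString(unwrap(fd))
}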