Legend:
Page
Library
Module
Module type
Parameter
Class
Class type
Source
Source file reason_toolchain_ocaml.ml
open Reason_toolchain_conf

(* The OCaml parser keeps doc strings in the comment list.
   To avoid duplicating comments, we need to filter comments that appear
   as doc strings in the AST out of the comment list. *)
let doc_comments_filter () =
  let open Ast_mapper in
  let open Parsetree in
  (* Locations of doc-string payloads already present in the AST;
     used below to drop the matching entries from the lexer's comment list. *)
  let seen = Hashtbl.create 7 in
  let attribute mapper = function
    | { attr_name = { Location.txt = ("ocaml.doc" | "ocaml.text") };
        attr_payload =
          PStr [ { pstr_desc =
                     Pstr_eval
                       ({ pexp_desc =
                            Pexp_constant (Pconst_string (_text, _loc, None)) },
                        _);
                   pstr_loc = loc } ] } as attribute ->
      (* Workaround: OCaml 4.02.3 kept an initial '*' in docstrings.
       * For other versions, we have to put the '*' back. *)
      Hashtbl.add seen loc ();
      default_mapper.attribute mapper attribute
    | attribute -> default_mapper.attribute mapper attribute
  in
  let mapper = { default_mapper with attribute } in
  (* [filter] keeps only comments whose location was NOT recorded as a
     doc string by the mapper above. *)
  let filter (_text, loc) = not (Hashtbl.mem seen loc) in
  (mapper, filter)

module Lexer_impl = struct
  type t = Lexing.lexbuf

  (* [insert_completion_ident] is ignored by the OCaml lexer backend. *)
  let init ?insert_completion_ident:_ lexbuf =
    Lexer.init ();
    lexbuf

  let token = Lexer.token

  (* Comments that survived doc-string filtering; set by [filter_comments],
     read back by [get_comments]. *)
  let filtered_comments = ref []

  let filter_comments filter =
    filtered_comments := List.filter filter (Lexer.comments ())

  let get_comments _lexbuf _docstrings = !filtered_comments
end

module OCaml_parser = Parser

type token = OCaml_parser.token
type invalid_docstrings = unit

(* OCaml parser parses into compiler-libs version of Ast.
Parsetrees are converted to Reason version on the fly. *)

(* Run [fn] on [lexbuf], walk the result with the doc-comment-collecting
   mapper, then drop the collected doc strings from the lexer's comment
   list.  Returns the parse result paired with the (unit) invalid
   docstrings. *)
let parse_and_filter_doc_comments iter fn lexbuf =
  let it, filter = doc_comments_filter () in
  let result = fn lexbuf in
  ignore (iter it result);
  Lexer_impl.filter_comments filter;
  (result, ())

let implementation lexbuf =
  parse_and_filter_doc_comments
    (fun it -> it.Ast_mapper.structure it)
    (fun lexbuf ->
      From_current.copy_structure (Parser.implementation Lexer.token lexbuf))
    lexbuf

let core_type lexbuf =
  parse_and_filter_doc_comments
    (fun it -> it.Ast_mapper.typ it)
    (fun lexbuf ->
      From_current.copy_core_type (Parser.parse_core_type Lexer.token lexbuf))
    lexbuf

let interface lexbuf =
  parse_and_filter_doc_comments
    (fun it -> it.Ast_mapper.signature it)
    (fun lexbuf ->
      From_current.copy_signature (Parser.interface Lexer.token lexbuf))
    lexbuf

(* Only [Ptop_def] carries a structure to scan for doc strings;
   directives have none. *)
let filter_toplevel_phrase it = function
  | Parsetree.Ptop_def str -> ignore (it.Ast_mapper.structure it str)
  | Parsetree.Ptop_dir _ -> ()

let toplevel_phrase lexbuf =
  parse_and_filter_doc_comments
    filter_toplevel_phrase
    (fun lexbuf ->
      From_current.copy_toplevel_phrase
        (Parser.toplevel_phrase Lexer.token lexbuf))
    lexbuf

let use_file lexbuf =
  parse_and_filter_doc_comments
    (fun it result -> List.map (filter_toplevel_phrase it) result)
    (fun lexbuf ->
      List.map From_current.copy_toplevel_phrase
        (Parser.use_file Lexer.token lexbuf))
    lexbuf

(* Skip tokens to the end of the phrase *)
(* TODO: consolidate these copy-paste skip/trys into something that works for
* every syntax (also see [Reason_syntax_util]). *)
let rec skip_phrase lexbuf =
  try
    match Lexer.token lexbuf with
    | OCaml_parser.SEMISEMI | OCaml_parser.EOF -> ()
    | _ -> skip_phrase lexbuf
  with
  (* Recoverable lexer errors: keep scanning for the phrase terminator. *)
  | Lexer.Error (Lexer.Unterminated_comment _, _)
  | Lexer.Error (Lexer.Unterminated_string, _)
  | Lexer.Error (Lexer.Unterminated_string_in_comment _, _)
  | Lexer.Error (Lexer.Illegal_character _, _) ->
    skip_phrase lexbuf

(* Skip ahead only if the parser is not already stopped at a phrase
   boundary (";;" or end of input). *)
let maybe_skip_phrase lexbuf =
  if Parsing.is_current_lookahead OCaml_parser.SEMISEMI
     || Parsing.is_current_lookahead OCaml_parser.EOF
  then ()
  else skip_phrase lexbuf

(* Run parser [fn], and on failure in toplevel mode resynchronize the
   lexbuf to the next phrase before re-raising, so the toplevel can keep
   going. *)
let safeguard_parsing lexbuf fn =
  try fn ()
  with
  | Lexer.Error (Lexer.Illegal_character _, _) as err
    when !Location.input_name = "//toplevel//" ->
    skip_phrase lexbuf;
    raise err
  | Syntaxerr.Error _ as err
    when !Location.input_name = "//toplevel//" ->
    maybe_skip_phrase lexbuf;
    raise err
  (* Escape error is raised as a general catchall when a syntax_error() is
     thrown in the parser.
   *)
  | Parsing.Parse_error | Syntaxerr.Escape_error ->
    let loc = Location.curr lexbuf in
    if !Location.input_name = "//toplevel//" then maybe_skip_phrase lexbuf;
    raise (Syntaxerr.Error (Syntaxerr.Other loc))

(* Unfortunately we drop the comments because there doesn't exist an ML
* printer that formats comments *and* line wrapping! (yet) *)
let format_interface_with_comments (signature, _) formatter =
  Pprintast.signature formatter (To_current.copy_signature signature)

let format_implementation_with_comments (structure, _) formatter =
  (* Strip Reason-only stylistic attributes and backport let-operator
     sugar before handing the tree to the upstream printer. *)
  let structure =
    Reason_syntax_util.(
      apply_mapper_to_structure
        structure
        (backport_letopt_mapper remove_stylistic_attrs_mapper))
  in
  Pprintast.structure formatter (To_current.copy_structure structure)

module Lexer = Lexer_impl