Source file token_translator.ml
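
(* Translates the tokens emitted by the parser's token sink into a generic
   JSON-like representation, parameterized over an implementation of
   Translator_intf.S that supplies the primitive constructors (objects,
   arrays, strings, numbers). *)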
module Translate (Impl : Translator_intf.S) : sig
  type t

  val token : Offset_utils.t -> Parser_env.token_sink_result -> t

  val token_list : Offset_utils.t -> Parser_env.token_sink_result list -> t
end
with type t = Impl.t = struct
  type t = Impl.t
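
  (* Translate a single token from the parser's token sink into an [Impl.t]
     value carrying its type, lex-mode context, location, source range, and
     raw value. *)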
  let token offset_table { Parser_env.token_loc; token; token_context } =
    Loc.(
      Impl.obj
        [
          ("type", Impl.string (Token.token_to_string token));
          ( "context",
            Impl.string
              Parser_env.Lex_mode.(
                match token_context with
                | NORMAL -> "normal"
                | TYPE -> "type"
                | JSX_TAG -> "jsxTag"
                | JSX_CHILD -> "jsxChild"
                | TEMPLATE -> "template"
                | REGEXP -> "regexp"
              )
          );
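          (* Start and end positions of the token as line/column pairs. *)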
          ( "loc",
            Impl.obj
              [
                ( "start",
                  Impl.obj
                    [
                      ("line", Impl.number (float token_loc.start.line));
                      ("column", Impl.number (float token_loc.start.column));
                    ]
                );
                ( "end",
                  Impl.obj
                    [
                      ("line", Impl.number (float token_loc._end.line));
                      ("column", Impl.number (float token_loc._end.column));
                    ]
                );
              ]
          );
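          (* Absolute start/end offsets of the token in the source text,
             looked up in the precomputed offset table. *)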
          ( "range",
            Impl.array
              [
                Impl.number (float (Offset_utils.offset offset_table token_loc.start));
                Impl.number (float (Offset_utils.offset offset_table token_loc._end));
              ]
          );
          ("value", Impl.string (Token.value_of_token token));
        ]
    )
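
  (* Translate a whole token stream into an [Impl.t] array. The
     [List.rev_map ... |> List.rev] pattern is equivalent to [List.map] but
     stays tail-recursive, presumably to cope with long token lists. *)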
  let token_list offset_table tokens =
    Impl.array (List.rev_map (token offset_table) tokens |> List.rev)
end
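
(* Usage sketch (an assumption for illustration, not part of this file): the
   functor could be instantiated with a hypothetical module that builds
   Yojson values. Only the constructors used above are shown; the real
   Translator_intf.S may require additional members.

     module Json_impl = struct
       type t = Yojson.Safe.t
       let string s = `String s
       let number f = `Float f
       let obj fields = `Assoc fields
       let array elems = `List elems
     end

     module Json_of_tokens = Translate (Json_impl)

   [Json_of_tokens.token_list offset_table tokens] would then yield a JSON
   array describing every token the parser emitted. *)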