mirror of
https://github.com/KevinMidboe/linguist.git
synced 2025-10-29 17:50:22 +00:00
Merge remote-tracking branch 'upstream/master'
This commit is contained in:
856
samples/Erlang/elixir_parser.yrl
Normal file
856
samples/Erlang/elixir_parser.yrl
Normal file
@@ -0,0 +1,856 @@
|
||||
%% yecc declarations for the Elixir parser grammar.
%% Every nonterminal symbol produced by a rule below must be listed here.
Nonterminals
  grammar expr_list
  expr container_expr block_expr access_expr
  no_parens_expr no_parens_zero_expr no_parens_one_expr no_parens_one_ambig_expr
  bracket_expr bracket_at_expr bracket_arg matched_expr unmatched_expr max_expr
  unmatched_op_expr matched_op_expr no_parens_op_expr no_parens_many_expr
  comp_op_eol at_op_eol unary_op_eol and_op_eol or_op_eol capture_op_eol
  add_op_eol mult_op_eol two_op_eol three_op_eol pipe_op_eol stab_op_eol
  arrow_op_eol match_op_eol when_op_eol in_op_eol in_match_op_eol
  type_op_eol rel_op_eol
  open_paren close_paren empty_paren eoe
  list list_args open_bracket close_bracket
  tuple open_curly close_curly
  bit_string open_bit close_bit
  map map_op map_close map_args map_expr struct_op
  assoc_op_eol assoc_expr assoc_base assoc_update assoc_update_kw assoc
  container_args_base container_args
  call_args_parens_expr call_args_parens_base call_args_parens parens_call
  call_args_no_parens_one call_args_no_parens_ambig call_args_no_parens_expr
  call_args_no_parens_comma_expr call_args_no_parens_all call_args_no_parens_many
  call_args_no_parens_many_strict
  stab stab_eoe stab_expr stab_op_eol_and_expr stab_parens_many
  kw_eol kw_base kw call_args_no_parens_kw_expr call_args_no_parens_kw
  dot_op dot_alias dot_alias_container
  dot_identifier dot_op_identifier dot_do_identifier
  dot_paren_identifier dot_bracket_identifier
  do_block fn_eoe do_eoe end_eoe block_eoe block_item block_list
  .

%% Terminal symbols as emitted by the Elixir tokenizer.
Terminals
  identifier kw_identifier kw_identifier_safe kw_identifier_unsafe bracket_identifier
  paren_identifier do_identifier block_identifier
  fn 'end' aliases
  number atom atom_safe atom_unsafe bin_string list_string sigil
  dot_call_op op_identifier
  comp_op at_op unary_op and_op or_op arrow_op match_op in_op in_match_op
  type_op dual_op add_op mult_op two_op three_op pipe_op stab_op when_op assoc_op
  capture_op rel_op
  'true' 'false' 'nil' 'do' eol ';' ',' '.'
  '(' ')' '[' ']' '{' '}' '<<' '>>' '%{}' '%'
  .
|
||||
|
||||
Rootsymbol grammar.

%% Two shift/reduce conflicts coming from call_args_parens.
Expect 2.

%% yecc precedence table: higher number binds tighter; Left/Right/Nonassoc
%% set associativity for each operator nonterminal.
%% Changes in ops and precedence should be reflected on lib/elixir/lib/macro.ex
%% Note though the operator => in practice has lower precedence than all others,
%% its entry in the table is only to support the %{user | foo => bar} syntax.
Left       5 do.
Right     10 stab_op_eol.     %% ->
Left      20 ','.
Nonassoc  30 capture_op_eol.  %% &
Left      40 in_match_op_eol. %% <-, \\ (allowed in matches along =)
Right     50 when_op_eol.     %% when
Right     60 type_op_eol.     %% ::
Right     70 pipe_op_eol.     %% |
Right     80 assoc_op_eol.    %% =>
Right     90 match_op_eol.    %% =
Left     130 or_op_eol.       %% ||, |||, or
Left     140 and_op_eol.      %% &&, &&&, and
Left     150 comp_op_eol.     %% ==, !=, =~, ===, !==
Left     160 rel_op_eol.      %% <, >, <=, >=
Left     170 arrow_op_eol.    %% |>, <<<, >>>, ~>>, <<~, ~>, <~, <~>, <|>
Left     180 in_op_eol.       %% in
Left     190 three_op_eol.    %% ^^^
Right    200 two_op_eol.      %% ++, --, .., <>
Left     210 add_op_eol.      %% +, -
Left     220 mult_op_eol.     %% *, /
Nonassoc 300 unary_op_eol.    %% +, -, !, ^, not, ~~~
Left     310 dot_call_op.
Left     310 dot_op.          %% .
Nonassoc 320 at_op_eol.       %% @
Nonassoc 330 dot_identifier.
||||
|
||||
%%% MAIN FLOW OF EXPRESSIONS

%% Entry point: a source file is an optional-eoe-delimited expression list.
%% eoe = "end of expression" (newline and/or semicolon).
grammar -> eoe : nil.
grammar -> expr_list : to_block('$1').
grammar -> eoe expr_list : to_block('$2').
grammar -> expr_list eoe : to_block('$1').
grammar -> eoe expr_list eoe : to_block('$2').
grammar -> '$empty' : nil.

% Note expressions are on reverse order
expr_list -> expr : ['$1'].
expr_list -> expr_list eoe expr : ['$3'|'$1'].

expr -> matched_expr : '$1'.
expr -> no_parens_expr : '$1'.
expr -> unmatched_expr : '$1'.

%% In Elixir we have three main call syntaxes: with parentheses,
%% without parentheses and with do blocks. They are represented
%% in the AST as matched, no_parens and unmatched.
%%
%% Calls without parentheses are further divided according to how
%% problematic they are:
%%
%% (a) no_parens_one: a call with one unproblematic argument
%% (e.g. `f a` or `f g a` and similar) (includes unary operators)
%%
%% (b) no_parens_many: a call with several arguments (e.g. `f a, b`)
%%
%% (c) no_parens_one_ambig: a call with one argument which is
%% itself a no_parens_many or no_parens_one_ambig (e.g. `f g a, b`
%% or `f g h a, b` and similar)
%%
%% Note, in particular, that no_parens_one_ambig expressions are
%% ambiguous and are interpreted such that the outer function has
%% arity 1 (e.g. `f g a, b` is interpreted as `f(g(a, b))` rather
%% than `f(g(a), b)`). Hence the name, no_parens_one_ambig.
%%
%% The distinction is required because we can't, for example, have
%% a function call with a do block as argument inside another do
%% block call, unless there are parentheses:
%%
%%   if if true do true else false end do  #=> invalid
%%   if(if true do true else false end) do #=> valid
%%
%% Similarly, it is not possible to nest calls without parentheses
%% if their arity is more than 1:
%%
%%   foo a, bar b, c  #=> invalid
%%   foo(a, bar b, c) #=> invalid
%%   foo bar a, b     #=> valid
%%   foo a, bar(b, c) #=> valid
%%
%% So the different grammar rules need to take into account
%% if calls without parentheses are do blocks in particular
%% segments and act accordingly.
|
||||
%% An operator application stays "matched" only while both operands are
%% matched; mixing in unmatched or no-parens operands is handled below.
matched_expr -> matched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
matched_expr -> unary_op_eol matched_expr : build_unary_op('$1', '$2').
matched_expr -> at_op_eol matched_expr : build_unary_op('$1', '$2').
matched_expr -> capture_op_eol matched_expr : build_unary_op('$1', '$2').
matched_expr -> no_parens_one_expr : '$1'.
matched_expr -> no_parens_zero_expr : '$1'.
matched_expr -> access_expr : '$1'.
matched_expr -> access_expr kw_identifier : throw_invalid_kw_identifier('$2').

unmatched_expr -> matched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unmatched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unmatched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unmatched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unary_op_eol expr : build_unary_op('$1', '$2').
unmatched_expr -> at_op_eol expr : build_unary_op('$1', '$2').
unmatched_expr -> capture_op_eol expr : build_unary_op('$1', '$2').
unmatched_expr -> block_expr : '$1'.

no_parens_expr -> matched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
no_parens_expr -> unary_op_eol no_parens_expr : build_unary_op('$1', '$2').
no_parens_expr -> at_op_eol no_parens_expr : build_unary_op('$1', '$2').
no_parens_expr -> capture_op_eol no_parens_expr : build_unary_op('$1', '$2').
no_parens_expr -> no_parens_one_ambig_expr : '$1'.
no_parens_expr -> no_parens_many_expr : '$1'.

%% Calls carrying a trailing do-block; the block's keyword list is appended
%% to the call arguments.
block_expr -> parens_call call_args_parens do_block : build_identifier('$1', '$2' ++ '$3').
block_expr -> parens_call call_args_parens call_args_parens do_block : build_nested_parens('$1', '$2', '$3' ++ '$4').
block_expr -> dot_do_identifier do_block : build_identifier('$1', '$2').
block_expr -> dot_identifier call_args_no_parens_all do_block : build_identifier('$1', '$2' ++ '$3').
||||
|
||||
%% Each *_op_expr nonterminal pairs an operator token with its right-hand
%% operand as {OpToken, RightExpr}; build_op/3 combines it with the left
%% operand at the call site.
matched_op_expr -> match_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> add_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> mult_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> two_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> three_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> and_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> or_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> in_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> in_match_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> type_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> when_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> pipe_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> comp_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> rel_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> arrow_op_eol matched_expr : {'$1', '$2'}.
%% Warn for no parens subset
matched_op_expr -> arrow_op_eol no_parens_one_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.

unmatched_op_expr -> match_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> add_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> mult_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> two_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> three_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> and_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> or_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> in_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> in_match_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> type_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> when_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> pipe_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> comp_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> rel_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> arrow_op_eol unmatched_expr : {'$1', '$2'}.

no_parens_op_expr -> match_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> add_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> mult_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> two_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> three_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> and_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> or_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> in_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> in_match_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> type_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> when_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> pipe_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> comp_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> rel_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> arrow_op_eol no_parens_expr : {'$1', '$2'}.
%% Warn for no parens subset
no_parens_op_expr -> arrow_op_eol no_parens_one_ambig_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.
no_parens_op_expr -> arrow_op_eol no_parens_many_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.

%% Allow when (and only when) with keywords
no_parens_op_expr -> when_op_eol call_args_no_parens_kw : {'$1', '$2'}.
|
||||
|
||||
%% No-parens call forms, split by argument shape: ambiguous single argument,
%% strict many-arguments, exactly one argument, and zero arguments.
no_parens_one_ambig_expr -> dot_op_identifier call_args_no_parens_ambig : build_identifier('$1', '$2').
no_parens_one_ambig_expr -> dot_identifier call_args_no_parens_ambig : build_identifier('$1', '$2').

no_parens_many_expr -> dot_op_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2').
no_parens_many_expr -> dot_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2').

no_parens_one_expr -> dot_op_identifier call_args_no_parens_one : build_identifier('$1', '$2').
no_parens_one_expr -> dot_identifier call_args_no_parens_one : build_identifier('$1', '$2').
no_parens_zero_expr -> dot_do_identifier : build_identifier('$1', nil).
no_parens_zero_expr -> dot_identifier : build_identifier('$1', nil).
|
||||
|
||||
%% From this point on, we just have constructs that can be
%% used with the access syntax. Notice that (dot_)identifier
%% is not included in this list simply because the tokenizer
%% marks identifiers followed by brackets as bracket_identifier.
access_expr -> bracket_at_expr : '$1'.
access_expr -> bracket_expr : '$1'.
access_expr -> at_op_eol number : build_unary_op('$1', ?exprs('$2')).
access_expr -> unary_op_eol number : build_unary_op('$1', ?exprs('$2')).
access_expr -> capture_op_eol number : build_unary_op('$1', ?exprs('$2')).
access_expr -> fn_eoe stab end_eoe : build_fn('$1', reverse('$2')).
access_expr -> open_paren stab close_paren : build_stab(reverse('$2')).
access_expr -> open_paren stab ';' close_paren : build_stab(reverse('$2')).
access_expr -> open_paren ';' stab ';' close_paren : build_stab(reverse('$3')).
access_expr -> open_paren ';' stab close_paren : build_stab(reverse('$3')).
access_expr -> open_paren ';' close_paren : build_stab([]).
access_expr -> empty_paren : nil.
access_expr -> number : ?exprs('$1').
%% `list` carries {Args, Location} (see build_list); element(1, ...) drops the location.
access_expr -> list : element(1, '$1').
access_expr -> map : '$1'.
access_expr -> tuple : '$1'.
access_expr -> 'true' : ?id('$1').
access_expr -> 'false' : ?id('$1').
access_expr -> 'nil' : ?id('$1').
access_expr -> bin_string : build_bin_string('$1').
access_expr -> list_string : build_list_string('$1').
access_expr -> bit_string : '$1'.
access_expr -> sigil : build_sigil('$1').
access_expr -> max_expr : '$1'.

%% Aliases and properly formed calls. Used by map_expr.
max_expr -> atom : ?exprs('$1').
max_expr -> atom_safe : build_quoted_atom('$1', true).
max_expr -> atom_unsafe : build_quoted_atom('$1', false).
max_expr -> parens_call call_args_parens : build_identifier('$1', '$2').
max_expr -> parens_call call_args_parens call_args_parens : build_nested_parens('$1', '$2', '$3').
max_expr -> dot_alias : '$1'.
|
||||
|
||||
%% Bracket-based access syntax (expr[arg]); lowered through build_access/2.
bracket_arg -> open_bracket kw close_bracket : build_list('$1', '$2').
bracket_arg -> open_bracket container_expr close_bracket : build_list('$1', '$2').
bracket_arg -> open_bracket container_expr ',' close_bracket : build_list('$1', '$2').

bracket_expr -> dot_bracket_identifier bracket_arg : build_access(build_identifier('$1', nil), '$2').
bracket_expr -> access_expr bracket_arg : build_access('$1', '$2').

%% Bracket access on a module attribute, e.g. @attr[key].
bracket_at_expr -> at_op_eol dot_bracket_identifier bracket_arg :
                     build_access(build_unary_op('$1', build_identifier('$2', nil)), '$3').
bracket_at_expr -> at_op_eol access_expr bracket_arg :
                     build_access(build_unary_op('$1', '$2'), '$3').
|
||||
|
||||
%% Blocks

%% A do ... end block becomes a keyword list [{do, Body} | ExtraClauses],
%% wrapped in one more list so it can be appended to call arguments.
do_block -> do_eoe 'end' : [[{do, nil}]].
do_block -> do_eoe stab end_eoe : [[{do, build_stab(reverse('$2'))}]].
do_block -> do_eoe block_list 'end' : [[{do, nil}|'$2']].
do_block -> do_eoe stab_eoe block_list 'end' : [[{do, build_stab(reverse('$2'))}|'$3']].

%% eoe = end of expression: newline, semicolon, or both.
eoe -> eol : '$1'.
eoe -> ';' : '$1'.
eoe -> eol ';' : '$1'.

fn_eoe -> 'fn' : '$1'.
fn_eoe -> 'fn' eoe : '$1'.

do_eoe -> 'do' : '$1'.
do_eoe -> 'do' eoe : '$1'.

end_eoe -> 'end' : '$1'.
end_eoe -> eoe 'end' : '$2'.

block_eoe -> block_identifier : '$1'.
block_eoe -> block_identifier eoe : '$1'.

%% Stab (->) clause lists are accumulated in reverse order.
stab -> stab_expr : ['$1'].
stab -> stab eoe stab_expr : ['$3'|'$1'].

stab_eoe -> stab : '$1'.
stab_eoe -> stab eoe : '$1'.

%% Here, `element(1, Token)` is the stab operator,
%% while `element(2, Token)` is the expression.
stab_expr -> expr :
               '$1'.
stab_expr -> stab_op_eol_and_expr :
               build_op(element(1, '$1'), [], element(2, '$1')).
stab_expr -> empty_paren stab_op_eol_and_expr :
               build_op(element(1, '$2'), [], element(2, '$2')).
stab_expr -> call_args_no_parens_all stab_op_eol_and_expr :
               build_op(element(1, '$2'), unwrap_when(unwrap_splice('$1')), element(2, '$2')).
stab_expr -> stab_parens_many stab_op_eol_and_expr :
               build_op(element(1, '$2'), unwrap_splice('$1'), element(2, '$2')).
stab_expr -> stab_parens_many when_op expr stab_op_eol_and_expr :
               build_op(element(1, '$4'), [{'when', meta_from_token('$2'), unwrap_splice('$1') ++ ['$3']}], element(2, '$4')).

stab_op_eol_and_expr -> stab_op_eol expr : {'$1', '$2'}.
stab_op_eol_and_expr -> stab_op_eol : warn_empty_stab_clause('$1'), {'$1', nil}.

%% Extra clauses of a do-block (e.g. else/rescue/after style identifiers,
%% tokenized as block_identifier).
block_item -> block_eoe stab_eoe : {?exprs('$1'), build_stab(reverse('$2'))}.
block_item -> block_eoe : {?exprs('$1'), nil}.

block_list -> block_item : ['$1'].
block_list -> block_item block_list : ['$1'|'$2'].
|
||||
|
||||
%% Helpers

%% Delimiter helpers: each opening delimiter tolerates a newline right after
%% it, and each closing delimiter tolerates a newline right before it.
open_paren -> '(' : '$1'.
open_paren -> '(' eol : '$1'.
close_paren -> ')' : '$1'.
close_paren -> eol ')' : '$2'.

empty_paren -> open_paren ')' : '$1'.

open_bracket -> '[' : '$1'.
open_bracket -> '[' eol : '$1'.
close_bracket -> ']' : '$1'.
close_bracket -> eol ']' : '$2'.

open_bit -> '<<' : '$1'.
open_bit -> '<<' eol : '$1'.
close_bit -> '>>' : '$1'.
close_bit -> eol '>>' : '$2'.

open_curly -> '{' : '$1'.
open_curly -> '{' eol : '$1'.
close_curly -> '}' : '$1'.
close_curly -> eol '}' : '$2'.
|
||||
|
||||
% Operators

%% Each *_op_eol nonterminal accepts the operator optionally followed by a
%% newline and yields the operator token. Note dual_op tokens (operators
%% usable both as binary and unary) feed both add_op_eol and unary_op_eol.

add_op_eol -> add_op : '$1'.
add_op_eol -> add_op eol : '$1'.
add_op_eol -> dual_op : '$1'.
add_op_eol -> dual_op eol : '$1'.

mult_op_eol -> mult_op : '$1'.
mult_op_eol -> mult_op eol : '$1'.

two_op_eol -> two_op : '$1'.
two_op_eol -> two_op eol : '$1'.

three_op_eol -> three_op : '$1'.
three_op_eol -> three_op eol : '$1'.

pipe_op_eol -> pipe_op : '$1'.
pipe_op_eol -> pipe_op eol : '$1'.

capture_op_eol -> capture_op : '$1'.
capture_op_eol -> capture_op eol : '$1'.

unary_op_eol -> unary_op : '$1'.
unary_op_eol -> unary_op eol : '$1'.
unary_op_eol -> dual_op : '$1'.
unary_op_eol -> dual_op eol : '$1'.

match_op_eol -> match_op : '$1'.
match_op_eol -> match_op eol : '$1'.

and_op_eol -> and_op : '$1'.
and_op_eol -> and_op eol : '$1'.

or_op_eol -> or_op : '$1'.
or_op_eol -> or_op eol : '$1'.

in_op_eol -> in_op : '$1'.
in_op_eol -> in_op eol : '$1'.

in_match_op_eol -> in_match_op : '$1'.
in_match_op_eol -> in_match_op eol : '$1'.

type_op_eol -> type_op : '$1'.
type_op_eol -> type_op eol : '$1'.

when_op_eol -> when_op : '$1'.
when_op_eol -> when_op eol : '$1'.

stab_op_eol -> stab_op : '$1'.
stab_op_eol -> stab_op eol : '$1'.

at_op_eol -> at_op : '$1'.
at_op_eol -> at_op eol : '$1'.

comp_op_eol -> comp_op : '$1'.
comp_op_eol -> comp_op eol : '$1'.

rel_op_eol -> rel_op : '$1'.
rel_op_eol -> rel_op eol : '$1'.

arrow_op_eol -> arrow_op : '$1'.
arrow_op_eol -> arrow_op eol : '$1'.
|
||||
|
||||
% Dot operator

dot_op -> '.' : '$1'.
dot_op -> '.' eol : '$1'.

%% dot_* nonterminals pair each identifier flavor with an optional
%% qualifying expression on its left (e.g. Mod.fun, expr.fun).
dot_identifier -> identifier : '$1'.
dot_identifier -> matched_expr dot_op identifier : build_dot('$2', '$1', '$3').

dot_alias -> aliases : {'__aliases__', meta_from_token('$1', 0), ?exprs('$1')}.
dot_alias -> matched_expr dot_op aliases : build_dot_alias('$2', '$1', '$3').
dot_alias -> matched_expr dot_op dot_alias_container : build_dot_container('$2', '$1', '$3').

dot_alias_container -> open_curly '}' : [].
dot_alias_container -> open_curly container_args close_curly : '$2'.

dot_op_identifier -> op_identifier : '$1'.
dot_op_identifier -> matched_expr dot_op op_identifier : build_dot('$2', '$1', '$3').

dot_do_identifier -> do_identifier : '$1'.
dot_do_identifier -> matched_expr dot_op do_identifier : build_dot('$2', '$1', '$3').

dot_bracket_identifier -> bracket_identifier : '$1'.
dot_bracket_identifier -> matched_expr dot_op bracket_identifier : build_dot('$2', '$1', '$3').

dot_paren_identifier -> paren_identifier : '$1'.
dot_paren_identifier -> matched_expr dot_op paren_identifier : build_dot('$2', '$1', '$3').

parens_call -> dot_paren_identifier : '$1'.
parens_call -> matched_expr dot_call_op : {'.', meta_from_token('$2'), ['$1']}. % Fun/local calls
|
||||
|
||||
% Function calls with no parentheses

call_args_no_parens_expr -> matched_expr : '$1'.
call_args_no_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1').

%% Comma-separated argument lists are accumulated in reverse order.
call_args_no_parens_comma_expr -> matched_expr ',' call_args_no_parens_expr : ['$3', '$1'].
call_args_no_parens_comma_expr -> call_args_no_parens_comma_expr ',' call_args_no_parens_expr : ['$3'|'$1'].

call_args_no_parens_all -> call_args_no_parens_one : '$1'.
call_args_no_parens_all -> call_args_no_parens_ambig : '$1'.
call_args_no_parens_all -> call_args_no_parens_many : '$1'.

call_args_no_parens_one -> call_args_no_parens_kw : ['$1'].
call_args_no_parens_one -> matched_expr : ['$1'].

call_args_no_parens_ambig -> no_parens_expr : ['$1'].

call_args_no_parens_many -> matched_expr ',' call_args_no_parens_kw : ['$1', '$3'].
call_args_no_parens_many -> call_args_no_parens_comma_expr : reverse('$1').
call_args_no_parens_many -> call_args_no_parens_comma_expr ',' call_args_no_parens_kw : reverse(['$3'|'$1']).

%% Reject parenthesized forms that merely wrap no-parens argument lists.
call_args_no_parens_many_strict -> call_args_no_parens_many : '$1'.
call_args_no_parens_many_strict -> open_paren call_args_no_parens_kw close_paren : throw_no_parens_strict('$1').
call_args_no_parens_many_strict -> open_paren call_args_no_parens_many close_paren : throw_no_parens_strict('$1').

stab_parens_many -> open_paren call_args_no_parens_kw close_paren : ['$2'].
stab_parens_many -> open_paren call_args_no_parens_many close_paren : '$2'.
|
||||
|
||||
% Containers

%% Elements of lists/tuples/maps/bitstrings; bare no-parens calls are
%% rejected inside containers.
container_expr -> matched_expr : '$1'.
container_expr -> unmatched_expr : '$1'.
container_expr -> no_parens_expr : throw_no_parens_container_strict('$1').

container_args_base -> container_expr : ['$1'].
container_args_base -> container_args_base ',' container_expr : ['$3'|'$1'].

container_args -> container_args_base : lists:reverse('$1').
container_args -> container_args_base ',' : lists:reverse('$1').
container_args -> container_args_base ',' kw : lists:reverse(['$3'|'$1']).

% Function calls with parentheses

call_args_parens_expr -> matched_expr : '$1'.
call_args_parens_expr -> unmatched_expr : '$1'.
call_args_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1').

call_args_parens_base -> call_args_parens_expr : ['$1'].
call_args_parens_base -> call_args_parens_base ',' call_args_parens_expr : ['$3'|'$1'].

call_args_parens -> empty_paren : [].
call_args_parens -> open_paren no_parens_expr close_paren : ['$2'].
call_args_parens -> open_paren kw close_paren : ['$2'].
call_args_parens -> open_paren call_args_parens_base close_paren : reverse('$2').
call_args_parens -> open_paren call_args_parens_base ',' kw close_paren : reverse(['$4'|'$2']).
|
||||
|
||||
% KV

%% Keyword keys: plain (kw_identifier) yield their atom directly; the
%% safe/unsafe variants go through build_quoted_atom (quoted keys).
kw_eol -> kw_identifier : ?exprs('$1').
kw_eol -> kw_identifier eol : ?exprs('$1').
kw_eol -> kw_identifier_safe : build_quoted_atom('$1', true).
kw_eol -> kw_identifier_safe eol : build_quoted_atom('$1', true).
kw_eol -> kw_identifier_unsafe : build_quoted_atom('$1', false).
kw_eol -> kw_identifier_unsafe eol : build_quoted_atom('$1', false).

kw_base -> kw_eol container_expr : [{'$1', '$2'}].
kw_base -> kw_base ',' kw_eol container_expr : [{'$3', '$4'}|'$1'].

kw -> kw_base : reverse('$1').
kw -> kw_base ',' : reverse('$1').

call_args_no_parens_kw_expr -> kw_eol matched_expr : {'$1', '$2'}.
call_args_no_parens_kw_expr -> kw_eol no_parens_expr : {'$1', '$2'}.

call_args_no_parens_kw -> call_args_no_parens_kw_expr : ['$1'].
call_args_no_parens_kw -> call_args_no_parens_kw_expr ',' call_args_no_parens_kw : ['$1'|'$3'].

% Lists

list_args -> kw : '$1'.
list_args -> container_args_base : reverse('$1').
list_args -> container_args_base ',' : reverse('$1').
list_args -> container_args_base ',' kw : reverse('$1', '$3').

list -> open_bracket ']' : build_list('$1', []).
list -> open_bracket list_args close_bracket : build_list('$1', '$2').
|
||||
|
||||
% Tuple

tuple -> open_curly '}' : build_tuple('$1', []).
tuple -> open_curly container_args close_curly : build_tuple('$1', '$2').

% Bitstrings

bit_string -> open_bit '>>' : build_bit('$1', []).
bit_string -> open_bit container_args close_bit : build_bit('$1', '$2').

% Map and structs

%% Allow unquote/@something/aliases inside maps and structs.
map_expr -> max_expr : '$1'.
map_expr -> dot_identifier : build_identifier('$1', nil).
map_expr -> at_op_eol map_expr : build_unary_op('$1', '$2').

assoc_op_eol -> assoc_op : '$1'.
assoc_op_eol -> assoc_op eol : '$1'.

%% A key => value pair; all matched/unmatched combinations are accepted.
assoc_expr -> matched_expr assoc_op_eol matched_expr : {'$1', '$3'}.
assoc_expr -> unmatched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}.
assoc_expr -> matched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}.
assoc_expr -> unmatched_expr assoc_op_eol matched_expr : {'$1', '$3'}.
assoc_expr -> map_expr : '$1'.

%% Map update syntax %{base | ...}: {PipeToken, BaseExpr, NewPairs}.
assoc_update -> matched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}.
assoc_update -> unmatched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}.

assoc_update_kw -> matched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.
assoc_update_kw -> unmatched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.

assoc_base -> assoc_expr : ['$1'].
assoc_base -> assoc_base ',' assoc_expr : ['$3'|'$1'].

assoc -> assoc_base : reverse('$1').
assoc -> assoc_base ',' : reverse('$1').

map_op -> '%{}' : '$1'.
map_op -> '%{}' eol : '$1'.

map_close -> kw close_curly : '$1'.
map_close -> assoc close_curly : '$1'.
map_close -> assoc_base ',' kw close_curly : reverse('$1', '$3').

map_args -> open_curly '}' : build_map('$1', []).
map_args -> open_curly map_close : build_map('$1', '$2').
map_args -> open_curly assoc_update close_curly : build_map_update('$1', '$2', []).
map_args -> open_curly assoc_update ',' close_curly : build_map_update('$1', '$2', []).
map_args -> open_curly assoc_update ',' map_close : build_map_update('$1', '$2', '$4').
map_args -> open_curly assoc_update_kw close_curly : build_map_update('$1', '$2', []).

struct_op -> '%' : '$1'.

map -> map_op map_args : '$2'.
map -> struct_op map_expr map_args : {'%', meta_from_token('$1'), ['$2', '$3']}.
map -> struct_op map_expr eol map_args : {'%', meta_from_token('$1'), ['$2', '$4']}.
|
||||
|
||||
Erlang code.

%% Accessors for the {Kind, Location, Value} token tuples produced by the
%% tokenizer, and for AST nodes of shape {Name, Meta, Args}.
-define(file(), get(elixir_parser_file)).
-define(id(Token), element(1, Token)).
-define(location(Token), element(2, Token)).
-define(exprs(Token), element(3, Token)).
-define(meta(Node), element(2, Node)).
%% Unary operators that get rearranged around `in` (see build_op/3).
-define(rearrange_uop(Op), (Op == 'not' orelse Op == '!')).

%% The following directive is needed for (significantly) faster
%% compilation of the generated .erl file by the HiPE compiler
-compile([{hipe, [{regalloc, linear_scan}]}]).
-import(lists, [reverse/1, reverse/2]).
|
||||
|
||||
%% Build AST metadata (a keyword list) from a token's location.
%% The two-argument form additionally prepends a {counter, N} entry.
meta_from_token(Token, Counter) -> [{counter, Counter}|meta_from_token(Token)].
meta_from_token(Token) -> meta_from_token(?location(Token)).

%% Only the line is kept in the metadata; column information is dropped.
meta_from_location({Line, Column, EndColumn})
  when is_integer(Line), is_integer(Column), is_integer(EndColumn) -> [{line, Line}].
|
||||
|
||||
%% Operators

%% Build a binary operator node {Op, Meta, [Left, Right]}. The first clause
%% rearranges `not X in Y` / `!X in Y` so the unary operator wraps the whole
%% `in` expression: {UOp, Meta, [{in, Meta, [Left, Right]}]}.
build_op({_Kind, Location, 'in'}, {UOp, _, [Left]}, Right) when ?rearrange_uop(UOp) ->
  {UOp, meta_from_location(Location), [{'in', meta_from_location(Location), [Left, Right]}]};

build_op({_Kind, Location, Op}, Left, Right) ->
  {Op, meta_from_location(Location), [Left, Right]}.

%% Build a unary operator node {Op, Meta, [Expr]}.
build_unary_op({_Kind, Location, Op}, Expr) ->
  {Op, meta_from_location(Location), [Expr]}.
|
||||
|
||||
%% Lists keep their marker location alongside the elements so bracket
%% access (build_access/2) can reuse it; callers strip it with element/2.
build_list(Marker, Args) ->
  {Args, ?location(Marker)}.

%% Two-element tuples are represented literally; any other arity becomes
%% an explicit {'{}', Meta, Args} node.
build_tuple(_Marker, [Left, Right]) ->
  {Left, Right};
build_tuple(Marker, Args) ->
  {'{}', meta_from_token(Marker), Args}.

build_bit(Marker, Args) ->
  {'<<>>', meta_from_token(Marker), Args}.

build_map(Marker, Args) ->
  {'%{}', meta_from_token(Marker), Args}.

%% Map update %{base | pairs}: the pipe becomes a binary op inside '%{}',
%% with any extra trailing pairs appended to the right-hand side.
build_map_update(Marker, {Pipe, Left, Right}, Extra) ->
  {'%{}', meta_from_token(Marker), [build_op(Pipe, Left, Right ++ Extra)]}.
|
||||
|
||||
%% Blocks

%% Collapse an expression list into a block node. A single expression is
%% returned as-is, except for `not`/`!` applications and small
%% unquote_splicing calls, which are kept wrapped in '__block__' —
%% clause order matters here, as the [Expr] clause would also match them.
build_block([{Op, _, [_]}]=Exprs) when ?rearrange_uop(Op) -> {'__block__', [], Exprs};
build_block([{unquote_splicing, _, Args}]=Exprs) when
  length(Args) =< 2 -> {'__block__', [], Exprs};
build_block([Expr]) -> Expr;
build_block(Exprs) -> {'__block__', [], Exprs}.
|
||||
|
||||
%% Dots

%% Alias.SubAlias concatenates the alias segments into one '__aliases__'
%% node; a plain atom on the left is an error; any other expression is
%% prepended as the head of the alias list.
build_dot_alias(Dot, {'__aliases__', _, Left}, {'aliases', _, Right}) ->
  {'__aliases__', meta_from_token(Dot), Left ++ Right};

build_dot_alias(_Dot, Atom, {'aliases', _, _} = Token) when is_atom(Atom) ->
  throw_bad_atom(Token);

build_dot_alias(Dot, Other, {'aliases', _, Right}) ->
  {'__aliases__', meta_from_token(Dot), [Other|Right]}.

%% Expr.{...} multi-alias container: a call to '{}' through the dot.
build_dot_container(Dot, Left, Right) ->
  Meta = meta_from_token(Dot),
  {{'.', Meta, [Left, '{}']}, Meta, Right}.

%% Qualified reference Left.Right as a '.' node.
build_dot(Dot, Left, Right) ->
  {'.', meta_from_token(Dot), [Left, extract_identifier(Right)]}.

%% Pull the bare identifier atom out of any identifier-flavored token.
extract_identifier({Kind, _, Identifier}) when
    Kind == identifier; Kind == bracket_identifier; Kind == paren_identifier;
    Kind == do_identifier; Kind == op_identifier ->
  Identifier.
|
||||
|
||||
%% Identifiers
|
||||
|
||||
build_nested_parens(Dot, Args1, Args2) ->
|
||||
Identifier = build_identifier(Dot, Args1),
|
||||
Meta = ?meta(Identifier),
|
||||
{Identifier, Meta, Args2}.
|
||||
|
||||
build_identifier({'.', Meta, _} = Dot, Args) ->
|
||||
FArgs = case Args of
|
||||
nil -> [];
|
||||
_ -> Args
|
||||
end,
|
||||
{Dot, Meta, FArgs};
|
||||
|
||||
build_identifier({op_identifier, Location, Identifier}, [Arg]) ->
|
||||
{Identifier, [{ambiguous_op, nil}|meta_from_location(Location)], [Arg]};
|
||||
|
||||
build_identifier({_, Location, Identifier}, Args) ->
|
||||
{Identifier, meta_from_location(Location), Args}.
|
||||
|
||||
%% Fn
|
||||
|
||||
build_fn(Op, [{'->', _, [_, _]}|_] = Stab) ->
|
||||
{fn, meta_from_token(Op), build_stab(Stab)};
|
||||
build_fn(Op, _Stab) ->
|
||||
throw(meta_from_token(Op), "expected clauses to be defined with -> inside: ", "'fn'").
|
||||
|
||||
%% Access
|
||||
|
||||
build_access(Expr, {List, Location}) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{{'.', Meta, ['Elixir.Access', get]}, Meta, [Expr, List]}.
|
||||
|
||||
%% Interpolation aware
|
||||
|
||||
build_sigil({sigil, Location, Sigil, Parts, Modifiers}) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{list_to_atom("sigil_" ++ [Sigil]), Meta, [{'<<>>', Meta, string_parts(Parts)}, Modifiers]}.
|
||||
|
||||
build_bin_string({bin_string, _Location, [H]}) when is_binary(H) ->
|
||||
H;
|
||||
build_bin_string({bin_string, Location, Args}) ->
|
||||
{'<<>>', meta_from_location(Location), string_parts(Args)}.
|
||||
|
||||
build_list_string({list_string, _Location, [H]}) when is_binary(H) ->
|
||||
elixir_utils:characters_to_list(H);
|
||||
build_list_string({list_string, Location, Args}) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{{'.', Meta, ['Elixir.String', to_char_list]}, Meta, [{'<<>>', Meta, string_parts(Args)}]}.
|
||||
|
||||
build_quoted_atom({_, _Location, [H]}, Safe) when is_binary(H) ->
|
||||
Op = binary_to_atom_op(Safe), erlang:Op(H, utf8);
|
||||
build_quoted_atom({_, Location, Args}, Safe) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{{'.', Meta, [erlang, binary_to_atom_op(Safe)]}, Meta, [{'<<>>', Meta, string_parts(Args)}, utf8]}.
|
||||
|
||||
binary_to_atom_op(true) -> binary_to_existing_atom;
|
||||
binary_to_atom_op(false) -> binary_to_atom.
|
||||
|
||||
string_parts(Parts) ->
|
||||
[string_part(Part) || Part <- Parts].
|
||||
string_part(Binary) when is_binary(Binary) ->
|
||||
Binary;
|
||||
string_part({Location, Tokens}) ->
|
||||
Form = string_tokens_parse(Tokens),
|
||||
Meta = meta_from_location(Location),
|
||||
{'::', Meta, [{{'.', Meta, ['Elixir.Kernel', to_string]}, Meta, [Form]}, {binary, Meta, nil}]}.
|
||||
|
||||
string_tokens_parse(Tokens) ->
|
||||
case parse(Tokens) of
|
||||
{ok, Forms} -> Forms;
|
||||
{error, _} = Error -> throw(Error)
|
||||
end.
|
||||
|
||||
%% Keywords
|
||||
|
||||
build_stab([{'->', Meta, [Left, Right]}|T]) ->
|
||||
build_stab(Meta, T, Left, [Right], []);
|
||||
|
||||
build_stab(Else) ->
|
||||
build_block(Else).
|
||||
|
||||
build_stab(Old, [{'->', New, [Left, Right]}|T], Marker, Temp, Acc) ->
|
||||
H = {'->', Old, [Marker, build_block(reverse(Temp))]},
|
||||
build_stab(New, T, Left, [Right], [H|Acc]);
|
||||
|
||||
build_stab(Meta, [H|T], Marker, Temp, Acc) ->
|
||||
build_stab(Meta, T, Marker, [H|Temp], Acc);
|
||||
|
||||
build_stab(Meta, [], Marker, Temp, Acc) ->
|
||||
H = {'->', Meta, [Marker, build_block(reverse(Temp))]},
|
||||
reverse([H|Acc]).
|
||||
|
||||
%% Every time the parser sees a (unquote_splicing())
|
||||
%% it assumes that a block is being spliced, wrapping
|
||||
%% the splicing in a __block__. But in the stab clause,
|
||||
%% we can have (unquote_splicing(1, 2, 3)) -> :ok, in such
|
||||
%% case, we don't actually want the block, since it is
|
||||
%% an arg style call. unwrap_splice unwraps the splice
|
||||
%% from such blocks.
|
||||
unwrap_splice([{'__block__', [], [{unquote_splicing, _, _}] = Splice}]) ->
|
||||
Splice;
|
||||
|
||||
unwrap_splice(Other) -> Other.
|
||||
|
||||
unwrap_when(Args) ->
|
||||
case elixir_utils:split_last(Args) of
|
||||
{Start, {'when', Meta, [_, _] = End}} ->
|
||||
[{'when', Meta, Start ++ End}];
|
||||
{_, _} ->
|
||||
Args
|
||||
end.
|
||||
|
||||
to_block([One]) -> One;
|
||||
to_block(Other) -> {'__block__', [], reverse(Other)}.
|
||||
|
||||
%% Warnings and errors
|
||||
|
||||
throw(Meta, Error, Token) ->
|
||||
Line =
|
||||
case lists:keyfind(line, 1, Meta) of
|
||||
{line, L} -> L;
|
||||
false -> 0
|
||||
end,
|
||||
throw({error, {Line, ?MODULE, [Error, Token]}}).
|
||||
|
||||
throw_bad_atom(Token) ->
|
||||
throw(meta_from_token(Token), "atom cannot be followed by an alias. If the '.' was meant to be "
|
||||
"part of the atom's name, the atom name must be quoted. Syntax error before: ", "'.'").
|
||||
|
||||
throw_no_parens_strict(Token) ->
|
||||
throw(meta_from_token(Token), "unexpected parentheses. If you are making a "
|
||||
"function call, do not insert spaces between the function name and the "
|
||||
"opening parentheses. Syntax error before: ", "'('").
|
||||
|
||||
throw_no_parens_many_strict(Node) ->
|
||||
throw(?meta(Node),
|
||||
"unexpected comma. Parentheses are required to solve ambiguity in nested calls.\n\n"
|
||||
"This error happens when you have nested function calls without parentheses. "
|
||||
"For example:\n\n"
|
||||
" one a, two b, c, d\n\n"
|
||||
"In the example above, we don't know if the parameters \"c\" and \"d\" apply "
|
||||
"to the function \"one\" or \"two\". You can solve this by explicitly adding "
|
||||
"parentheses:\n\n"
|
||||
" one a, two(b, c, d)\n\n"
|
||||
"Elixir cannot compile otherwise. Syntax error before: ", "','").
|
||||
|
||||
throw_no_parens_container_strict(Node) ->
|
||||
throw(?meta(Node),
|
||||
"unexpected comma. Parentheses are required to solve ambiguity inside containers.\n\n"
|
||||
"This error may happen when you forget a comma in a list or other container:\n\n"
|
||||
" [a, b c, d]\n\n"
|
||||
"Or when you have ambiguous calls:\n\n"
|
||||
" [one, two three, four, five]\n\n"
|
||||
"In the example above, we don't know if the parameters \"four\" and \"five\" "
|
||||
"belongs to the list or the function \"two\". You can solve this by explicitly "
|
||||
"adding parentheses:\n\n"
|
||||
" [one, two(three, four), five]\n\n"
|
||||
"Elixir cannot compile otherwise. Syntax error before: ", "','").
|
||||
|
||||
throw_invalid_kw_identifier({_, _, do} = Token) ->
|
||||
throw(meta_from_token(Token), elixir_tokenizer:invalid_do_error("unexpected keyword \"do:\""), "'do:'");
|
||||
throw_invalid_kw_identifier({_, _, KW} = Token) ->
|
||||
throw(meta_from_token(Token), "syntax error before: ", "'" ++ atom_to_list(KW) ++ "':").
|
||||
|
||||
%% TODO: Make those warnings errors.
|
||||
warn_empty_stab_clause({stab_op, {Line, _Begin, _End}, '->'}) ->
|
||||
elixir_errors:warn(Line, ?file(),
|
||||
"an expression is always required on the right side of ->. "
|
||||
"Please provide a value after ->").
|
||||
|
||||
warn_pipe({arrow_op, {Line, _Begin, _End}, Op}, {_, [_|_], [_|_]}) ->
|
||||
elixir_errors:warn(Line, ?file(),
|
||||
io_lib:format(
|
||||
"you are piping into a function call without parentheses, which may be ambiguous. "
|
||||
"Please wrap the function you are piping into in parentheses. For example:\n\n"
|
||||
" foo 1 ~ts bar 2 ~ts baz 3\n\n"
|
||||
"Should be written as:\n\n"
|
||||
" foo(1) ~ts bar(2) ~ts baz(3)\n",
|
||||
[Op, Op, Op, Op]
|
||||
)
|
||||
);
|
||||
warn_pipe(_Token, _) ->
|
||||
ok.
|
||||
256
samples/Erlang/lfe_scan.xrl
Normal file
256
samples/Erlang/lfe_scan.xrl
Normal file
@@ -0,0 +1,256 @@
|
||||
%% Copyright (c) 2008-2013 Robert Virding
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
|
||||
%% File : lfe_scan.xrl
|
||||
%% Author : Robert Virding
|
||||
%% Purpose : Token definitions for Lisp Flavoured Erlang.
|
||||
|
||||
Definitions.
|
||||
B = [01]
|
||||
O = [0-7]
|
||||
D = [0-9]
|
||||
H = [0-9a-fA-F]
|
||||
B36 = [0-9a-zA-Z]
|
||||
U = [A-Z]
|
||||
L = [a-z]
|
||||
A = ({U}|{L})
|
||||
DEL = [][()}{";\000-\s]
|
||||
SYM = [^][()}{";\000-\s\177-\237]
|
||||
SSYM = [^][()}{"|;#`',\000-\s\177-\237]
|
||||
WS = ([\000-\s]|;[^\n]*)
|
||||
|
||||
Rules.
|
||||
%% Bracketed Comments using #| foo |#
|
||||
#{D}*\|[^\|]*\|+([^#\|][^\|]*\|+)*# :
|
||||
block_comment(string:substr(TokenChars, 3)).
|
||||
|
||||
%% Separators
|
||||
' : {token,{'\'',TokenLine}}.
|
||||
` : {token,{'`',TokenLine}}.
|
||||
, : {token,{',',TokenLine}}.
|
||||
,@ : {token,{',@',TokenLine}}.
|
||||
\. : {token,{'.',TokenLine}}.
|
||||
[][()}{] : {token,{list_to_atom(TokenChars),TokenLine}}.
|
||||
|
||||
#{D}*[bB]\( : {token,{'#B(',TokenLine}}.
|
||||
#{D}*[mM]\( : {token,{'#M(',TokenLine}}.
|
||||
#{D}*\( : {token,{'#(',TokenLine}}.
|
||||
#{D}*\. : {token,{'#.',TokenLine}}.
|
||||
|
||||
#{D}*` : {token,{'#`',TokenLine}}.
|
||||
#{D}*; : {token,{'#;',TokenLine}}.
|
||||
#{D}*, : {token,{'#,',TokenLine}}.
|
||||
#{D}*,@ : {token,{'#,@',TokenLine}}.
|
||||
|
||||
%% Characters
|
||||
#{D}*\\(x{H}+|.) : char_token(skip_past(TokenChars, $\\, $\\), TokenLine).
|
||||
|
||||
%% Based numbers
|
||||
#{D}*\*{SYM}+ : base_token(skip_past(TokenChars, $*, $*), 2, TokenLine).
|
||||
#{D}*[bB]{SYM}+ : base_token(skip_past(TokenChars, $b, $B), 2, TokenLine).
|
||||
#{D}*[oO]{SYM}+ : base_token(skip_past(TokenChars, $o, $O), 8, TokenLine).
|
||||
#{D}*[dD]{SYM}+ : base_token(skip_past(TokenChars, $d, $D), 10, TokenLine).
|
||||
#{D}*[xX]{SYM}+ : base_token(skip_past(TokenChars, $x, $X), 16, TokenLine).
|
||||
#{D}*[rR]{SYM}+ :
|
||||
%% Scan over digit chars to get base.
|
||||
{Base,[_|Ds]} = base1(tl(TokenChars), 10, 0),
|
||||
base_token(Ds, Base, TokenLine).
|
||||
|
||||
%% String
|
||||
"(\\x{H}+;|\\.|[^"\\])*" :
|
||||
%% Strip quotes.
|
||||
S = string:substr(TokenChars, 2, TokenLen - 2),
|
||||
{token,{string,TokenLine,chars(S)}}.
|
||||
%% Binary string
|
||||
#"(\\x{H}+;|\\.|[^"\\])*" :
|
||||
%% Strip quotes.
|
||||
S = string:substr(TokenChars, 3, TokenLen - 3),
|
||||
Bin = unicode:characters_to_binary(chars(S), utf8, utf8),
|
||||
{token,{binary,TokenLine,Bin}}.
|
||||
%% Symbols
|
||||
\|(\\x{H}+;|\\.|[^|\\])*\| :
|
||||
%% Strip quotes.
|
||||
S = string:substr(TokenChars, 2, TokenLen - 2),
|
||||
symbol_token(chars(S), TokenLine).
|
||||
%% Funs
|
||||
#'{SSYM}{SYM}*/{D}+ :
|
||||
%% Strip sharpsign single-quote.
|
||||
FunStr = string:substr(TokenChars,3),
|
||||
{token,{'#\'',TokenLine,FunStr}}.
|
||||
%% Atoms
|
||||
[+-]?{D}+ :
|
||||
case catch {ok,list_to_integer(TokenChars)} of
|
||||
{ok,I} -> {token,{number,TokenLine,I}};
|
||||
_ -> {error,"illegal integer"}
|
||||
end.
|
||||
[+-]?{D}+\.{D}+([eE][+-]?{D}+)? :
|
||||
case catch {ok,list_to_float(TokenChars)} of
|
||||
{ok,F} -> {token,{number,TokenLine,F}};
|
||||
_ -> {error,"illegal float"}
|
||||
end.
|
||||
{SSYM}{SYM}* :
|
||||
symbol_token(TokenChars, TokenLine).
|
||||
{WS}+ : skip_token.
|
||||
|
||||
Erlang code.
|
||||
%% Copyright (c) 2008-2013 Robert Virding
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
|
||||
%% File : lfe_scan.erl
|
||||
%% Author : Robert Virding
|
||||
%% Purpose : Token definitions for Lisp Flavoured Erlang.
|
||||
|
||||
-export([start_symbol_char/1,symbol_char/1]).
|
||||
|
||||
-import(string, [substr/2,substr/3]).
|
||||
|
||||
%% start_symbol_char(Char) -> true | false.
|
||||
%% symbol_char(Char) -> true | false.
|
||||
%% Define start symbol chars and symbol chars.
|
||||
|
||||
start_symbol_char($#) -> false;
|
||||
start_symbol_char($`) -> false;
|
||||
start_symbol_char($') -> false; %'
|
||||
start_symbol_char($,) -> false;
|
||||
start_symbol_char($|) -> false; %Symbol quote character
|
||||
start_symbol_char(C) -> symbol_char(C).
|
||||
|
||||
symbol_char($() -> false;
|
||||
symbol_char($)) -> false;
|
||||
symbol_char($[) -> false;
|
||||
symbol_char($]) -> false;
|
||||
symbol_char(${) -> false;
|
||||
symbol_char($}) -> false;
|
||||
symbol_char($") -> false;
|
||||
symbol_char($;) -> false;
|
||||
symbol_char(C) -> ((C > $\s) and (C =< $~)) orelse (C > $\240).
|
||||
|
||||
%% symbol_token(Chars, Line) -> {token,{symbol,Line,Symbol}} | {error,E}.
|
||||
%% Build a symbol from list of legal characters, else error.
|
||||
|
||||
symbol_token(Cs, L) ->
|
||||
case catch {ok,list_to_atom(Cs)} of
|
||||
{ok,S} -> {token,{symbol,L,S}};
|
||||
_ -> {error,"illegal symbol"}
|
||||
end.
|
||||
|
||||
%% base_token(Chars, Base, Line) -> Integer.
|
||||
%% Convert a string of Base characters into a number. We only allow
|
||||
%% base betqeen 2 and 36, and an optional sign character first.
|
||||
|
||||
base_token(_, B, _) when B < 2; B > 36 ->
|
||||
{error,"illegal number base"};
|
||||
base_token([$+|Cs], B, L) -> base_token(Cs, B, +1, L);
|
||||
base_token([$-|Cs], B, L) -> base_token(Cs, B, -1, L);
|
||||
base_token(Cs, B, L) -> base_token(Cs, B, +1, L).
|
||||
|
||||
base_token(Cs, B, S, L) ->
|
||||
case base1(Cs, B, 0) of
|
||||
{N,[]} -> {token,{number,L,S*N}};
|
||||
{_,_} -> {error,"illegal based number"}
|
||||
end.
|
||||
|
||||
base1([C|Cs], Base, SoFar) when C >= $0, C =< $9, C < Base + $0 ->
|
||||
Next = SoFar * Base + (C - $0),
|
||||
base1(Cs, Base, Next);
|
||||
base1([C|Cs], Base, SoFar) when C >= $a, C =< $z, C < Base + $a - 10 ->
|
||||
Next = SoFar * Base + (C - $a + 10),
|
||||
base1(Cs, Base, Next);
|
||||
base1([C|Cs], Base, SoFar) when C >= $A, C =< $Z, C < Base + $A - 10 ->
|
||||
Next = SoFar * Base + (C - $A + 10),
|
||||
base1(Cs, Base, Next);
|
||||
base1([C|Cs], _Base, SoFar) -> {SoFar,[C|Cs]};
|
||||
base1([], _Base, N) -> {N,[]}.
|
||||
|
||||
-define(IS_UNICODE(C), ((C >= 0) and (C =< 16#10FFFF))).
|
||||
|
||||
%% char_token(InputChars, Line) -> {token,{number,L,N}} | {error,E}.
|
||||
%% Convert an input string into the corresponding character. For a
|
||||
%% sequence of hex characters we check resultant is code is in the
|
||||
%% unicode range.
|
||||
|
||||
char_token([$x,C|Cs], L) ->
|
||||
case base1([C|Cs], 16, 0) of
|
||||
{N,[]} when ?IS_UNICODE(N) -> {token,{number,L,N}};
|
||||
_ -> {error,"illegal character"}
|
||||
end;
|
||||
char_token([C], L) -> {token,{number,L,C}}.
|
||||
|
||||
%% chars(InputChars) -> Chars.
|
||||
%% Convert an input string into the corresponding string characters.
|
||||
%% We know that the input string is correct.
|
||||
|
||||
chars([$\\,$x,C|Cs0]) ->
|
||||
case hex_char(C) of
|
||||
true ->
|
||||
case base1([C|Cs0], 16, 0) of
|
||||
{N,[$;|Cs1]} -> [N|chars(Cs1)];
|
||||
_Other -> [escape_char($x)|chars([C|Cs0])]
|
||||
end;
|
||||
false -> [escape_char($x)|chars([C|Cs0])]
|
||||
end;
|
||||
chars([$\\,C|Cs]) -> [escape_char(C)|chars(Cs)];
|
||||
chars([C|Cs]) -> [C|chars(Cs)];
|
||||
chars([]) -> [].
|
||||
|
||||
hex_char(C) when C >= $0, C =< $9 -> true;
|
||||
hex_char(C) when C >= $a, C =< $f -> true;
|
||||
hex_char(C) when C >= $A, C =< $F -> true;
|
||||
hex_char(_) -> false.
|
||||
|
||||
escape_char($b) -> $\b; %\b = BS
|
||||
escape_char($t) -> $\t; %\t = TAB
|
||||
escape_char($n) -> $\n; %\n = LF
|
||||
escape_char($v) -> $\v; %\v = VT
|
||||
escape_char($f) -> $\f; %\f = FF
|
||||
escape_char($r) -> $\r; %\r = CR
|
||||
escape_char($e) -> $\e; %\e = ESC
|
||||
escape_char($s) -> $\s; %\s = SPC
|
||||
escape_char($d) -> $\d; %\d = DEL
|
||||
escape_char(C) -> C.
|
||||
|
||||
%% Block Comment:
|
||||
%% Provide a sensible error when people attempt to include nested
|
||||
%% comments because currently the parser cannot process them without
|
||||
%% a rebuild. But simply exploding on a '#|' is not going to be that
|
||||
%% helpful.
|
||||
|
||||
block_comment(TokenChars) ->
|
||||
%% Check we're not opening another comment block.
|
||||
case string:str(TokenChars, "#|") of
|
||||
0 -> skip_token; %% No nesting found
|
||||
_ -> {error, "illegal nested block comment"}
|
||||
end.
|
||||
|
||||
%% skip_until(String, Char1, Char2) -> String.
|
||||
%% skip_past(String, Char1, Char2) -> String.
|
||||
|
||||
%% skip_until([C|_]=Cs, C1, C2) when C =:= C1 ; C =:= C2 -> Cs;
|
||||
%% skip_until([_|Cs], C1, C2) -> skip_until(Cs, C1, C2);
|
||||
%% skip_until([], _, _) -> [].
|
||||
|
||||
skip_past([C|Cs], C1, C2) when C =:= C1 ; C =:= C2 -> Cs;
|
||||
skip_past([_|Cs], C1, C2) -> skip_past(Cs, C1, C2);
|
||||
skip_past([], _, _) -> [].
|
||||
625
samples/Java/GrammarKit.java
Normal file
625
samples/Java/GrammarKit.java
Normal file
@@ -0,0 +1,625 @@
|
||||
// This is a generated file. Not intended for manual editing.
|
||||
package org.intellij.grammar.parser;
|
||||
|
||||
import com.intellij.lang.PsiBuilder;
|
||||
import com.intellij.lang.PsiBuilder.Marker;
|
||||
import static org.intellij.grammar.psi.BnfTypes.*;
|
||||
import static org.intellij.grammar.parser.GeneratedParserUtilBase.*;
|
||||
import com.intellij.psi.tree.IElementType;
|
||||
import com.intellij.lang.ASTNode;
|
||||
import com.intellij.psi.tree.TokenSet;
|
||||
import com.intellij.lang.PsiParser;
|
||||
import com.intellij.lang.LightPsiParser;
|
||||
|
||||
@SuppressWarnings({"SimplifiableIfStatement", "UnusedAssignment"})
|
||||
public class GrammarParser implements PsiParser, LightPsiParser {
|
||||
|
||||
public ASTNode parse(IElementType t, PsiBuilder b) {
|
||||
parseLight(t, b);
|
||||
return b.getTreeBuilt();
|
||||
}
|
||||
|
||||
public void parseLight(IElementType t, PsiBuilder b) {
|
||||
boolean r;
|
||||
b = adapt_builder_(t, b, this, EXTENDS_SETS_);
|
||||
Marker m = enter_section_(b, 0, _COLLAPSE_, null);
|
||||
if (t == BNF_ATTR) {
|
||||
r = attr(b, 0);
|
||||
}
|
||||
else if (t == BNF_ATTR_PATTERN) {
|
||||
r = attr_pattern(b, 0);
|
||||
}
|
||||
else if (t == BNF_ATTR_VALUE) {
|
||||
r = attr_value(b, 0);
|
||||
}
|
||||
else if (t == BNF_ATTRS) {
|
||||
r = attrs(b, 0);
|
||||
}
|
||||
else if (t == BNF_CHOICE) {
|
||||
r = choice(b, 0);
|
||||
}
|
||||
else if (t == BNF_EXPRESSION) {
|
||||
r = expression(b, 0);
|
||||
}
|
||||
else if (t == BNF_LITERAL_EXPRESSION) {
|
||||
r = literal_expression(b, 0);
|
||||
}
|
||||
else if (t == BNF_MODIFIER) {
|
||||
r = modifier(b, 0);
|
||||
}
|
||||
else if (t == BNF_PAREN_EXPRESSION) {
|
||||
r = paren_expression(b, 0);
|
||||
}
|
||||
else if (t == BNF_PREDICATE) {
|
||||
r = predicate(b, 0);
|
||||
}
|
||||
else if (t == BNF_PREDICATE_SIGN) {
|
||||
r = predicate_sign(b, 0);
|
||||
}
|
||||
else if (t == BNF_QUANTIFIED) {
|
||||
r = quantified(b, 0);
|
||||
}
|
||||
else if (t == BNF_QUANTIFIER) {
|
||||
r = quantifier(b, 0);
|
||||
}
|
||||
else if (t == BNF_REFERENCE_OR_TOKEN) {
|
||||
r = reference_or_token(b, 0);
|
||||
}
|
||||
else if (t == BNF_RULE) {
|
||||
r = rule(b, 0);
|
||||
}
|
||||
else if (t == BNF_SEQUENCE) {
|
||||
r = sequence(b, 0);
|
||||
}
|
||||
else if (t == BNF_STRING_LITERAL_EXPRESSION) {
|
||||
r = string_literal_expression(b, 0);
|
||||
}
|
||||
else {
|
||||
r = parse_root_(t, b, 0);
|
||||
}
|
||||
exit_section_(b, 0, m, t, r, true, TRUE_CONDITION);
|
||||
}
|
||||
|
||||
protected boolean parse_root_(IElementType t, PsiBuilder b, int l) {
|
||||
return grammar(b, l + 1);
|
||||
}
|
||||
|
||||
public static final TokenSet[] EXTENDS_SETS_ = new TokenSet[] {
|
||||
create_token_set_(BNF_LITERAL_EXPRESSION, BNF_STRING_LITERAL_EXPRESSION),
|
||||
create_token_set_(BNF_CHOICE, BNF_EXPRESSION, BNF_LITERAL_EXPRESSION, BNF_PAREN_EXPRESSION,
|
||||
BNF_PREDICATE, BNF_QUANTIFIED, BNF_REFERENCE_OR_TOKEN, BNF_SEQUENCE,
|
||||
BNF_STRING_LITERAL_EXPRESSION),
|
||||
};
|
||||
|
||||
/* ********************************************************** */
|
||||
// id attr_pattern? '=' attr_value ';'?
|
||||
public static boolean attr(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "attr")) return false;
|
||||
boolean r, p;
|
||||
Marker m = enter_section_(b, l, _NONE_, "<attr>");
|
||||
r = consumeToken(b, BNF_ID);
|
||||
p = r; // pin = 1
|
||||
r = r && report_error_(b, attr_1(b, l + 1));
|
||||
r = p && report_error_(b, consumeToken(b, BNF_OP_EQ)) && r;
|
||||
r = p && report_error_(b, attr_value(b, l + 1)) && r;
|
||||
r = p && attr_4(b, l + 1) && r;
|
||||
exit_section_(b, l, m, BNF_ATTR, r, p, attr_recover_until_parser_);
|
||||
return r || p;
|
||||
}
|
||||
|
||||
// attr_pattern?
|
||||
private static boolean attr_1(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "attr_1")) return false;
|
||||
attr_pattern(b, l + 1);
|
||||
return true;
|
||||
}
|
||||
|
||||
// ';'?
|
||||
private static boolean attr_4(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "attr_4")) return false;
|
||||
consumeToken(b, BNF_SEMICOLON);
|
||||
return true;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// '(' string ')'
|
||||
public static boolean attr_pattern(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "attr_pattern")) return false;
|
||||
if (!nextTokenIs(b, BNF_LEFT_PAREN)) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = consumeToken(b, BNF_LEFT_PAREN);
|
||||
r = r && consumeToken(b, BNF_STRING);
|
||||
r = r && consumeToken(b, BNF_RIGHT_PAREN);
|
||||
exit_section_(b, m, BNF_ATTR_PATTERN, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// !'}'
|
||||
static boolean attr_recover_until(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "attr_recover_until")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _NOT_, null);
|
||||
r = !consumeToken(b, BNF_RIGHT_BRACE);
|
||||
exit_section_(b, l, m, null, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// (reference_or_token | literal_expression) !'='
|
||||
public static boolean attr_value(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "attr_value")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _NONE_, "<attr value>");
|
||||
r = attr_value_0(b, l + 1);
|
||||
r = r && attr_value_1(b, l + 1);
|
||||
exit_section_(b, l, m, BNF_ATTR_VALUE, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
// reference_or_token | literal_expression
|
||||
private static boolean attr_value_0(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "attr_value_0")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = reference_or_token(b, l + 1);
|
||||
if (!r) r = literal_expression(b, l + 1);
|
||||
exit_section_(b, m, null, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
// !'='
|
||||
private static boolean attr_value_1(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "attr_value_1")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _NOT_, null);
|
||||
r = !consumeToken(b, BNF_OP_EQ);
|
||||
exit_section_(b, l, m, null, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// '{' attr* '}'
|
||||
public static boolean attrs(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "attrs")) return false;
|
||||
if (!nextTokenIs(b, BNF_LEFT_BRACE)) return false;
|
||||
boolean r, p;
|
||||
Marker m = enter_section_(b, l, _NONE_, null);
|
||||
r = consumeToken(b, BNF_LEFT_BRACE);
|
||||
p = r; // pin = 1
|
||||
r = r && report_error_(b, attrs_1(b, l + 1));
|
||||
r = p && consumeToken(b, BNF_RIGHT_BRACE) && r;
|
||||
exit_section_(b, l, m, BNF_ATTRS, r, p, null);
|
||||
return r || p;
|
||||
}
|
||||
|
||||
// attr*
|
||||
private static boolean attrs_1(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "attrs_1")) return false;
|
||||
int c = current_position_(b);
|
||||
while (true) {
|
||||
if (!attr(b, l + 1)) break;
|
||||
if (!empty_element_parsed_guard_(b, "attrs_1", c)) break;
|
||||
c = current_position_(b);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// '{' sequence ('|' sequence)* '}' | sequence choice_tail*
|
||||
public static boolean choice(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "choice")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _COLLAPSE_, "<choice>");
|
||||
r = choice_0(b, l + 1);
|
||||
if (!r) r = choice_1(b, l + 1);
|
||||
exit_section_(b, l, m, BNF_CHOICE, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
// '{' sequence ('|' sequence)* '}'
|
||||
private static boolean choice_0(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "choice_0")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = consumeToken(b, BNF_LEFT_BRACE);
|
||||
r = r && sequence(b, l + 1);
|
||||
r = r && choice_0_2(b, l + 1);
|
||||
r = r && consumeToken(b, BNF_RIGHT_BRACE);
|
||||
exit_section_(b, m, null, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
// ('|' sequence)*
|
||||
private static boolean choice_0_2(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "choice_0_2")) return false;
|
||||
int c = current_position_(b);
|
||||
while (true) {
|
||||
if (!choice_0_2_0(b, l + 1)) break;
|
||||
if (!empty_element_parsed_guard_(b, "choice_0_2", c)) break;
|
||||
c = current_position_(b);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// '|' sequence
|
||||
private static boolean choice_0_2_0(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "choice_0_2_0")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = consumeToken(b, BNF_OP_OR);
|
||||
r = r && sequence(b, l + 1);
|
||||
exit_section_(b, m, null, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
// sequence choice_tail*
|
||||
private static boolean choice_1(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "choice_1")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = sequence(b, l + 1);
|
||||
r = r && choice_1_1(b, l + 1);
|
||||
exit_section_(b, m, null, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
// choice_tail*
|
||||
private static boolean choice_1_1(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "choice_1_1")) return false;
|
||||
int c = current_position_(b);
|
||||
while (true) {
|
||||
if (!choice_tail(b, l + 1)) break;
|
||||
if (!empty_element_parsed_guard_(b, "choice_1_1", c)) break;
|
||||
c = current_position_(b);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// '|' sequence
|
||||
static boolean choice_tail(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "choice_tail")) return false;
|
||||
if (!nextTokenIs(b, BNF_OP_OR)) return false;
|
||||
boolean r, p;
|
||||
Marker m = enter_section_(b, l, _NONE_, null);
|
||||
r = consumeToken(b, BNF_OP_OR);
|
||||
p = r; // pin = 1
|
||||
r = r && sequence(b, l + 1);
|
||||
exit_section_(b, l, m, null, r, p, null);
|
||||
return r || p;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// choice?
|
||||
public static boolean expression(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "expression")) return false;
|
||||
Marker m = enter_section_(b, l, _COLLAPSE_, "<expression>");
|
||||
choice(b, l + 1);
|
||||
exit_section_(b, l, m, BNF_EXPRESSION, true, false, null);
|
||||
return true;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// (attrs | rule) *
|
||||
static boolean grammar(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "grammar")) return false;
|
||||
int c = current_position_(b);
|
||||
while (true) {
|
||||
if (!grammar_0(b, l + 1)) break;
|
||||
if (!empty_element_parsed_guard_(b, "grammar", c)) break;
|
||||
c = current_position_(b);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// attrs | rule
|
||||
private static boolean grammar_0(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "grammar_0")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = attrs(b, l + 1);
|
||||
if (!r) r = rule(b, l + 1);
|
||||
exit_section_(b, m, null, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// string_literal_expression | number
|
||||
public static boolean literal_expression(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "literal_expression")) return false;
|
||||
if (!nextTokenIs(b, "<literal expression>", BNF_NUMBER, BNF_STRING)) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _COLLAPSE_, "<literal expression>");
|
||||
r = string_literal_expression(b, l + 1);
|
||||
if (!r) r = consumeToken(b, BNF_NUMBER);
|
||||
exit_section_(b, l, m, BNF_LITERAL_EXPRESSION, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// 'private' | 'external' | 'wrapped'
|
||||
public static boolean modifier(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "modifier")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _NONE_, "<modifier>");
|
||||
r = consumeToken(b, "private");
|
||||
if (!r) r = consumeToken(b, "external");
|
||||
if (!r) r = consumeToken(b, "wrapped");
|
||||
exit_section_(b, l, m, BNF_MODIFIER, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// quantified | predicate
|
||||
static boolean option(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "option")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = quantified(b, l + 1);
|
||||
if (!r) r = predicate(b, l + 1);
|
||||
exit_section_(b, m, null, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// '(' expression ')'
|
||||
public static boolean paren_expression(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "paren_expression")) return false;
|
||||
if (!nextTokenIs(b, BNF_LEFT_PAREN)) return false;
|
||||
boolean r, p;
|
||||
Marker m = enter_section_(b, l, _NONE_, null);
|
||||
r = consumeToken(b, BNF_LEFT_PAREN);
|
||||
p = r; // pin = 1
|
||||
r = r && report_error_(b, expression(b, l + 1));
|
||||
r = p && consumeToken(b, BNF_RIGHT_PAREN) && r;
|
||||
exit_section_(b, l, m, BNF_PAREN_EXPRESSION, r, p, null);
|
||||
return r || p;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// predicate_sign simple
|
||||
public static boolean predicate(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "predicate")) return false;
|
||||
if (!nextTokenIs(b, "<predicate>", BNF_OP_NOT, BNF_OP_AND)) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _NONE_, "<predicate>");
|
||||
r = predicate_sign(b, l + 1);
|
||||
r = r && simple(b, l + 1);
|
||||
exit_section_(b, l, m, BNF_PREDICATE, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// '&' | '!'
|
||||
public static boolean predicate_sign(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "predicate_sign")) return false;
|
||||
if (!nextTokenIs(b, "<predicate sign>", BNF_OP_NOT, BNF_OP_AND)) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _NONE_, "<predicate sign>");
|
||||
r = consumeToken(b, BNF_OP_AND);
|
||||
if (!r) r = consumeToken(b, BNF_OP_NOT);
|
||||
exit_section_(b, l, m, BNF_PREDICATE_SIGN, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// '[' expression ']' | simple quantifier?
|
||||
public static boolean quantified(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "quantified")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _COLLAPSE_, "<quantified>");
|
||||
r = quantified_0(b, l + 1);
|
||||
if (!r) r = quantified_1(b, l + 1);
|
||||
exit_section_(b, l, m, BNF_QUANTIFIED, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
// '[' expression ']'
|
||||
private static boolean quantified_0(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "quantified_0")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = consumeToken(b, BNF_LEFT_BRACKET);
|
||||
r = r && expression(b, l + 1);
|
||||
r = r && consumeToken(b, BNF_RIGHT_BRACKET);
|
||||
exit_section_(b, m, null, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
// simple quantifier?
|
||||
private static boolean quantified_1(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "quantified_1")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = simple(b, l + 1);
|
||||
r = r && quantified_1_1(b, l + 1);
|
||||
exit_section_(b, m, null, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
// quantifier?
|
||||
private static boolean quantified_1_1(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "quantified_1_1")) return false;
|
||||
quantifier(b, l + 1);
|
||||
return true;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// '?' | '+' | '*'
|
||||
public static boolean quantifier(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "quantifier")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _NONE_, "<quantifier>");
|
||||
r = consumeToken(b, BNF_OP_OPT);
|
||||
if (!r) r = consumeToken(b, BNF_OP_ONEMORE);
|
||||
if (!r) r = consumeToken(b, BNF_OP_ZEROMORE);
|
||||
exit_section_(b, l, m, BNF_QUANTIFIER, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// id
|
||||
public static boolean reference_or_token(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "reference_or_token")) return false;
|
||||
if (!nextTokenIs(b, BNF_ID)) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = consumeToken(b, BNF_ID);
|
||||
exit_section_(b, m, BNF_REFERENCE_OR_TOKEN, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// modifier* id '::=' expression attrs? ';'?
|
||||
public static boolean rule(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "rule")) return false;
|
||||
boolean r, p;
|
||||
Marker m = enter_section_(b, l, _NONE_, "<rule>");
|
||||
r = rule_0(b, l + 1);
|
||||
r = r && consumeToken(b, BNF_ID);
|
||||
r = r && consumeToken(b, BNF_OP_IS);
|
||||
p = r; // pin = 3
|
||||
r = r && report_error_(b, expression(b, l + 1));
|
||||
r = p && report_error_(b, rule_4(b, l + 1)) && r;
|
||||
r = p && rule_5(b, l + 1) && r;
|
||||
exit_section_(b, l, m, BNF_RULE, r, p, rule_recover_until_parser_);
|
||||
return r || p;
|
||||
}
|
||||
|
||||
// modifier*
|
||||
private static boolean rule_0(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "rule_0")) return false;
|
||||
int c = current_position_(b);
|
||||
while (true) {
|
||||
if (!modifier(b, l + 1)) break;
|
||||
if (!empty_element_parsed_guard_(b, "rule_0", c)) break;
|
||||
c = current_position_(b);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// attrs?
|
||||
private static boolean rule_4(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "rule_4")) return false;
|
||||
attrs(b, l + 1);
|
||||
return true;
|
||||
}
|
||||
|
||||
// ';'?
|
||||
private static boolean rule_5(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "rule_5")) return false;
|
||||
consumeToken(b, BNF_SEMICOLON);
|
||||
return true;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// !'{'
|
||||
static boolean rule_recover_until(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "rule_recover_until")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _NOT_, null);
|
||||
r = !consumeToken(b, BNF_LEFT_BRACE);
|
||||
exit_section_(b, l, m, null, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// option +
|
||||
public static boolean sequence(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "sequence")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _COLLAPSE_, "<sequence>");
|
||||
r = option(b, l + 1);
|
||||
int c = current_position_(b);
|
||||
while (r) {
|
||||
if (!option(b, l + 1)) break;
|
||||
if (!empty_element_parsed_guard_(b, "sequence", c)) break;
|
||||
c = current_position_(b);
|
||||
}
|
||||
exit_section_(b, l, m, BNF_SEQUENCE, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// !(modifier* id '::=' ) reference_or_token | literal_expression | paren_expression
|
||||
static boolean simple(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "simple")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = simple_0(b, l + 1);
|
||||
if (!r) r = literal_expression(b, l + 1);
|
||||
if (!r) r = paren_expression(b, l + 1);
|
||||
exit_section_(b, m, null, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
// !(modifier* id '::=' ) reference_or_token
|
||||
private static boolean simple_0(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "simple_0")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = simple_0_0(b, l + 1);
|
||||
r = r && reference_or_token(b, l + 1);
|
||||
exit_section_(b, m, null, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
// !(modifier* id '::=' )
|
||||
private static boolean simple_0_0(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "simple_0_0")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b, l, _NOT_, null);
|
||||
r = !simple_0_0_0(b, l + 1);
|
||||
exit_section_(b, l, m, null, r, false, null);
|
||||
return r;
|
||||
}
|
||||
|
||||
// modifier* id '::='
|
||||
private static boolean simple_0_0_0(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "simple_0_0_0")) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = simple_0_0_0_0(b, l + 1);
|
||||
r = r && consumeToken(b, BNF_ID);
|
||||
r = r && consumeToken(b, BNF_OP_IS);
|
||||
exit_section_(b, m, null, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
// modifier*
|
||||
private static boolean simple_0_0_0_0(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "simple_0_0_0_0")) return false;
|
||||
int c = current_position_(b);
|
||||
while (true) {
|
||||
if (!modifier(b, l + 1)) break;
|
||||
if (!empty_element_parsed_guard_(b, "simple_0_0_0_0", c)) break;
|
||||
c = current_position_(b);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/* ********************************************************** */
|
||||
// string
|
||||
public static boolean string_literal_expression(PsiBuilder b, int l) {
|
||||
if (!recursion_guard_(b, l, "string_literal_expression")) return false;
|
||||
if (!nextTokenIs(b, BNF_STRING)) return false;
|
||||
boolean r;
|
||||
Marker m = enter_section_(b);
|
||||
r = consumeToken(b, BNF_STRING);
|
||||
exit_section_(b, m, BNF_STRING_LITERAL_EXPRESSION, r);
|
||||
return r;
|
||||
}
|
||||
|
||||
final static Parser attr_recover_until_parser_ = new Parser() {
|
||||
public boolean parse(PsiBuilder b, int l) {
|
||||
return attr_recover_until(b, l + 1);
|
||||
}
|
||||
};
|
||||
final static Parser rule_recover_until_parser_ = new Parser() {
|
||||
public boolean parse(PsiBuilder b, int l) {
|
||||
return rule_recover_until(b, l + 1);
|
||||
}
|
||||
};
|
||||
}
|
||||
482
samples/Java/JFlexLexer.java
Normal file
482
samples/Java/JFlexLexer.java
Normal file
@@ -0,0 +1,482 @@
|
||||
/* The following code was generated by JFlex 1.4.3 on 28/01/16 11:27 */
|
||||
|
||||
package test;
|
||||
import com.intellij.lexer.*;
|
||||
import com.intellij.psi.tree.IElementType;
|
||||
import static org.intellij.grammar.psi.BnfTypes.*;
|
||||
|
||||
|
||||
/**
|
||||
* This class is a scanner generated by
|
||||
* <a href="http://www.jflex.de/">JFlex</a> 1.4.3
|
||||
* on 28/01/16 11:27 from the specification file
|
||||
* <tt>/home/abigail/code/intellij-grammar-kit-test/src/test/_GrammarLexer.flex</tt>
|
||||
*/
|
||||
public class _GrammarLexer implements FlexLexer {
|
||||
/** initial size of the lookahead buffer */
|
||||
private static final int ZZ_BUFFERSIZE = 16384;
|
||||
|
||||
/** lexical states */
|
||||
public static final int YYINITIAL = 0;
|
||||
|
||||
/**
|
||||
* ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l
|
||||
* ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l
|
||||
* at the beginning of a line
|
||||
* l is of the form l = 2*k, k a non negative integer
|
||||
*/
|
||||
private static final int ZZ_LEXSTATE[] = {
|
||||
0, 0
|
||||
};
|
||||
|
||||
/**
|
||||
* Translates characters to character classes
|
||||
*/
|
||||
private static final String ZZ_CMAP_PACKED =
|
||||
"\11\0\1\1\1\1\1\0\1\1\1\1\22\0\1\1\101\0\1\13"+
|
||||
"\1\0\1\3\1\14\1\0\1\10\1\0\1\2\3\0\1\12\1\7"+
|
||||
"\3\0\1\6\1\4\1\5\1\11\uff8a\0";
|
||||
|
||||
/**
|
||||
* Translates characters to character classes
|
||||
*/
|
||||
private static final char [] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED);
|
||||
|
||||
/**
|
||||
* Translates DFA states to action switch labels.
|
||||
*/
|
||||
private static final int [] ZZ_ACTION = zzUnpackAction();
|
||||
|
||||
private static final String ZZ_ACTION_PACKED_0 =
|
||||
"\1\0\1\1\1\2\3\1\1\3\10\0\1\4\1\5";
|
||||
|
||||
private static int [] zzUnpackAction() {
|
||||
int [] result = new int[17];
|
||||
int offset = 0;
|
||||
offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static int zzUnpackAction(String packed, int offset, int [] result) {
|
||||
int i = 0; /* index in packed string */
|
||||
int j = offset; /* index in unpacked array */
|
||||
int l = packed.length();
|
||||
while (i < l) {
|
||||
int count = packed.charAt(i++);
|
||||
int value = packed.charAt(i++);
|
||||
do result[j++] = value; while (--count > 0);
|
||||
}
|
||||
return j;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Translates a state to a row index in the transition table
|
||||
*/
|
||||
private static final int [] ZZ_ROWMAP = zzUnpackRowMap();
|
||||
|
||||
private static final String ZZ_ROWMAP_PACKED_0 =
|
||||
"\0\0\0\15\0\32\0\47\0\64\0\101\0\15\0\116"+
|
||||
"\0\133\0\150\0\165\0\202\0\217\0\234\0\251\0\15"+
|
||||
"\0\15";
|
||||
|
||||
private static int [] zzUnpackRowMap() {
|
||||
int [] result = new int[17];
|
||||
int offset = 0;
|
||||
offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static int zzUnpackRowMap(String packed, int offset, int [] result) {
|
||||
int i = 0; /* index in packed string */
|
||||
int j = offset; /* index in unpacked array */
|
||||
int l = packed.length();
|
||||
while (i < l) {
|
||||
int high = packed.charAt(i++) << 16;
|
||||
result[j++] = high | packed.charAt(i++);
|
||||
}
|
||||
return j;
|
||||
}
|
||||
|
||||
/**
|
||||
* The transition table of the DFA
|
||||
*/
|
||||
private static final int [] ZZ_TRANS = zzUnpackTrans();
|
||||
|
||||
private static final String ZZ_TRANS_PACKED_0 =
|
||||
"\1\2\1\3\1\4\1\2\1\5\2\2\1\6\5\2"+
|
||||
"\16\0\1\3\16\0\1\7\16\0\1\10\20\0\1\11"+
|
||||
"\11\0\1\12\20\0\1\13\4\0\1\14\25\0\1\15"+
|
||||
"\10\0\1\16\21\0\1\17\10\0\1\20\12\0\1\21"+
|
||||
"\6\0";
|
||||
|
||||
private static int [] zzUnpackTrans() {
|
||||
int [] result = new int[182];
|
||||
int offset = 0;
|
||||
offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static int zzUnpackTrans(String packed, int offset, int [] result) {
|
||||
int i = 0; /* index in packed string */
|
||||
int j = offset; /* index in unpacked array */
|
||||
int l = packed.length();
|
||||
while (i < l) {
|
||||
int count = packed.charAt(i++);
|
||||
int value = packed.charAt(i++);
|
||||
value--;
|
||||
do result[j++] = value; while (--count > 0);
|
||||
}
|
||||
return j;
|
||||
}
|
||||
|
||||
|
||||
/* error codes */
|
||||
private static final int ZZ_UNKNOWN_ERROR = 0;
|
||||
private static final int ZZ_NO_MATCH = 1;
|
||||
private static final int ZZ_PUSHBACK_2BIG = 2;
|
||||
private static final char[] EMPTY_BUFFER = new char[0];
|
||||
private static final int YYEOF = -1;
|
||||
private static java.io.Reader zzReader = null; // Fake
|
||||
|
||||
/* error messages for the codes above */
|
||||
private static final String ZZ_ERROR_MSG[] = {
|
||||
"Unkown internal scanner error",
|
||||
"Error: could not match input",
|
||||
"Error: pushback value was too large"
|
||||
};
|
||||
|
||||
/**
|
||||
* ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
|
||||
*/
|
||||
private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();
|
||||
|
||||
private static final String ZZ_ATTRIBUTE_PACKED_0 =
|
||||
"\1\0\1\11\4\1\1\11\10\0\2\11";
|
||||
|
||||
private static int [] zzUnpackAttribute() {
|
||||
int [] result = new int[17];
|
||||
int offset = 0;
|
||||
offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static int zzUnpackAttribute(String packed, int offset, int [] result) {
|
||||
int i = 0; /* index in packed string */
|
||||
int j = offset; /* index in unpacked array */
|
||||
int l = packed.length();
|
||||
while (i < l) {
|
||||
int count = packed.charAt(i++);
|
||||
int value = packed.charAt(i++);
|
||||
do result[j++] = value; while (--count > 0);
|
||||
}
|
||||
return j;
|
||||
}
|
||||
|
||||
/** the current state of the DFA */
|
||||
private int zzState;
|
||||
|
||||
/** the current lexical state */
|
||||
private int zzLexicalState = YYINITIAL;
|
||||
|
||||
/** this buffer contains the current text to be matched and is
|
||||
the source of the yytext() string */
|
||||
private CharSequence zzBuffer = "";
|
||||
|
||||
/** this buffer may contains the current text array to be matched when it is cheap to acquire it */
|
||||
private char[] zzBufferArray;
|
||||
|
||||
/** the textposition at the last accepting state */
|
||||
private int zzMarkedPos;
|
||||
|
||||
/** the textposition at the last state to be included in yytext */
|
||||
private int zzPushbackPos;
|
||||
|
||||
/** the current text position in the buffer */
|
||||
private int zzCurrentPos;
|
||||
|
||||
/** startRead marks the beginning of the yytext() string in the buffer */
|
||||
private int zzStartRead;
|
||||
|
||||
/** endRead marks the last character in the buffer, that has been read
|
||||
from input */
|
||||
private int zzEndRead;
|
||||
|
||||
/**
|
||||
* zzAtBOL == true <=> the scanner is currently at the beginning of a line
|
||||
*/
|
||||
private boolean zzAtBOL = true;
|
||||
|
||||
/** zzAtEOF == true <=> the scanner is at the EOF */
|
||||
private boolean zzAtEOF;
|
||||
|
||||
/* user code: */
|
||||
public _GrammarLexer() {
|
||||
this((java.io.Reader)null);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates a new scanner
|
||||
*
|
||||
* @param in the java.io.Reader to read input from.
|
||||
*/
|
||||
public _GrammarLexer(java.io.Reader in) {
|
||||
this.zzReader = in;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Unpacks the compressed character translation table.
|
||||
*
|
||||
* @param packed the packed character translation table
|
||||
* @return the unpacked character translation table
|
||||
*/
|
||||
private static char [] zzUnpackCMap(String packed) {
|
||||
char [] map = new char[0x10000];
|
||||
int i = 0; /* index in packed string */
|
||||
int j = 0; /* index in unpacked array */
|
||||
while (i < 52) {
|
||||
int count = packed.charAt(i++);
|
||||
char value = packed.charAt(i++);
|
||||
do map[j++] = value; while (--count > 0);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
public final int getTokenStart(){
|
||||
return zzStartRead;
|
||||
}
|
||||
|
||||
public final int getTokenEnd(){
|
||||
return getTokenStart() + yylength();
|
||||
}
|
||||
|
||||
public void reset(CharSequence buffer, int start, int end,int initialState){
|
||||
zzBuffer = buffer;
|
||||
zzBufferArray = com.intellij.util.text.CharArrayUtil.fromSequenceWithoutCopying(buffer);
|
||||
zzCurrentPos = zzMarkedPos = zzStartRead = start;
|
||||
zzPushbackPos = 0;
|
||||
zzAtEOF = false;
|
||||
zzAtBOL = true;
|
||||
zzEndRead = end;
|
||||
yybegin(initialState);
|
||||
}
|
||||
|
||||
/**
|
||||
* Refills the input buffer.
|
||||
*
|
||||
* @return <code>false</code>, iff there was new input.
|
||||
*
|
||||
* @exception java.io.IOException if any I/O-Error occurs
|
||||
*/
|
||||
private boolean zzRefill() throws java.io.IOException {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the current lexical state.
|
||||
*/
|
||||
public final int yystate() {
|
||||
return zzLexicalState;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Enters a new lexical state
|
||||
*
|
||||
* @param newState the new lexical state
|
||||
*/
|
||||
public final void yybegin(int newState) {
|
||||
zzLexicalState = newState;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the text matched by the current regular expression.
|
||||
*/
|
||||
public final CharSequence yytext() {
|
||||
return zzBuffer.subSequence(zzStartRead, zzMarkedPos);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the character at position <tt>pos</tt> from the
|
||||
* matched text.
|
||||
*
|
||||
* It is equivalent to yytext().charAt(pos), but faster
|
||||
*
|
||||
* @param pos the position of the character to fetch.
|
||||
* A value from 0 to yylength()-1.
|
||||
*
|
||||
* @return the character at position pos
|
||||
*/
|
||||
public final char yycharat(int pos) {
|
||||
return zzBufferArray != null ? zzBufferArray[zzStartRead+pos]:zzBuffer.charAt(zzStartRead+pos);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the length of the matched text region.
|
||||
*/
|
||||
public final int yylength() {
|
||||
return zzMarkedPos-zzStartRead;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Reports an error that occured while scanning.
|
||||
*
|
||||
* In a wellformed scanner (no or only correct usage of
|
||||
* yypushback(int) and a match-all fallback rule) this method
|
||||
* will only be called with things that "Can't Possibly Happen".
|
||||
* If this method is called, something is seriously wrong
|
||||
* (e.g. a JFlex bug producing a faulty scanner etc.).
|
||||
*
|
||||
* Usual syntax/scanner level error handling should be done
|
||||
* in error fallback rules.
|
||||
*
|
||||
* @param errorCode the code of the errormessage to display
|
||||
*/
|
||||
private void zzScanError(int errorCode) {
|
||||
String message;
|
||||
try {
|
||||
message = ZZ_ERROR_MSG[errorCode];
|
||||
}
|
||||
catch (ArrayIndexOutOfBoundsException e) {
|
||||
message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
|
||||
}
|
||||
|
||||
throw new Error(message);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Pushes the specified amount of characters back into the input stream.
|
||||
*
|
||||
* They will be read again by then next call of the scanning method
|
||||
*
|
||||
* @param number the number of characters to be read again.
|
||||
* This number must not be greater than yylength()!
|
||||
*/
|
||||
public void yypushback(int number) {
|
||||
if ( number > yylength() )
|
||||
zzScanError(ZZ_PUSHBACK_2BIG);
|
||||
|
||||
zzMarkedPos -= number;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Resumes scanning until the next regular expression is matched,
|
||||
* the end of input is encountered or an I/O-Error occurs.
|
||||
*
|
||||
* @return the next token
|
||||
* @exception java.io.IOException if any I/O-Error occurs
|
||||
*/
|
||||
public IElementType advance() throws java.io.IOException {
|
||||
int zzInput;
|
||||
int zzAction;
|
||||
|
||||
// cached fields:
|
||||
int zzCurrentPosL;
|
||||
int zzMarkedPosL;
|
||||
int zzEndReadL = zzEndRead;
|
||||
CharSequence zzBufferL = zzBuffer;
|
||||
char[] zzBufferArrayL = zzBufferArray;
|
||||
char [] zzCMapL = ZZ_CMAP;
|
||||
|
||||
int [] zzTransL = ZZ_TRANS;
|
||||
int [] zzRowMapL = ZZ_ROWMAP;
|
||||
int [] zzAttrL = ZZ_ATTRIBUTE;
|
||||
|
||||
while (true) {
|
||||
zzMarkedPosL = zzMarkedPos;
|
||||
|
||||
zzAction = -1;
|
||||
|
||||
zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;
|
||||
|
||||
zzState = ZZ_LEXSTATE[zzLexicalState];
|
||||
|
||||
|
||||
zzForAction: {
|
||||
while (true) {
|
||||
|
||||
if (zzCurrentPosL < zzEndReadL)
|
||||
zzInput = (zzBufferArrayL != null ? zzBufferArrayL[zzCurrentPosL++] : zzBufferL.charAt(zzCurrentPosL++));
|
||||
else if (zzAtEOF) {
|
||||
zzInput = YYEOF;
|
||||
break zzForAction;
|
||||
}
|
||||
else {
|
||||
// store back cached positions
|
||||
zzCurrentPos = zzCurrentPosL;
|
||||
zzMarkedPos = zzMarkedPosL;
|
||||
boolean eof = zzRefill();
|
||||
// get translated positions and possibly new buffer
|
||||
zzCurrentPosL = zzCurrentPos;
|
||||
zzMarkedPosL = zzMarkedPos;
|
||||
zzBufferL = zzBuffer;
|
||||
zzEndReadL = zzEndRead;
|
||||
if (eof) {
|
||||
zzInput = YYEOF;
|
||||
break zzForAction;
|
||||
}
|
||||
else {
|
||||
zzInput = (zzBufferArrayL != null ? zzBufferArrayL[zzCurrentPosL++] : zzBufferL.charAt(zzCurrentPosL++));
|
||||
}
|
||||
}
|
||||
int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMapL[zzInput] ];
|
||||
if (zzNext == -1) break zzForAction;
|
||||
zzState = zzNext;
|
||||
|
||||
int zzAttributes = zzAttrL[zzState];
|
||||
if ( (zzAttributes & 1) == 1 ) {
|
||||
zzAction = zzState;
|
||||
zzMarkedPosL = zzCurrentPosL;
|
||||
if ( (zzAttributes & 8) == 8 ) break zzForAction;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
// store back cached position
|
||||
zzMarkedPos = zzMarkedPosL;
|
||||
|
||||
switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
|
||||
case 1:
|
||||
{ return com.intellij.psi.TokenType.BAD_CHARACTER;
|
||||
}
|
||||
case 6: break;
|
||||
case 4:
|
||||
{ return BNF_STRING;
|
||||
}
|
||||
case 7: break;
|
||||
case 5:
|
||||
{ return BNF_NUMBER;
|
||||
}
|
||||
case 8: break;
|
||||
case 3:
|
||||
{ return BNF_ID;
|
||||
}
|
||||
case 9: break;
|
||||
case 2:
|
||||
{ return com.intellij.psi.TokenType.WHITE_SPACE;
|
||||
}
|
||||
case 10: break;
|
||||
default:
|
||||
if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
|
||||
zzAtEOF = true;
|
||||
return null;
|
||||
}
|
||||
else {
|
||||
zzScanError(ZZ_NO_MATCH);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
10
samples/Perl/Sample.pod
Normal file
10
samples/Perl/Sample.pod
Normal file
@@ -0,0 +1,10 @@
|
||||
use strict;
|
||||
use warnings;
|
||||
package DZT::Sample;
|
||||
|
||||
sub return_arrayref_of_values_passed {
|
||||
my $invocant = shift;
|
||||
return \@_;
|
||||
}
|
||||
|
||||
1;
|
||||
280
samples/Prolog/queues.yap
Normal file
280
samples/Prolog/queues.yap
Normal file
@@ -0,0 +1,280 @@
|
||||
% This file has been included as an YAP library by Vitor Santos Costa, 1999
|
||||
|
||||
% File : QUEUES.PL
|
||||
% Author : R.A.O'Keefe
|
||||
% Updated: Friday November 18th, 1983, 8:09:31 pm
|
||||
% Purpose: define queue operations
|
||||
% Needs : lib(lists) for append/3.
|
||||
|
||||
/** @defgroup Queues Queues
|
||||
@ingroup library
|
||||
@{
|
||||
|
||||
The following queue manipulation routines are available once
|
||||
included with the `use_module(library(queues))` command. Queues are
|
||||
implemented with difference lists.
|
||||
|
||||
*/
|
||||
|
||||
/**
|
||||
|
||||
@pred make_queue(+ _Queue_)
|
||||
|
||||
|
||||
Creates a new empty queue. It should only be used to create a new queue.
|
||||
|
||||
|
||||
*/
|
||||
|
||||
|
||||
/** @pred empty_queue(+ _Queue_)
|
||||
|
||||
|
||||
Tests whether the queue is empty.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred head_queue(+ _Queue_, ? _Head_)
|
||||
|
||||
|
||||
Unifies Head with the first element of the queue.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred join_queue(+ _Element_, + _OldQueue_, - _NewQueue_)
|
||||
|
||||
|
||||
Adds the new element at the end of the queue.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred jump_queue(+ _Element_, + _OldQueue_, - _NewQueue_)
|
||||
|
||||
|
||||
Adds the new element at the front of the list.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred length_queue(+ _Queue_, - _Length_)
|
||||
|
||||
|
||||
Counts the number of elements currently in the queue.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred list_join_queue(+ _List_, + _OldQueue_, - _NewQueue_)
|
||||
|
||||
|
||||
Ads the new elements at the end of the queue.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred list_jump_queue(+ _List_, + _OldQueue_, + _NewQueue_)
|
||||
|
||||
|
||||
Adds all the elements of _List_ at the front of the queue.
|
||||
|
||||
|
||||
*/
|
||||
/** @pred list_to_queue(+ _List_, - _Queue_)
|
||||
|
||||
|
||||
Creates a new queue with the same elements as _List._
|
||||
|
||||
|
||||
*/
|
||||
/** @pred queue_to_list(+ _Queue_, - _List_)
|
||||
|
||||
|
||||
Creates a new list with the same elements as _Queue_.
|
||||
|
||||
|
||||
|
||||
|
||||
*/
|
||||
/** @pred serve_queue(+ _OldQueue_, + _Head_, - _NewQueue_)
|
||||
|
||||
|
||||
Removes the first element of the queue for service.
|
||||
|
||||
|
||||
*/
|
||||
:- module(queues, [
|
||||
make_queue/1, % create empty queue
|
||||
join_queue/3, % add element to end of queue
|
||||
list_join_queue/3, % add many elements to end of queue
|
||||
jump_queue/3, % add element to front of queue
|
||||
list_jump_queue/3, % add many elements to front of queue
|
||||
head_queue/2, % look at first element of queue
|
||||
serve_queue/3, % remove first element of queue
|
||||
length_queue/2, % count elements of queue
|
||||
empty_queue/1, % test whether queue is empty
|
||||
list_to_queue/2, % convert list to queue
|
||||
queue_to_list/2 % convert queue to list
|
||||
]).
|
||||
|
||||
:- use_module(library(lists), [append/3]).
|
||||
|
||||
/*
|
||||
:- mode
|
||||
make_queue(-),
|
||||
join_queue(+, +, -),
|
||||
list_join_queue(+, +, -),
|
||||
jump_queue(+, +, -),
|
||||
list_jump_queue(+, +, -),
|
||||
head_queue(+, ?),
|
||||
serve_queue(+, ?, -),
|
||||
length_queue(+, ?),
|
||||
length_queue(+, +, +, -),
|
||||
empty_queue(+),
|
||||
list_to_queue(+, -),
|
||||
queue_to_list(+, -),
|
||||
queue_to_list(+, +, -).
|
||||
*/
|
||||
|
||||
/* In this package, a queue is represented as a term Front-Back, where
|
||||
Front is a list and Back is a tail of that list, and is normally a
|
||||
variable. join_queue will only work when the Back is a variable,
|
||||
the other routines will accept any tail. The elements of the queue
|
||||
are the list difference, that is, all the elements starting at Front
|
||||
and stopping at Back. Examples:
|
||||
|
||||
[a,b,c,d,e|Z]-Z has elements a,b,c,d,e
|
||||
[a,b,c,d,e]-[d,e] has elements a,b,c
|
||||
Z-Z has no elements
|
||||
[1,2,3]-[1,2,3] has no elements
|
||||
*/
|
||||
|
||||
% make_queue(Queue)
|
||||
% creates a new empty queue. It will also match empty queues, but
|
||||
% because Prolog doesn't do the occurs check, it will also match
|
||||
% other queues, creating circular lists. So this should ONLY be
|
||||
% used to make new queues.
|
||||
|
||||
make_queue(X-X).
|
||||
|
||||
|
||||
|
||||
% join_queue(Element, OldQueue, NewQueue)
|
||||
% adds the new element at the end of the queue. The old queue is
|
||||
% side-effected, so you *can't* do
|
||||
% join_queue(1, OldQ, NewQ1),
|
||||
% join_queue(2, OldQ, NewQ2).
|
||||
% There isn't any easy way of doing that, sensible though it might
|
||||
% be. You *can* do
|
||||
% join_queue(1, OldQ, MidQ),
|
||||
% join_queue(2, MidQ, NewQ).
|
||||
% See list_join_queue.
|
||||
|
||||
join_queue(Element, Front-[Element|Back], Front-Back).
|
||||
|
||||
|
||||
|
||||
% list_join_queue(List, OldQueue, NewQueue)
|
||||
% adds the new elements at the end of the queue. The elements are
|
||||
% added in the same order that they appear in the list, e.g.
|
||||
% list_join_queue([y,z], [a,b,c|M]-M, [a,b,c,y,z|N]-N).
|
||||
|
||||
list_join_queue(List, Front-OldBack, Front-NewBack) :-
|
||||
append(List, OldBack, NewBack).
|
||||
|
||||
|
||||
|
||||
% jump_queue(Element, OldQueue, NewQueue)
|
||||
% adds the new element at the front of the list. Unlike join_queue,
|
||||
% jump_queue(1, OldQ, NewQ1),
|
||||
% jump_queue(2, OldQ, NewQ2)
|
||||
% *does* work, though if you add things at the end of NewQ1 they
|
||||
% will also show up in NewQ2. Note that
|
||||
% jump_queue(1, OldQ, MidQ),
|
||||
% jump_queue(2, MidQ, NewQ)
|
||||
% makes NewQ start 2, 1, ...
|
||||
|
||||
jump_queue(Element, Front-Back, [Element|Front]-Back).
|
||||
|
||||
|
||||
|
||||
% list_jump_queue(List, OldQueue, NewQueue)
|
||||
% adds all the elements of List at the front of the queue. There are
|
||||
% two ways we might do this. We could add all the elements one at a
|
||||
% time, so that they would appear at the beginning of the queue in the
|
||||
% opposite order to the order they had in the list, or we could add
|
||||
% them in one lump, so that they have the same order in the queue as
|
||||
% in the list. As you can easily add the elements one at a time if
|
||||
% that is what you want, I have chosen the latter.
|
||||
|
||||
list_jump_queue(List, OldFront-Back, NewFront-Back) :-
|
||||
append(List, OldFront, NewFront).
|
||||
% reverse(List, OldFront, NewFront). % for the other definition
|
||||
|
||||
|
||||
|
||||
% head_queue(Queue, Head)
|
||||
% unifies Head with the first element of the queue. The tricky part
|
||||
% is that we might be at the end of a queue: Back-Back, with Back a
|
||||
% variable, and in that case this predicate should not succeed, as we
|
||||
% don't know what that element is or whether it exists yet.
|
||||
|
||||
head_queue(Front-Back, Head) :-
|
||||
Front \== Back, % the queue is not empty
|
||||
Front = [Head|_].
|
||||
|
||||
|
||||
|
||||
% serve_queue(OldQueue, Head, NewQueue)
|
||||
% removes the first element of the queue for service.
|
||||
|
||||
serve_queue(OldFront-Back, Head, NewFront-Back) :-
|
||||
OldFront \== Back,
|
||||
OldFront = [Head|NewFront].
|
||||
|
||||
|
||||
|
||||
% empty_queue(Queue)
|
||||
% tests whether the queue is empty. If the back of a queue were
|
||||
% guaranteed to be a variable, we could have
|
||||
% empty_queue(Front-Back) :- var(Front).
|
||||
% but I don't see why you shouldn't be able to treat difference
|
||||
% lists as queues if you want to.
|
||||
|
||||
empty_queue(Front-Back) :-
|
||||
Front == Back.
|
||||
|
||||
|
||||
|
||||
% length_queue(Queue, Length)
|
||||
% counts the number of elements currently in the queue. Note that
|
||||
% we have to be careful in checking for the end of the list, we
|
||||
% can't test for [] the way length(List) does.
|
||||
|
||||
length_queue(Front-Back, Length) :-
|
||||
length_queue(Front, Back, 0, N),
|
||||
Length = N.
|
||||
|
||||
length_queue(Front, Back, N, N) :-
|
||||
Front == Back, !.
|
||||
length_queue([_|Front], Back, K, N) :-
|
||||
L is K+1,
|
||||
length_queue(Front, Back, L, N).
|
||||
|
||||
|
||||
|
||||
% list_to_queue(List, Queue)
|
||||
% creates a new queue with the same elements as List.
|
||||
|
||||
list_to_queue(List, Front-Back) :-
|
||||
append(List, Back, Front).
|
||||
|
||||
|
||||
|
||||
% queue_to_list(Queue, List)
|
||||
% creates a new list with the same elements as Queue.
|
||||
|
||||
queue_to_list(Front-Back, List) :-
|
||||
queue_to_list(Front, Back, List).
|
||||
|
||||
queue_to_list(Front, Back, Ans) :-
|
||||
Front == Back, !, Ans = [].
|
||||
queue_to_list([Head|Front], Back, [Head|Tail]) :-
|
||||
queue_to_list(Front, Back, Tail).
|
||||
|
||||
124
samples/Python/closure_js_binary.bzl
Normal file
124
samples/Python/closure_js_binary.bzl
Normal file
@@ -0,0 +1,124 @@
|
||||
# Copyright 2015 The Bazel Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Build definitions for JavaScript binaries compiled with the Closure Compiler.
|
||||
|
||||
A single file is produced with the _combined.js suffix.
|
||||
|
||||
By default, the name of the entry point is assumed to be the same as that of the
|
||||
build target. This behaviour may be overridden with the "main" attribute.
|
||||
|
||||
The optimization level may be set with the "compilation_level" attribute.
|
||||
Supported values are: whitespace_only, simple, and advanced.
|
||||
|
||||
Example:
|
||||
|
||||
closure_js_binary(
|
||||
name = "hello",
|
||||
compilation_level = "simple",
|
||||
language_in = "ecmascript6",
|
||||
language_out = "ecmascript3",
|
||||
externs = ["//third_party/javascript/google_cast/cast.js"],
|
||||
deps = [
|
||||
"@closure_library//:closure_library",
|
||||
":hello_lib",
|
||||
],
|
||||
)
|
||||
|
||||
This rule will produce hello_combined.js.
|
||||
"""
|
||||
|
||||
_COMPILATION_LEVELS = {
|
||||
"whitespace_only": [
|
||||
"--compilation_level=WHITESPACE_ONLY",
|
||||
"--formatting=PRETTY_PRINT"
|
||||
],
|
||||
"simple": ["--compilation_level=SIMPLE"],
|
||||
"advanced": ["--compilation_level=ADVANCED"]
|
||||
}
|
||||
|
||||
_SUPPORTED_LANGUAGES = {
|
||||
"es3": ["ES3"],
|
||||
"ecmascript3": ["ECMASCRIPT3"],
|
||||
"es5": ["ES5"],
|
||||
"ecmascript5": ["ECMASCRIPT5"],
|
||||
"es5_strict": ["ES5_STRICT"],
|
||||
"ecmascript5_strict": ["ECMASCRIPT5_STRICT"],
|
||||
"es6": ["ES6"],
|
||||
"ecmascript6": ["ECMASCRIPT6"],
|
||||
"es6_strict": ["ES6_STRICT"],
|
||||
"ecmascript6_strict": ["ECMASCRIPT6_STRICT"],
|
||||
"es6_typed": ["ES6_TYPED"],
|
||||
"ecmascript6_typed": ["ECMASCRIPT6_TYPED"],
|
||||
}
|
||||
|
||||
def _impl(ctx):
  """Compiles the transitive JS sources of ctx.attr.deps into one binary.

  Gathers sources and externs from the deps, translates the
  compilation_level / language_in / language_out attributes into Closure
  Compiler flags, and registers the compile action producing
  ctx.outputs.out.  Fails the analysis phase on an unknown attribute value.
  """
  externs = set(order="compile")
  srcs = set(order="compile")
  for dep in ctx.attr.deps:
    externs += dep.transitive_js_externs
    srcs += dep.transitive_js_srcs

  args = [
      "--entry_point=goog:%s" % ctx.attr.main,
      "--js_output_file=%s" % ctx.outputs.out.path,
      "--dependency_mode=LOOSE",
      "--warning_level=VERBOSE",
  ] + (["--js=%s" % src.path for src in srcs] +
       ["--externs=%s" % extern.path for extern in externs])

  # Set the compilation level (the table values are already flag lists).
  if ctx.attr.compilation_level in _COMPILATION_LEVELS:
    args += _COMPILATION_LEVELS[ctx.attr.compilation_level]
  else:
    fail("Invalid compilation_level '%s', expected one of %s" %
         (ctx.attr.compilation_level, _COMPILATION_LEVELS.keys()))

  # Set the language in.  _SUPPORTED_LANGUAGES values are lists, so they
  # must be expanded element-wise into flag strings; the previous
  # `args += "--language_in=" + <list>` concatenated a string with a
  # list, which is a type error.
  if ctx.attr.language_in in _SUPPORTED_LANGUAGES:
    args += ["--language_in=" + lang
             for lang in _SUPPORTED_LANGUAGES[ctx.attr.language_in]]
  else:
    fail("Invalid language_in '%s', expected one of %s" %
         (ctx.attr.language_in, _SUPPORTED_LANGUAGES.keys()))

  # Set the language out (same element-wise expansion as language_in).
  if ctx.attr.language_out in _SUPPORTED_LANGUAGES:
    args += ["--language_out=" + lang
             for lang in _SUPPORTED_LANGUAGES[ctx.attr.language_out]]
  else:
    fail("Invalid language_out '%s', expected one of %s" %
         (ctx.attr.language_out, _SUPPORTED_LANGUAGES.keys()))

  # Single compile action: all srcs + externs in, one combined JS file out.
  ctx.action(
      inputs=list(srcs) + list(externs),
      outputs=[ctx.outputs.out],
      arguments=args,
      executable=ctx.executable._closure_compiler)

  return struct(files=set([ctx.outputs.out]))
|
||||
|
||||
# Public rule: compiles the deps' JavaScript into %{name}_combined.js.
closure_js_binary = rule(
    implementation=_impl,
    attrs={
        # Closure JS libraries contributing sources and externs transitively.
        "deps": attr.label_list(
            allow_files=False,
            providers=["transitive_js_externs", "transitive_js_srcs"]),
        # Entry-point namespace; defaults to the target's own name.
        "main": attr.string(default="%{name}"),
        # One of the _COMPILATION_LEVELS keys.
        "compilation_level": attr.string(default="advanced"),
        # _SUPPORTED_LANGUAGES keys for the input/output language specs.
        "language_in": attr.string(default="ecmascript6"),
        "language_out": attr.string(default="ecmascript3"),
        # The Closure Compiler executable used by the compile action.
        "_closure_compiler": attr.label(
            default=Label("//external:closure_compiler_"),
            executable=True),
    },
    outputs={"out": "%{name}_combined.js"})
|
||||
154
samples/Uno/PlayerPads.uno
Normal file
154
samples/Uno/PlayerPads.uno
Normal file
@@ -0,0 +1,154 @@
|
||||
using Uno;
|
||||
using Uno.Collections;
|
||||
using Uno.Graphics;
|
||||
using Uno.Scenes;
|
||||
using Uno.Designer;
|
||||
using Uno.Content;
|
||||
using Uno.Content.Models;
|
||||
using Uno.UI;
|
||||
|
||||
namespace PONG2D
|
||||
{
|
||||
// Holds the two player pad images, moves them from keyboard input
// and reverses the ball's horizontal velocity when it hits a pad.
public class PlayerPads : Node
{
    Image _player1Image;
    Image _player2Image;

    [Inline]
    public Image Player1
    {
        get { return _player1Image; }
        set
        {
            if (_player1Image != value)
            {
                _player1Image = value;
            }
        }
    }

    [Inline]
    public Image Player2
    {
        get { return _player2Image; }
        set
        {
            if (_player2Image != value)
            {
                _player2Image = value;
            }
        }
    }

    // NOTE(review): the getters read ActualPosition while the setters
    // write Position (layout result vs. requested position); also the
    // getters dereference the Image without a null check while the
    // setters guard against null -- confirm both are intended.
    [Hide]
    public float2 Player1Pos
    {
        get { return (Player1.ActualPosition); }
        set
        {
            if (Player1 != null)
                Player1.Position = value;
        }
    }

    [Hide]
    public float2 Player2Pos
    {
        get { return (Player2.ActualPosition); }
        set
        {
            if (Player2 != null)
                Player2.Position = value;
        }
    }

    // Bounding rectangle of player 1's pad.
    // Fix: the height previously used Player2.Height (copy-paste error);
    // it must use Player1's own height, mirroring Player2Rect below.
    public Rect Player1Rect
    {
        get { return new Rect(Player1Pos, float2(Player1.Width, Player1.Height)); }
        set
        {
            Player1Pos = value.Position;
            if (Player1 != null)
            {
                Player1.Width = value.Size.X;
                Player1.Height = value.Size.Y;
            }
        }
    }

    // Bounding rectangle of player 2's pad.
    public Rect Player2Rect
    {
        get { return new Rect(Player2Pos, float2(Player2.Width, Player2.Height)); }
        set
        {
            Player2Pos = value.Position;
            if (Player2 != null)
            {
                Player2.Width = value.Size.X;
                Player2.Height = value.Size.Y;
            }
        }
    }

    // The ball to bounce against the pads; may be null, in which case
    // OnUpdate skips the collision test.
    public Ball Ball
    {
        get;
        set;
    }

    // Vertical pad speed applied per update while a key is held.
    public float PadVelocity { get; set; }

    public PlayerPads()
    {
    }

    void UpdatePositions()
    {
        // Intentionally empty -- positions are updated directly in OnUpdate.
    }

    protected override void OnUpdate()
    {
        base.OnUpdate();

        // Player 1 moves with W/S.
        if (Input.IsKeyDown(Uno.Platform.Key.W))
        {
            Player1Pos = float2(0, Player1Pos.Y - PadVelocity);
        }

        if (Input.IsKeyDown(Uno.Platform.Key.S))
        {
            Player1Pos = float2(0, Player1Pos.Y + PadVelocity);
        }

        // Player 2 moves with the arrow keys.
        if (Input.IsKeyDown(Uno.Platform.Key.Up))
        {
            Player2Pos = float2(0, Player2Pos.Y - PadVelocity);
        }

        if (Input.IsKeyDown(Uno.Platform.Key.Down))
        {
            Player2Pos = float2(0, Player2Pos.Y + PadVelocity);
        }

        if (Ball != null)
        {
            // A pad hit mirrors the ball's horizontal velocity.
            if (Ball.BallRectangle.Intersects(Player1Rect) ||
                Ball.BallRectangle.Intersects(Player2Rect))
            {
                Ball.BallVelocity = float2(Ball.BallVelocity.X * -1f, Ball.BallVelocity.Y);
            }
        }
    }
}
|
||||
}
|
||||
139
samples/Uno/Pong.uno
Normal file
139
samples/Uno/Pong.uno
Normal file
@@ -0,0 +1,139 @@
|
||||
using Uno;
|
||||
using Uno.Collections;
|
||||
using Uno.Graphics;
|
||||
using Uno.Scenes;
|
||||
using Uno.Content;
|
||||
using Uno.Content.Models;
|
||||
|
||||
namespace PONG2D
|
||||
{
|
||||
// Self-contained Pong scene: two clamped pads, a bouncing ball,
// keyboard input and rectangle drawing.
public class Pong : Node
{
    float2 _player1Pos;
    float2 _player2Pos;
    float2 ballPosition;
    float2 ballVelocity;
    float2 rectangleSize;

    Rect player1Rect;
    Rect player2Rect;
    Rect ballRect;

    float2 resolution = Context.VirtualResolution;

    // NOTE(review): seeded but never read anywhere in this class.
    Random random = new Random(1);

    // Player 1 pad position, clamped so the pad stays on screen.
    float2 Player1Pos
    {
        get { return _player1Pos; }
        set
        {
            _player1Pos = Math.Clamp(value, float2(0, 0), resolution - rectangleSize);
        }
    }

    // Player 2 pad position, clamped so the pad stays on screen.
    float2 Player2Pos
    {
        get { return _player2Pos; }
        set
        {
            _player2Pos = Math.Clamp(value, float2(0, 0), resolution - rectangleSize);
        }
    }

    public Pong()
    {
        Uno.Scenes.Input.AddGlobalListener(this);
    }

    protected override void OnInitialize()
    {
        base.OnInitialize();
        UpdateValues();
    }

    // Derives pad/ball sizes and starting positions from the resolution.
    void UpdateValues()
    {
        rectangleSize = float2(resolution.X / 80f, resolution.Y / 5f);
        _player1Pos = float2(0f);
        _player2Pos = float2(Context.VirtualResolution.X - rectangleSize.X, 0f);

        player1Rect = new Rect(_player1Pos, rectangleSize);
        player2Rect = new Rect(_player2Pos, rectangleSize);

        ballPosition = float2(resolution.X * 0.5f - 10f, resolution.Y * 0.5f - 10f);
        ballRect = new Rect(ballPosition, float2(20f));

        SpawnBall();
    }

    // Recenters the ball and resets its velocity.
    void SpawnBall()
    {
        ballRect.Position = float2(resolution.X * 0.5f - 10f, resolution.Y * 0.5f - 10f);
        ballVelocity = float2(5f, 10f) * 0.5f;
    }

    void OnWindowResize(object sender, EventArgs args)
    {
        //UpdateValues();
    }

    protected override void OnUpdate()
    {
        base.OnUpdate();

        // Frame-rate independent pad speed.
        var padSpeed = resolution.Y * (float)Application.Current.FrameInterval * 4f;

        // Player 1: arrow keys.
        if (Input.IsKeyDown(Uno.Platform.Key.Up))
        {
            Player1Pos = float2(Player1Pos.X, Player1Pos.Y - padSpeed);
        }
        if (Input.IsKeyDown(Uno.Platform.Key.Down))
        {
            Player1Pos = float2(Player1Pos.X, Player1Pos.Y + padSpeed);
        }

        // Player 2: W/S keys.
        if (Input.IsKeyDown(Uno.Platform.Key.W))
        {
            Player2Pos = float2(Player2Pos.X, Player2Pos.Y - padSpeed);
        }
        if (Input.IsKeyDown(Uno.Platform.Key.S))
        {
            Player2Pos = float2(Player2Pos.X, Player2Pos.Y + padSpeed);
        }

        player1Rect.Position = Player1Pos;
        player2Rect.Position = Player2Pos;

        // The ball leaving the field horizontally respawns it.
        if (ballRect.Position.X > resolution.X || ballRect.Position.X < 0)
        {
            SpawnBall();
        }

        // Bounce off the top and bottom edges.
        if (ballRect.Position.Y > resolution.Y ||
            ballRect.Position.Y < 0)
        {
            ballVelocity.Y *= -1f;
        }

        // Bounce off either pad.
        if (ballRect.Intersects(player1Rect) ||
            ballRect.Intersects(player2Rect))
        {
            ballVelocity.X *= -1f;
        }

        ballRect.Position += ballVelocity;
    }

    protected override void OnDraw()
    {
        Uno.Drawing.RoundedRectangle.Draw(player1Rect.Position, player1Rect.Size, float4(1f), 0);
        Uno.Drawing.RoundedRectangle.Draw(player2Rect.Position, player2Rect.Size, float4(1f), 0);
        Uno.Drawing.RoundedRectangle.Draw(ballRect.Position, ballRect.Size, float4(1f), 0f);
    }
}
|
||||
}
|
||||
136
samples/Uno/TowerBlock.uno
Normal file
136
samples/Uno/TowerBlock.uno
Normal file
@@ -0,0 +1,136 @@
|
||||
using Uno;
|
||||
using Uno.Collections;
|
||||
using Uno.Graphics;
|
||||
using Uno.Scenes;
|
||||
using Uno.Content;
|
||||
using Uno.Content.Models;
|
||||
using Uno.Physics.Box2D;
|
||||
|
||||
using TowerBuilder.Box2DMath;
|
||||
|
||||
namespace TowerBuilder
|
||||
{
|
||||
// Drops spinning boxes onto a floor; any body that falls past the
// floor and touches the wide "delete" body below it is destroyed.
public class TowerBlock : TestBed
{
    Body floorBody, deleteBody, mouseBody;

    private List<Body> bodies = new List<Body>();
    private List<Body> bodiesToDelete = new List<Body>();

    private ContactListener contactListener;

    protected override void OnInitializeTestBed()
    {
        World.Gravity = float2(0, -25.0f);
        World.ContactListener = contactListener = new ContactListener(this);

        bodies.Clear();
        bodiesToDelete.Clear();

        CreateFloor();
        CreateDeleteBody();
        CreateBox2();
    }

    // Static floor the boxes stack on.
    void CreateFloor()
    {
        var def = new BodyDef();
        def.position = float2(0, -40.0f);

        floorBody = World.CreateBody(def);

        var box = new PolygonShape();
        box.SetAsBox(30.0f, 10.0f);

        var fixture = new FixtureDef();
        fixture.shape = box;
        fixture.density = 1.0f;

        floorBody.CreateFixture(fixture);
    }

    // Very wide body below the floor; contact with it marks a body
    // for removal (see ContactListener).
    void CreateDeleteBody()
    {
        var def = new BodyDef();
        def.position = float2(0, -44.0f);

        deleteBody = World.CreateBody(def);

        var box = new PolygonShape();
        box.SetAsBox(200.0f, 10.0f);

        var fixture = new FixtureDef();
        fixture.shape = box;
        fixture.density = 1.0f;

        deleteBody.CreateFixture(fixture);
    }

    Random random = new Random((int) (Uno.Diagnostics.Clock.GetSeconds() * 1000000));

    // Spawns one spinning dynamic box at a random x position above the scene.
    void CreateBox2()
    {
        var def = new BodyDef();
        def.type = BodyType.Dynamic;
        def.position = float2(random.NextFloat(-25f, 25f), 50.0f);
        def.angularVelocity = random.NextFloat() * 40 - 20;
        def.userData = float3(0, 0, 0);

        var body = World.CreateBody(def);

        var box = new PolygonShape();
        box.SetAsBox(0.75f, 0.75f);

        var fixture = new FixtureDef();
        fixture.shape = box;
        fixture.density = 5.0f;
        //fixture.friction = 0.75f;

        body.CreateFixture(fixture);

        bodies.Add(body);
    }

    private int frameCount = 0;

    protected override void OnFixedUpdate()
    {
        base.OnFixedUpdate();

        debug_log bodies.Count;
        // Spawn a new box every 8th fixed update, capped at 20 live bodies.
        if (frameCount++ % 8 == 0 && bodies.Count < 20) CreateBox2();

        // Destroy flagged bodies here, outside the physics step.
        foreach (var body in bodiesToDelete)
        {
            World.DestroyBody(body);
            bodies.Remove(body);
        }

        bodiesToDelete.Clear();
    }

    // Flags any body contacting the delete body for deferred removal.
    public class ContactListener : IContactListener
    {
        private TowerBlock owner;

        public ContactListener(TowerBlock owner)
        {
            this.owner = owner;
        }

        public void BeginContact(Contact contact)
        {
            if (contact.GetFixtureA().GetBody() == owner.deleteBody)
            {
                owner.bodiesToDelete.Add(contact.GetFixtureB().GetBody());
            }
            else if (contact.GetFixtureB().GetBody() == owner.deleteBody)
            {
                owner.bodiesToDelete.Add(contact.GetFixtureA().GetBody());
            }
        }

        public void EndContact(Contact contact) {}
        public void PreSolve(Contact contact, ref Manifold manifold) {}
        public void PostSolve(Contact contact, ref ContactImpulse impulse) {}
    }
}
|
||||
}
|
||||
57
samples/XML/MainView.ux
Normal file
57
samples/XML/MainView.ux
Normal file
@@ -0,0 +1,57 @@
|
||||
<App Theme="Basic">
|
||||
<DockPanel>
|
||||
<TopFrameBackground Dock="Top" />
|
||||
<Panel>
|
||||
<Rectangle ux:Name="newLayoutMaster" Width="50" Height="50" Color="#8889" Alignment="TopLeft" Margin="20,50"/>
|
||||
<StackPanel Alignment="Top">
|
||||
<TextInput PlaceholderText="Click me to show the keyboard" Alignment="Center">
|
||||
<WhileKeyboardVisible>
|
||||
<Move Target="rect3" RelativeTo="Keyboard" Y="-1" Duration="0.5" />
|
||||
</WhileKeyboardVisible>
|
||||
</TextInput>
|
||||
<Button Text="Click me" Alignment="Center">
|
||||
<WhilePressed>
|
||||
<Move Target="rect1" RelativeTo="Size" Y="-1" Duration="0.5" />
|
||||
<Move Target="rect2" RelativeTo="ParentSize" Y="-1" Duration="0.5" />
|
||||
<Move Target="rect5" RelativeTo="Size" RelativeNode="relativeNode" Y="-1" Duration="0.5" />
|
||||
</WhilePressed>
|
||||
</Button>
|
||||
<Switch>
|
||||
<WhileTrue>
|
||||
<Change rect4.LayoutMaster="newLayoutMaster" />
|
||||
</WhileTrue>
|
||||
</Switch>
|
||||
</StackPanel>
|
||||
<Rectangle ux:Name="relativeNode" Height="400" Width="50" Color="#bbf" Alignment="BottomRight">
|
||||
<VerticalText Alignment="Center" Width="200">relativeNode</VerticalText>
|
||||
</Rectangle>
|
||||
|
||||
<Text ux:Class="VerticalText" TransformOrigin="Center" Alignment="Bottom" TextColor="#000">
|
||||
<Rotation Degrees="-90" />
|
||||
</Text>
|
||||
|
||||
<Grid ColumnCount="5" Rows="3*,1*" Color="#ddd" Width="80%" Height="100%" Alignment="BottomLeft">
|
||||
<VerticalText Width="200">RelativeTo="Size"</VerticalText>
|
||||
<VerticalText Width="200">RelativeTo="ParentSize"</VerticalText>
|
||||
<VerticalText Width="200">RelativeTo="Keyboard"</VerticalText>
|
||||
<VerticalText Width="200">RelativeTo="PositionChange"</VerticalText>
|
||||
<VerticalText Width="200">RelativeNode="relativeNode"</VerticalText>
|
||||
|
||||
|
||||
|
||||
<Rectangle ux:Name="rect1" Width="50" Height="50" Color="#f00" Alignment="Bottom"/>
|
||||
<Rectangle ux:Name="rect2" Width="50" Height="50" Color="#0f0" Alignment="Bottom"/>
|
||||
<Rectangle ux:Name="rect3" Width="50" Height="50" Color="#00f" Alignment="Bottom"/>
|
||||
<Panel Alignment="Bottom" Width="50" Height="50">
|
||||
<Rectangle ux:Name="rect4" Width="50" Height="50" Color="#0ff" LayoutMaster="layoutMaster">
|
||||
<LayoutAnimation>
|
||||
<Move RelativeTo="PositionChange" X="1" Y="1" Duration="0.5" />
|
||||
</LayoutAnimation>
|
||||
</Rectangle>
|
||||
<Rectangle ux:Name="layoutMaster" Width="50" Height="50"/>
|
||||
</Panel>
|
||||
<Rectangle ux:Name="rect5" Width="50" Height="50" Color="#f0f" Alignment="Bottom"/>
|
||||
</Grid>
|
||||
</Panel>
|
||||
</DockPanel>
|
||||
</App>
|
||||
11
samples/XML/MyApp.ux
Normal file
11
samples/XML/MyApp.ux
Normal file
@@ -0,0 +1,11 @@
|
||||
<App Theme="Basic">
|
||||
<EdgeNavigator HitTestMode="LocalBoundsAndChildren">
|
||||
<Panel Width="150" EdgeNavigation.Edge="Left" Background="#f63" />
|
||||
|
||||
<Panel>
|
||||
<Text Alignment="Center">
|
||||
This is an example of EdgeNavigator!
|
||||
</Text>
|
||||
</Panel>
|
||||
</EdgeNavigator>
|
||||
</App>
|
||||
55
samples/YANG/sfc-lisp-impl.yang
Normal file
55
samples/YANG/sfc-lisp-impl.yang
Normal file
@@ -0,0 +1,55 @@
|
||||
module sfc-lisp-impl {
|
||||
|
||||
yang-version 1;
|
||||
namespace "urn:opendaylight:params:xml:ns:yang:controller:config:sfc-lisp:impl";
|
||||
prefix "sfc-lisp-impl";
|
||||
|
||||
import config { prefix config; revision-date 2013-04-05; }
|
||||
import rpc-context { prefix rpcx; revision-date 2013-06-17; }
|
||||
import opendaylight-md-sal-binding { prefix mdsal; revision-date 2013-10-28; }
|
||||
|
||||
|
||||
description
|
||||
"This module contains the base YANG definitions for
|
||||
sfc-lisp implementation.";
|
||||
|
||||
revision "2015-04-27" {
|
||||
description
|
||||
"Initial revision.";
|
||||
}
|
||||
|
||||
// This is the definition of the service implementation as a module identity
|
||||
identity sfc-lisp-impl {
|
||||
base config:module-type;
|
||||
|
||||
// Specifies the prefix for generated java classes.
|
||||
config:java-name-prefix SfcLisp;
|
||||
}
|
||||
|
||||
|
||||
// Augments the 'configuration' choice node under modules/module.
|
||||
augment "/config:modules/config:module/config:configuration" {
|
||||
case sfc-lisp-impl {
|
||||
when "/config:modules/config:module/config:type = 'sfc-lisp-impl'";
|
||||
|
||||
//wires in the data-broker service
|
||||
container data-broker {
|
||||
uses config:service-ref {
|
||||
refine type {
|
||||
mandatory false;
|
||||
config:required-identity mdsal:binding-async-data-broker;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
container rpc-registry {
|
||||
uses config:service-ref {
|
||||
refine type {
|
||||
mandatory true;
|
||||
config:required-identity mdsal:binding-rpc-registry;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
329
samples/reStructuredText/HACKING.rst.txt
Normal file
329
samples/reStructuredText/HACKING.rst.txt
Normal file
@@ -0,0 +1,329 @@
|
||||
Contributing to SciPy
|
||||
=====================
|
||||
|
||||
This document aims to give an overview of how to contribute to SciPy. It
|
||||
tries to answer commonly asked questions, and provide some insight into how the
|
||||
community process works in practice. Readers who are familiar with the SciPy
|
||||
community and are experienced Python coders may want to jump straight to the
|
||||
`git workflow`_ documentation.
|
||||
|
||||
|
||||
Contributing new code
|
||||
---------------------
|
||||
|
||||
If you have been working with the scientific Python toolstack for a while, you
|
||||
probably have some code lying around of which you think "this could be useful
|
||||
for others too". Perhaps it's a good idea then to contribute it to SciPy or
|
||||
another open source project. The first question to ask is then, where does
|
||||
this code belong? That question is hard to answer here, so we start with a
|
||||
more specific one: *what code is suitable for putting into SciPy?*
|
||||
Almost all of the new code added to scipy has in common that it's potentially
|
||||
useful in multiple scientific domains and it fits in the scope of existing
|
||||
scipy submodules. In principle new submodules can be added too, but this is
|
||||
far less common. For code that is specific to a single application, there may
|
||||
be an existing project that can use the code. Some scikits (`scikit-learn`_,
|
||||
`scikits-image`_, `statsmodels`_, etc.) are good examples here; they have a
|
||||
narrower focus and because of that more domain-specific code than SciPy.
|
||||
|
||||
Now if you have code that you would like to see included in SciPy, how do you
|
||||
go about it? After checking that your code can be distributed in SciPy under a
|
||||
compatible license (see FAQ for details), the first step is to discuss on the
|
||||
scipy-dev mailing list. All new features, as well as changes to existing code,
|
||||
are discussed and decided on there. You can, and probably should, already
|
||||
start this discussion before your code is finished.
|
||||
|
||||
Assuming the outcome of the discussion on the mailing list is positive and you
|
||||
have a function or piece of code that does what you need it to do, what next?
|
||||
Before code is added to SciPy, it at least has to have good documentation, unit
|
||||
tests and correct code style.
|
||||
|
||||
1. Unit tests
|
||||
In principle you should aim to create unit tests that exercise all the code
|
||||
that you are adding. This gives some degree of confidence that your code
|
||||
runs correctly, also on Python versions and hardware or OSes that you don't
|
||||
have available yourself. An extensive description of how to write unit
|
||||
tests is given in the NumPy `testing guidelines`_.
|
||||
|
||||
2. Documentation
|
||||
Clear and complete documentation is essential in order for users to be able
|
||||
to find and understand the code. Documentation for individual functions
|
||||
and classes -- which includes at least a basic description, type and
|
||||
meaning of all parameters and returns values, and usage examples in
|
||||
`doctest`_ format -- is put in docstrings. Those docstrings can be read
|
||||
within the interpreter, and are compiled into a reference guide in html and
|
||||
pdf format. Higher-level documentation for key (areas of) functionality is
|
||||
provided in tutorial format and/or in module docstrings. A guide on how to
|
||||
write documentation is given in `how to document`_.
|
||||
|
||||
3. Code style
|
||||
Uniformity of style in which code is written is important to others trying
|
||||
to understand the code. SciPy follows the standard Python guidelines for
|
||||
code style, `PEP8`_. In order to check that your code conforms to PEP8,
|
||||
you can use the `pep8 package`_ style checker. Most IDEs and text editors
|
||||
have settings that can help you follow PEP8, for example by translating
|
||||
tabs by four spaces. Using `pyflakes`_ to check your code is also a good
|
||||
idea.
|
||||
|
||||
At the end of this document a checklist is given that may help to check if your
|
||||
code fulfills all requirements for inclusion in SciPy.
|
||||
|
||||
Another question you may have is: *where exactly do I put my code*? To answer
|
||||
this, it is useful to understand how the SciPy public API (application
|
||||
programming interface) is defined. For most modules the API is two levels
|
||||
deep, which means your new function should appear as
|
||||
``scipy.submodule.my_new_func``. ``my_new_func`` can be put in an existing or
|
||||
new file under ``/scipy/<submodule>/``, its name is added to the ``__all__``
|
||||
list in that file (which lists all public functions in the file), and those
|
||||
public functions are then imported in ``/scipy/<submodule>/__init__.py``. Any
|
||||
private functions/classes should have a leading underscore (``_``) in their
|
||||
name. A more detailed description of what the public API of SciPy is, is given
|
||||
in `SciPy API`_.
|
||||
|
||||
Once you think your code is ready for inclusion in SciPy, you can send a pull
|
||||
request (PR) on Github. We won't go into the details of how to work with git
|
||||
here, this is described well in the `git workflow`_ section of the NumPy
|
||||
documentation and in the Github help pages. When you send the PR for a new
|
||||
feature, be sure to also mention this on the scipy-dev mailing list. This can
|
||||
prompt interested people to help review your PR. Assuming that you already got
|
||||
positive feedback before on the general idea of your code/feature, the purpose
|
||||
of the code review is to ensure that the code is correct, efficient and meets
|
||||
the requirements outlined above. In many cases the code review happens
|
||||
relatively quickly, but it's possible that it stalls. If you have addressed
|
||||
all feedback already given, it's perfectly fine to ask on the mailing list
|
||||
again for review (after a reasonable amount of time, say a couple of weeks, has
|
||||
passed). Once the review is completed, the PR is merged into the "master"
|
||||
branch of SciPy.
|
||||
|
||||
The above describes the requirements and process for adding code to SciPy. It
|
||||
doesn't yet answer the question though how decisions are made exactly. The
|
||||
basic answer is: decisions are made by consensus, by everyone who chooses to
|
||||
participate in the discussion on the mailing list. This includes developers,
|
||||
other users and yourself. Aiming for consensus in the discussion is important
|
||||
-- SciPy is a project by and for the scientific Python community. In those
|
||||
rare cases that agreement cannot be reached, the `maintainers`_ of the module
|
||||
in question can decide the issue.
|
||||
|
||||
|
||||
Contributing by helping maintain existing code
|
||||
----------------------------------------------
|
||||
|
||||
The previous section talked specifically about adding new functionality to
|
||||
SciPy. A large part of that discussion also applies to maintenance of existing
|
||||
code. Maintenance means fixing bugs, improving code quality or style,
|
||||
documenting existing functionality better, adding missing unit tests, keeping
|
||||
build scripts up-to-date, etc. The SciPy `Trac`_ bug tracker contains all
|
||||
reported bugs, build/documentation issues, etc. Fixing issues described in
|
||||
Trac tickets helps improve the overall quality of SciPy, and is also a good way
|
||||
of getting familiar with the project. You may also want to fix a bug because
|
||||
you ran into it and need the function in question to work correctly.
|
||||
|
||||
The discussion on code style and unit testing above applies equally to bug
|
||||
fixes. It is usually best to start by writing a unit test that shows the
|
||||
problem, i.e. it should pass but doesn't. Once you have that, you can fix the
|
||||
code so that the test does pass. That should be enough to send a PR for this
|
||||
issue. Unlike when adding new code, discussing this on the mailing list may
|
||||
not be necessary - if the old behavior of the code is clearly incorrect, no one
|
||||
will object to having it fixed. It may be necessary to add some warning or
|
||||
deprecation message for the changed behavior. This should be part of the
|
||||
review process.
|
||||
|
||||
|
||||
Other ways to contribute
|
||||
------------------------
|
||||
|
||||
There are many ways to contribute other than contributing code. Participating
|
||||
in discussions on the scipy-user and scipy-dev *mailing lists* is a contribution
|
||||
in itself. The `scipy.org`_ *website* contains a lot of information on the
|
||||
SciPy community and can always use a new pair of hands. A redesign of this
|
||||
website is ongoing, see `scipy.github.com`_. The redesigned website is a
|
||||
static site based on Sphinx, the sources for it are
|
||||
also on Github at `scipy.org-new`_.
|
||||
|
||||
The SciPy *documentation* is constantly being improved by many developers and
|
||||
users. You can contribute by sending a PR on Github that improves the
|
||||
documentation, but there's also a `documentation wiki`_ that is very convenient
|
||||
for making edits to docstrings (and doesn't require git knowledge). Anyone can
|
||||
register a username on that wiki, ask on the scipy-dev mailing list for edit
|
||||
rights and make edits. The documentation there is updated every day with the
|
||||
latest changes in the SciPy master branch, and wiki edits are regularly
|
||||
reviewed and merged into master. Another advantage of the documentation wiki
|
||||
is that you can immediately see how the reStructuredText (reST) of docstrings
|
||||
and other docs is rendered as html, so you can easily catch formatting errors.
|
||||
|
||||
Code that doesn't belong in SciPy itself or in another package but helps users
|
||||
accomplish a certain task is valuable. `SciPy Central`_ is the place to share
|
||||
this type of code (snippets, examples, plotting code, etc.).
|
||||
|
||||
|
||||
Useful links, FAQ, checklist
|
||||
----------------------------
|
||||
|
||||
Checklist before submitting a PR
|
||||
````````````````````````````````
|
||||
|
||||
- Are there unit tests with good code coverage?
|
||||
- Do all public functions have docstrings including examples?
|
||||
- Is the code style correct (PEP8, pyflakes)?
|
||||
- Is the new functionality tagged with ``.. versionadded:: X.Y.Z`` (with
|
||||
X.Y.Z the version number of the next release - can be found in setup.py)?
|
||||
- Is the new functionality mentioned in the release notes of the next
|
||||
release?
|
||||
- Is the new functionality added to the reference guide?
|
||||
- In case of larger additions, is there a tutorial or more extensive
|
||||
module-level description?
|
||||
- In case compiled code is added, is it integrated correctly via setup.py
|
||||
(and preferably also Bento/Numscons configuration files)?
|
||||
- If you are a first-time contributor, did you add yourself to THANKS.txt?
|
||||
Please note that this is perfectly normal and desirable - the aim is to
|
||||
give every single contributor credit, and if you don't add yourself it's
|
||||
simply extra work for the reviewer (or worse, the reviewer may forget).
|
||||
- Did you check that the code can be distributed under a BSD license?
|
||||
|
||||
|
||||
Useful SciPy documents
|
||||
``````````````````````
|
||||
|
||||
- The `how to document`_ guidelines
|
||||
- NumPy/SciPy `testing guidelines`_
|
||||
- `SciPy API`_
|
||||
- SciPy `maintainers`_
|
||||
- NumPy/SciPy `git workflow`_
|
||||
|
||||
|
||||
FAQ
|
||||
```
|
||||
|
||||
*I based my code on existing Matlab/R/... code I found online, is this OK?*
|
||||
|
||||
It depends. SciPy is distributed under a BSD license, so if the code that you
|
||||
based your code on is also BSD licensed or has a BSD-compatible license (MIT,
|
||||
Apache, ...) then it's OK. Code which is GPL-licensed, has no clear license,
|
||||
requires citation or is free for academic use only can't be included in SciPy.
|
||||
Therefore if you copied existing code with such a license or made a direct
|
||||
translation to Python of it, your code can't be included. See also `license
|
||||
compatibility`_.
|
||||
|
||||
|
||||
*How do I set up SciPy so I can edit files, run the tests and make commits?*

The simplest method is setting up an in-place build. To create your local git
repo and do the in-place build::

  $ git clone https://github.com/scipy/scipy.git scipy
  $ cd scipy
  $ python setup.py build_ext -i

Then you need to either set up a symlink in your site-packages or add this
directory to your PYTHONPATH environment variable, so Python can find it. Some
IDEs (Spyder for example) have utilities to manage PYTHONPATH. On Linux and OS
X, you can for example edit your .bash_login file to automatically add this dir
on startup of your terminal. Add the line::

  export PYTHONPATH="$HOME/scipy:${PYTHONPATH}"

Alternatively, to set up the symlink, use (prefix only necessary if you want to
use your local instead of global site-packages dir)::

  $ python setupegg.py develop --prefix=${HOME}

To test that everything works, start the interpreter (not inside the scipy/
source dir) and run the tests::

  $ python
  >>> import scipy as sp
  >>> sp.test()

Now editing a Python source file in SciPy allows you to immediately test and
use your changes, by simply restarting the interpreter.

Note that while the above procedure is the most straightforward way to get
started, you may want to look into using Bento or numscons for faster and more
flexible building, or virtualenv to maintain development environments for
multiple Python versions.


*How do I set up a development version of SciPy in parallel to a released
version that I use to do my job/research?*

One simple way to achieve this is to install the released version in
site-packages, by using a binary installer or pip for example, and set up the
development version with an in-place build in a virtualenv. First install
`virtualenv`_ and `virtualenvwrapper`_, then create your virtualenv (named
scipy-dev here) with::

  $ mkvirtualenv scipy-dev

Now, whenever you want to switch to the virtual environment, you can use the
command ``workon scipy-dev``, while the command ``deactivate`` exits from the
virtual environment and brings back your previous shell. With scipy-dev
activated, follow the in-place build with the symlink install above to actually
install your development version of SciPy.


*Can I use a programming language other than Python to speed up my code?*

Yes. The languages used in SciPy are Python, Cython, C, C++ and Fortran. All
of these have their pros and cons. If Python really doesn't offer enough
performance, one of those languages can be used. Important concerns when
using compiled languages are maintainability and portability. For
maintainability, Cython is clearly preferred over C/C++/Fortran. Cython and C
are more portable than C++/Fortran. A lot of the existing C and Fortran code
in SciPy is older, battle-tested code that was only wrapped in (but not
specifically written for) Python/SciPy. Therefore the basic advice is: use
Cython. If there's specific reasons why C/C++/Fortran should be preferred,
please discuss those reasons first.


*There's overlap between Trac and Github, which do I use for what?*

Trac_ is the bug tracker, Github_ the code repository. Before the SciPy code
repository moved to Github, the preferred way to contribute code was to create
a patch and attach it to a Trac ticket. The overhead of this approach is much
larger than sending a PR on Github, so please don't do this anymore. Use Trac
for bug reports, Github for patches.


.. _scikit-learn: http://scikit-learn.org

.. _scikits-image: http://scikits-image.org/

.. _statsmodels: http://statsmodels.sourceforge.net/

.. _testing guidelines: https://github.com/numpy/numpy/blob/master/doc/TESTS.rst.txt

.. _how to document: https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt

.. _PEP8: http://www.python.org/dev/peps/pep-0008/

.. _pep8 package: http://pypi.python.org/pypi/pep8

.. _pyflakes: http://pypi.python.org/pypi/pyflakes

.. _SciPy API: http://docs.scipy.org/doc/scipy/reference/api.html

.. _git workflow: http://docs.scipy.org/doc/numpy/dev/gitwash/index.html

.. _maintainers: https://github.com/scipy/scipy/blob/master/doc/MAINTAINERS.rst.txt

.. _Trac: http://projects.scipy.org/scipy/timeline

.. _Github: https://github.com/scipy/scipy

.. _scipy.org: http://scipy.org/

.. _scipy.github.com: http://scipy.github.com/

.. _scipy.org-new: https://github.com/scipy/scipy.org-new

.. _documentation wiki: http://docs.scipy.org/scipy/Front%20Page/

.. _SciPy Central: http://scipy-central.org/

.. _license compatibility: http://www.scipy.org/License_Compatibility

.. _doctest: http://www.doughellmann.com/PyMOTW/doctest/

.. _virtualenv: http://www.virtualenv.org/

.. _virtualenvwrapper: http://www.doughellmann.com/projects/virtualenvwrapper/