
Add tests to repository

Jan Larres
2013-03-28 00:16:03 +13:00
parent b6f47e4020
commit db9404ca1a
128 changed files with 54624 additions and 0 deletions


@@ -0,0 +1,34 @@
COMPILER ebnf.
CHARACTERS
small = "abcdefghijklmnopqrstuvwxyz";
big = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
alpha = small + big;
dig = "0123456789";
blank = CHR(9) + CHR(10) + CHR(32);
noQuote = ANY - '"'.
COMMENTS
FROM "(*" TO "*)" NESTED.
TOKENS
Nonterminal = small {alpha | dig};
Terminal = big {alpha | dig};
White = blank {blank};
String = '"' { noQuote } '"'.
IGNORE
White + Comment.
PRODUCTIONS
ebnf = {production} ".";
production = Nonterminal "=" expr ";" ;
expr = term {"|" term};
term = factor {factor};
factor = "{" expr "}"
| "[" expr "]"
| "(" expr ")"
| Nonterminal | Terminal | String.
END ebnf.
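(* Example input accepted by this grammar, added for illustration:
expr = term {"|" term};
term = Number | "(" expr ")";
. *)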


@@ -0,0 +1,117 @@
Definitions.
Dig = [0-9]
Big = [A-Z]
Small = [a-z]
WS = [\000-\s]
COMMENT = \(\*\(*([^*)]|[^*]\)|\*[^)])*\**\*\)
STRING = "(\\\^.|\\.|[^"])*"
QUOTE = '(\\\^.|\\.|[^'])*'
Rules.
({Small}({Small}|{Big}|{Dig}|_)*) : {token, {atom,YYline, YYtext}}.
({Big}({Small}|{Big}|{Dig}|_)*) : {token, special(YYtext, YYline)}.
({Dig}{Dig}*) : {token, {integer, YYline, list_to_integer(YYtext)}}.
%% string
{STRING} : %% Strip quotes.
S = lists:sublist(YYtext, 2, length(YYtext) - 2),
{token,{string,YYline,string_gen(S)}}.
{QUOTE} : %% Strip quotes.
S = lists:sublist(YYtext, 2, length(YYtext) - 2),
{token,{quote,YYline,string_gen(S)}}.
{COMMENT} : .
%%---------------------------------------------------------
%% Ignore stuff
%%---------------------------------------------------------
%% "{WHITE}". %% whitespace
%% "#.*". %% Ignore Macro stuff for now
%% "{COMMENT}". %% Ignore Comments
%% C comments are /* ... */
%% Our comments are (* ... *) {we have to quote ( and * yuck
%% i.e. write \* and \( }
%%
%% COMMENT "/\\*/*([^*/]|[^*]/|\\*[^/])*\\**\\*/". (tobbe)
%% COMMENT "(\\*/*([^*)]|[^*])|\\*[^)])*\\**\\*)". (modified)
%% COMMENT "\(\\*/*([^*\)]|[^*]\)|\\*[^\)])*\\**\\*\)". (quoted)
= : {token, {'=', YYline}}.
\+ : {token, {'+', YYline}}.
\- : {token, {'-', YYline}}.
\; : {token, {';', YYline}}.
} : {token, {'}', YYline}}.
{ : {token, {'{', YYline}}.
\[ : {token, {'[', YYline}}.
\] : {token, {']', YYline}}.
\( : {token, {'(', YYline}}.
\) : {token, {')', YYline}}.
\| : {token, {'|', YYline}}.
\: : {token, {':', YYline}}.
(.|\n) : skip_token.
\.[\s\t\n] : {end_token,{'$end', YYline}}.
Erlang code.
string_gen([$\\|Cs]) ->
string_escape(Cs);
string_gen([C|Cs]) ->
[C|string_gen(Cs)];
string_gen([]) -> [].
string_escape([O1,O2,O3|S]) when
O1 >= $0, O1 =< $7, O2 >= $0, O2 =< $7, O3 >= $0, O3 =< $7 ->
[(O1*8 + O2)*8 + O3 - 73*$0|string_gen(S)];
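%% (O1*8 + O2)*8 + O3 - 73*$0 = 64*(O1-$0) + 8*(O2-$0) + (O3-$0),
%% i.e. the value of a three-octal-digit escape (73 = 64 + 8 + 1).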
string_escape([$^,C|Cs]) ->
[C band 31|string_gen(Cs)];
string_escape([C|Cs]) when C >= 0, C =< $ ->
string_gen(Cs);
string_escape([C|Cs]) ->
[escape_char(C)|string_gen(Cs)].
escape_char($n) -> $\n; %\n = LF
escape_char($r) -> $\r; %\r = CR
escape_char($t) -> $\t; %\t = TAB
escape_char($v) -> $\v; %\v = VT
escape_char($b) -> $\b; %\b = BS
escape_char($f) -> $\f; %\f = FF
escape_char($e) -> $\e; %\e = ESC
escape_char($s) -> $ ; %\s = SPC
escape_char($d) -> $\d; %\d = DEL
escape_char(C) -> C.
remove_brackets([_,_|T]) ->
[_,_|T1] = lists:reverse(T),
lists:reverse(T1).
special("COMPILER", Line) -> {'COMPILER', Line};
special("CHARACTERS", Line) -> {'CHARACTERS', Line};
special("COMMENTS", Line) -> {'COMMENTS', Line};
special("FROM", Line) -> {'FROM', Line};
special("TO", Line) -> {'TO', Line};
special("TOKENS", Line) -> {'TOKENS', Line};
special("IGNORE", Line) -> {'IGNORE', Line};
special("PRODUCTIONS", Line) -> {'PRODUCTIONS', Line};
special("END", Line) -> {'END', Line};
special("NESTED", Line) -> {'NESTED', Line};
special("EOL", Line) -> {'EOL', Line};
special("CHR", Line) -> {'CHR', Line};
special("ANY", Line) -> {'ANY', Line};
special(Other, Line) -> {var, Line, Other}.


@@ -0,0 +1,105 @@
Nonterminals
production
form lhs factor
nested syntax char_prods charline char_rhs char_prim
ignore moreignore expr term.
Terminals
atom var string quote '|' '=' '}' '{' '(' ')' '[' ']'
'COMPILER' 'CHARACTERS' 'COMMENTS' 'FROM' 'TO' 'TOKENS'
'IGNORE' 'PRODUCTIONS' 'END' 'NESTED' 'EOL' 'CHR' 'ANY' integer comment
'+' '-' ';'.
Rootsymbol form.
form -> 'COMPILER' atom : {compiler, unwrap('$2')}.
form -> 'CHARACTERS' char_prods : {characters, '$2'}.
form -> 'COMMENTS' 'FROM' string
'TO' string nested : {comments,unwrap('$3'),unwrap('$5'),
'$6'}.
form -> 'TOKENS' syntax : {tokens, '$2'}.
form -> 'IGNORE' ignore : {ignore, '$2'}.
form -> 'PRODUCTIONS' syntax : {syntax, '$2'}.
form -> 'END' atom : {theend, '$2'}.
form -> comment.
nested -> 'NESTED' : nested.
nested -> 'EOL' : eol.
nested -> '$empty' : not_nested.
%% Character syntax
char_prods -> charline ';' char_prods : ['$1'|'$3'].
char_prods -> charline : ['$1'].
charline -> atom '=' char_rhs : {unwrap('$1'), '$3'}.
char_rhs -> char_prim '+' char_rhs : {plus, '$1', '$3'}.
char_rhs -> char_prim '-' char_rhs : {minus, '$1', '$3'}.
char_rhs -> char_prim : '$1'.
char_prim -> 'CHR' '(' integer ')' : {chr, unwrap('$3')}.
char_prim -> string : {string, unwrap('$1')}.
char_prim -> quote : {string, unwrap('$1')}.
char_prim -> atom : {atom, unwrap('$1')}.
char_prim -> 'ANY' : any.
ignore -> var moreignore : [unwrap('$1')|'$2'].
moreignore -> '+' ignore : '$2'.
moreignore -> '$empty' : [].
%% The following definitions are taken from [WIR82]
%% WIR82 N. Wirth: Programming in Modula-2
%% Springer Verlag 1982
%% statement : A syntactic form
%% expression : A list of alternatives
%% term : A concatenation of factors
%% factor : A single syntactic entity or a parenthesized expression
%% Construct
%% =========
%% [ A ] = zero or one A
%% { A } = any number of A's
%% "A" = a string parse tree
%% A | B = A or B parse tree
%% A B = sequence of A followed by B
%% identifier = a name
%% syntax = {production}
%% production = id "=" expr ";"
%% expr = term {"|" term}
%% term = factor {factor}
%% factor = id | string | "{" expr "}" | "[" expr "]" | "(" expr ")"
syntax -> production ';' syntax : ['$1'|'$3'].
syntax -> production : ['$1'].
production -> lhs '=' expr : {prod, '$1', '$3'}.
lhs -> var : unwrap('$1').
lhs -> atom : unwrap('$1').
expr -> term : '$1'.
expr -> term '|' expr : {alt, '$1', '$3'}.
term -> factor : '$1'.
term -> factor term : {seq, '$1', '$2'}.
factor -> atom : {nt, unwrap('$1')}.
factor -> var : {ta, unwrap('$1')}.
factor -> string : {ts, unwrap('$1')}.
factor -> quote : {tq, unwrap('$1')}.
factor -> '[' expr ']' : {one, '$2'}.
factor -> '{' expr '}' : {star, '$2'}.
factor -> '(' expr ')' : {bracket, '$2'}.
Erlang code.
unwrap({_,_,V}) -> V.
simplify({Tag,A,nil}) -> A;
simplify(X) -> X.


@@ -0,0 +1,58 @@
-module(ecc_parse).
-doc([{author, 'Joe Armstrong'},
{title, "Parser for the <b>ecc</b> language."},
{keywords,[ecc,parser,yecc,leex]},
{date, 891106}]).
-export([make/0, file/1]).
%% usage
%% ecc_parse:file(File)
%% Parses File.ecc and returns the parse tree
%% ecc_parse:make()
%% Makes the parser
make() ->
%% The parser is made from
%% ecc.yrl and ecc.xrl
yecc:yecc("ecc", "ecc_yecc"),
c:c(ecc_yecc),
leex:gen(ecc, ecc_lex),
c:c(ecc_lex).
file(F) ->
io:format("Parsing ~s.ecc~n", [F]),
{ok, Stream} = file:open(F ++ ".ecc", read),
Parse = handle(Stream, 1, [], 0),
file:close(Stream),
Parse.
handle(Stream, LineNo, L, NErrors) ->
handle1(io:requests(Stream, [{get_until,foo,ecc_lex,
tokens,[LineNo]}]), Stream, L, NErrors).
handle1({ok, Toks, Next}, Stream, L, Nerrs) ->
case ecc_yecc:parse(Toks) of
{ok, Parse} ->
handle(Stream, Next, [Parse|L], Nerrs);
{error, {Line, Mod, What}} ->
Str = apply(Mod, format_error, [What]),
io:format("** ~w ~s~n", [Line, Str]),
handle(Stream, Next, L, Nerrs+1);
Other ->
io:format("Bad_parse:~p\n", [Other]),
handle(Stream, Next, L, Nerrs+1)
end;
handle1({eof, _}, Stream, L, 0) ->
{ok, lists:reverse(L)};
handle1({eof, _}, Stream, L, N) ->
{error, N};
handle1(What, Stream, L, Nerrs) ->
io:format("Here:~p\n", [What]),
handle(Stream, 1, L, Nerrs+1).
first([H]) -> [];
first([H|T]) -> [H|first(T)];
first([]) -> [].


@@ -0,0 +1,236 @@
-module(ermake).
-doc([{author,'Joe Armstrong'},
{title,"Erlang make utility."},
{keywords, [make]},
{date,981103}]).
-export([all/0, target/1, file/1, file/2]).
-import(lists, [delete/2, filter/2, foldl/3, map/2, max/1, member/2, zf/2]).
%% all() -> Makes first target in EMakefile
%% target(target::string()) -> Makes Target in EMakefile
%% file(file()) -> Makes first target in File
%% file(file(), target::string()) -> Makes Target in File
all() -> file("EMakefile").
target(T) -> file("EMakefile", T).
file(File) -> make(File, top).
file(File, Target) -> make(File, {target, Target}).
make(File, Target) ->
Lines = ermake_parse:parse(File),
Rules = filter(fun(X) -> element(1, X) == make end, Lines),
Suffix = filter(fun(X) -> element(1, X) == suffix end, Lines),
Rules1 = add_extra_rules(Rules, Suffix),
case Target of
top ->
case hd(Rules) of
{make, Ts, _, _} ->
make_everything(Ts, Rules1);
_ ->
nothing_to_do
end;
{target, T} ->
make_everything([T], Rules1)
end.
add_extra_rules(Rules, Suffix) ->
%% If a dependent is mentioned and
%% there is no explicit rule for how to make the dependent then
%% add an extra rule if possible
Targets = [T || {make,Ts,_,_} <- Rules, T <- Ts],
Dependents = [D || {make, _, Ds, _} <- Rules, D <- Ds],
Missing = filter(fun(I) -> not member(I, Targets) end, Dependents),
Missing1 = remove_duplicates(Missing),
Extra = zf(fun(I) -> find_suffix_rule(I, Suffix) end, Missing1),
Rules ++ Extra.
make_everything(Targets, Rules) ->
Ps = [{T, D} || {make, Ts, Ds, _} <- Rules, T <- Ts, D <- Ds],
%% trace all the rules we can reach from the root set
L0 = transitive:closure(Targets, Ps),
L = delete(true, L0),
%% keep those rules that are mentioned in targets or destinations
Ps1 = filter(fun({D,T}) ->
member(D, L) or member(T, L)
end, Ps),
%% reverse the order to build the bottom up tree
Ps2 = map(fun({I,J}) -> {J, I} end, Ps1),
%% order the result
case topological_sort:sort(Ps2) of
{ok, Order0} ->
Order = delete(true, Order0),
%% Order is the absolute order to build things
Cmds = map(fun(I) -> select_rule(I, Rules) end, Order),
foldl(fun do_cmd/2, [], Cmds),
true;
{cycle, Cycle} ->
exit({makefile,contains,cycle,Cycle})
end.
%% find which rule is needed to build Target
select_rule(Target, Rules) ->
Matches = [{make, Ts,Ds,Fun}|| {make,Ts,Ds,Fun} <- Rules,
member(Target, Ts)],
case length(Matches) of
0 -> {file, Target};
1 -> hd(Matches);
_ -> exit({multiple,rules,to,make,Target})
end.
%% do_cmd(cmd(), made()) -> made()
%% cmd() = {make, Targets, Dependents, Fun} | {file, Target}
%% made() = [{Target, time()}].
do_cmd({make, Bins, Srcs, Fun}, Made) ->
case target_time(Bins, Made) of
none ->
eval(Bins, Fun);
{missing, M} ->
eval(Bins, Fun);
{max, TBin} ->
case target_time(Srcs, Made) of
{missing, M} ->
exit({'I don\'t know how to make',M});
{max, TSrc} when TSrc > TBin ->
eval(Bins, Fun);
{max, _} ->
true;
none ->
exit({no,src,Srcs})
end
end,
update_times(Srcs ++ Bins, this_time(), Made);
do_cmd({file,H}, Made) ->
update_times([H], this_time(), Made).
%% target_time(Targets, Made) -> {max, Time} | {missing,M}
%% none
%% if no targets found
%% {missing, M}
%% if target M is missing
%% {max, Time}
%% Time is the last modified time of all the targets
%% the times are derived from either the Made list
%% or from the time stamp of the file.
target_time(Targets, Made) ->
target_time(Targets, Made, []).
target_time([H|T], Made, Times) ->
case make_time(H, Made) of
{yes, Time} ->
target_time(T, Made, [Time|Times]);
no ->
case is_file(H) of
true ->
target_time(T, Made, [last_modified(H)|Times]);
false ->
{missing, H}
end
end;
target_time([], Made, []) ->
none;
target_time([], Made, Times) ->
{max, max(Times)}.
make_time(X, [{X,Time}|_]) -> {yes, Time};
make_time(X, [_|T]) -> make_time(X, T);
make_time(X, []) -> no.
update_times([H|T], Now, Made) ->
case make_time(H, Made) of
{yes, _} -> update_times(T, Now, Made);
no ->
case is_file(H) of
true ->
update_times(T, Now, [{H, last_modified(H)}|Made]);
false ->
update_times(T, Now, [{H, Now}|Made])
end
end;
update_times([], _, Made) ->
Made.
%% see if a suffix rule can be applied to the file D
find_suffix_rule(D, Suffix) ->
Ext = filename:extension(D),
find_suffix_rule(Ext, D, Suffix).
find_suffix_rule(To, D, [{suffix, [From, To], Fun}|_]) ->
Root = filename:rootname(D),
Fun1 = expand_cmd(Fun, Root),
{true, {make, [D], [Root ++ From], Fun1}};
find_suffix_rule(To, D, [_|T]) ->
find_suffix_rule(To, D, T);
find_suffix_rule(_, _, []) ->
false.
expand_cmd([$$,$>|T], Root) ->
Root ++ expand_cmd(T, Root);
expand_cmd([H|T], Root) ->
[H|expand_cmd(T, Root)];
expand_cmd([], _) ->
[].
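%% e.g. expand_cmd("c($>).", "foo") -> "c(foo)." -- each "$>" in a
%% suffix-rule command expands to the root name (the target without
%% its extension).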
eval(_, []) ->
true;
eval(Target, Str) ->
io:format("make ~p ->~n~s~n",[Target, Str]),
case erl_scan:tokens([], "fun() -> " ++ Str ++ " end. ", 1) of
{done, {ok, Toks, _},_} ->
case erl_parse:parse_exprs(Toks) of
{ok, [Parse]} ->
%% io:format("Parse = ~p~n",[Parse]),
Env0 = erl_eval:new_bindings(),
Call = [{call,9999,Parse,[]}],
case erl_eval:exprs(Call, Env0) of
{value, Val, _} ->
Val;
O3 ->
exit({eval,error,O3})
end;
O1 ->
exit({parse,error,o1,O1})
end;
O2 ->
exit({tokenisation,error,O2})
end.
%% Stuff that should have been in the libraries (sigh :-)
last_modified(F) ->
case file:file_info(F) of
{ok, {_, _, _, _, Time, _, _}} ->
Time;
_ ->
exit({last_modified, F})
end.
is_file(File) ->
case file:file_info(File) of
{ok, _} ->
true;
_ ->
false
end.
remove_duplicates(L) ->
foldl(fun(I, Acc) ->
case member(I, Acc) of
true -> Acc;
false -> [I|Acc]
end
end, [], L).
this_time() ->
{Y,M,D} = date(),
{H,Min,S} = time(),
{Y,M,D,H,Min,S}.


@@ -0,0 +1,246 @@
-module(ermake_line_reader).
-doc([{author,'Joe Armstrong'},
{title,"Provide character level input for utilities such as make, lex, yacc etc. This module reads lines up to dot whitespace. Include files and named macros are expanded in place."},
{keywords, [read,line,make,dot,whitespace,forms]},
{date,981028}]).
%% This module provides a common *character level input*
%% For Erlang look-alike utilities such as make, yecc, lex
%% It provides
%% 1) Multiple line input terminated by dot white space
%% 2) Variables VAR = Val, or VAR += Val
%% 3) Include files include("File")
%% 4) comment stripping %... are removed
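%% For illustration, a hypothetical input file using these features
%% (all names made up):
%%   ROOT = /home/joe/src.
%%   include("$(ROOT)/common.inc").
%%   a.beam when a.erl -> c:c(a).   % comments like this are stripped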
-export([test/1, read_file/1, read_file/2]).
-import(lists, [keyreplace/4, keysearch/3, member/2, reverse/1, reverse/2]).
test(1) -> read_file("EMakefile");
test(2) -> read_file("test1").
read_file(File) ->
read_file(File, []).
read_file(File, Macros) ->
{Macros1, Lines} = read_lines(File, Macros, [File]),
%% io:format("Macros were:~p~n",[Macros1]),
trim(Lines).
trim([{{File,Line},Str}|T]) ->
Leading = count_leading_nls(Str, Line),
case trim_line(Str) of
[] -> trim(T);
Str1 -> [{File,Leading,Str1}|trim(T)]
end;
trim([]) -> [].
trim_line(Str) ->
Str1 = skip_white(Str),
trim_end_of_line(Str1).
trim_end_of_line(Str1) ->
case reverse(Str1) of
[X,$.|Tmp] ->
reverse(Tmp);
[] ->
[];
Other ->
exit({oops,Other})
end.
read_lines(File, Macros0, Stack) ->
case file:read_file(File) of
{ok, Bin} ->
Lines = gather_lines(binary_to_list(Bin), File, 1, []),
%% io:format("Lines=~p~n",[Lines]),
expand(Lines, Macros0, Stack, []);
_ ->
exit({cannot,read,file,File})
end.
expand([{Where, H}|T], Macros, Stack, L) ->
%% first expand any macros
H1 = expand_macro(H, Macros, Where),
%% now add any macro definitions
case is_macro_defn(H1) of
{new, Var, Val} ->
case keysearch(Var,1,Macros) of
{value,{_,Val}} ->
%% same value no problem
expand(T, Macros, Stack, L);
{value,{_,Replacement}} ->
%% some other value
exit({error, Where, cannot,redefine,macro,
Var,was,Replacement,is,Val});
false ->
%% new definition
expand(T, [{Var,Val}|Macros], Stack, L)
end;
{plus, Var, Val} ->
case keysearch(Var,1,Macros) of
{value,{_,Old}} ->
%% some other value
Macros1 = keyreplace(Var,1,Macros,{Var,Old++Val}),
expand(T, Macros1, Stack, L);
false ->
exit({error, Where, no,previous,defn,for,Var})
end;
no ->
case is_include(H1, Where) of
{yes, File} ->
case member(File, Stack) of
true ->
exit({error, Where, recursive_include, File});
false ->
{Macros1, Lines1} = read_lines(File,Macros,Stack),
expand(T, Macros1, Stack, reverse(Lines1, L))
end;
no ->
expand(T, Macros, Stack, [{Where, H1}|L])
end
end;
expand([], Macros, Stack, L) ->
{Macros, reverse(L)}.
expand_macro([$$,$(|T], Macros, Where) ->
case is_var(T) of
{yes, Var, [$)|T1]} ->
case keysearch(Var,1,Macros) of
{value,{_,Replacement}} ->
Replacement ++ expand_macro(T1, Macros, Where);
false ->
exit({error,Where,undefined,macro,Var})
end;
no ->
[$$,$(|expand_macro(T, Macros, Where)]
end;
expand_macro([H|T], Macros, Where) ->
[H|expand_macro(T, Macros, Where)];
expand_macro([], Macros, _) ->
[].
is_include(Line, Where) ->
case skip_white(Line) of
[$i,$n,$c,$l,$u,$d,$e,$(,$"|T] ->
{File, T1} = get_quoted([$"|T], Where),
case skip_white(T1) of
[$)|_] ->
{yes, File};
_ ->
exit({Where,bad,include,syntax})
end;
_ ->
no
end.
is_macro_defn(Line) ->
Str1 = skip_white(Line),
case is_var(Str1) of
{yes, Var, Str2} ->
case skip_white(Str2) of
[$=|T] ->
{new, Var, trim_end_of_line(T)};
[$+,$=|T] ->
{plus, Var, trim_end_of_line(T)};
_ ->
no
end;
no ->
no
end.
is_var([H|T]) when $A =< H, H =< $Z ->
collect_var(T, [H]);
is_var(_) ->
no.
collect_var([H|T], L) when $A =< H, H =< $Z ->
collect_var(T, [H|L]);
collect_var([H|T], L) when $1 =< H, H =< $9 ->
collect_var(T, [H|L]);
collect_var([H|T], L) when $a =< H, H =< $z ->
collect_var(T, [H|L]);
collect_var(X, L) ->
{yes, reverse(L), X}.
skip_white([$ |T]) -> skip_white(T);
skip_white([$\n|T]) -> skip_white(T);
skip_white([$\t|T]) -> skip_white(T);
skip_white(T) -> T.
gather_lines([], File, N, L) ->
reverse(L);
gather_lines(Str, File, N, L) ->
{Line, Str1} = get_line(Str, {File, N}, []),
Width = count_nls(Line, 0),
gather_lines(Str1, File, N + Width, [{{File,N},Line}|L]).
count_nls([$\n|T], N) -> count_nls(T, N+1);
count_nls([_|T], N) -> count_nls(T, N);
count_nls([], N) -> N.
count_leading_nls([$\n|T], N) -> count_leading_nls(T, N+1);
count_leading_nls(_, N) -> N.
%% get_line collects a line up to . <white>
get_line([$.,X|T], Where, L) ->
case X of
$\n ->
{reverse(L, [$.,$\n]), T};
$\s ->
{reverse(L, [$.,$\s]), T};
$\t ->
{reverse(L, [$.,$\t]), T};
_ ->
get_line(T, Where, [X,$.|L])
end;
get_line([$"|T], Where, L) ->
{Str, T1} = get_quoted([$"|T], Where),
get_line(T1, Where, [$"|reverse(Str, [$"|L])]);
get_line([$'|T], Where, L) ->
{Str, T1} = get_quoted([$'|T], Where),
get_line(T1, Where,[$'|reverse(Str, [$'|L])]);
get_line([$%|T], Where, L) ->
%% remove the comment
T1 = skip_to_eol(T),
get_line(T1, Where, L);
get_line([H|T], Where, L) ->
get_line(T, Where, [H|L]);
get_line([], Where, L) ->
{reverse(L), []}.
skip_to_eol([$\n|T]) -> [$\n|T];
skip_to_eol([_|T]) -> skip_to_eol(T);
skip_to_eol([]) -> [].
%% get_quoted(string(), {file(),line()}) -> {quoted(), rest()}
%% The " ' is not included
get_quoted([End|T], Where) ->
get_quoted(T, Where, End, []).
get_quoted([End|T], Where, End, Acc) ->
{reverse(Acc), T};
get_quoted([$\\,C|T], Where, End, Acc) ->
get_quoted(T, Where, End, [quoted(C)|Acc]);
get_quoted([$\n|_], {File,Line}, _, _) ->
exit({error, file, File, line, Line,
"newline not allowed in string"});
get_quoted([H|T], Where, End, Acc) ->
get_quoted(T, Where, End, [H|Acc]);
get_quoted([], {File,Line}, _, _) ->
exit({error, file, File, line, Line,
"end of line not allowed in string"}).
%% Quoted characters
quoted($n) -> $\n;
quoted($t) -> $\t;
quoted($r) -> $\r;
quoted($b) -> $\b;
quoted($v) -> $\v;
quoted(C) -> C.


@@ -0,0 +1,73 @@
-module(ermake_parse).
-doc([{author,'Joe Armstrong'},
{title,"Parser used by ermake."},
{keywords, [parser,make]},
{date,981029}]).
-export([parse/1]).
-import(lists, [reverse/1, prefix/2, map/2]).
parse(File) ->
Dir = filename:dirname(code:which(?MODULE)),
Lines = ermake_line_reader:read_file(File, [{"MAKEDIR", Dir}]),
map(fun parse_line/1, Lines).
parse_line({File,Line,Str}) ->
parse_str(Str).
parse_str([$S,$u,$f,$f,$i,$x,$ |T]) ->
%% io:format("Suffix:~s~n",[T]),
case split("->", T) of
{yes, Pre, Fun} ->
Tmp = string:tokens(Pre,". "),
Tmp1 = map(fun(I) -> [$.|I] end, Tmp),
{suffix, Tmp1, Fun};
no ->
exit({'No -> in suffix rule', T})
end;
parse_str(Str) ->
case split("when", Str) of
{yes, As, BsF} ->
case split("->", BsF) of
%% As when Bs -> F
{yes, Bs, F} ->
{make, parse_files(As), parse_files(Bs),
parse_fun(F)};
no ->
%% As when Bs.
{make, parse_files(As), parse_files(BsF), []}
end;
no ->
%% A -> F
case split("->", Str) of
no ->
exit({'No "when" or "->" in rule', Str});
{yes, As, F} ->
{make, parse_files(As), [true], parse_fun(F)}
end
end.
%% split(Prefix, String) -> {yes, Before, After} | no
%% splits String at Prefix
split(Prefix, L) -> split(Prefix, L, []).
split(_, [], L) ->
no;
split(Prefix, L, L1) ->
case prefix(Prefix, L) of
true ->
{yes, reverse(L1), string:substr(L, length(Prefix)+1)};
false ->
split(Prefix, tl(L), [hd(L)|L1])
end.
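%% e.g. split("->", "a.beam -> c:c(a)") -> {yes,"a.beam "," c:c(a)"}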
parse_files(Str) ->
string:tokens(Str, [$ ,$,,$\n,$\t]).
parse_fun([$\n|F]) -> F;
parse_fun(F) -> F.


@@ -0,0 +1,30 @@
-module(error_handler).
-doc([{author,joe},
{title,"Special version of error handler used by sos.erl"},
{date,981012}]).
-export([undefined_function/3,undefined_global_name/2]).
undefined_function(sos, F, A) ->
erlang:display({error_handler,undefined_function,
sos,F,A}),
exit(oops);
undefined_function(M, F, A) ->
case sos:load_module(M) of
{ok, M} ->
case erlang:function_exported(M,F,length(A)) of
true ->
apply(M, F, A);
false ->
sos:stop_system({undef,{M,F,A}})
end;
{ok, Other} ->
sos:stop_system({undef,{M,F,A}});
already_loaded ->
sos:stop_system({undef,{M,F,A}});
{error, What} ->
sos:stop_system({load,error,What})
end.
undefined_global_name(Name, Message) ->
exit({badarg,{Name,Message}}).


@@ -0,0 +1,170 @@
-module(find).
-doc([{author,'Joe Armstrong'},
{title,"Find all files. Find all out of date files.
<p>A find utility which finds all files (and directories)
relative to a given root directory"},
{keywords, [find,make]},
{api,["<b>find:files(\".\", \"*.erl\", false)</b> finds all
entries in the current directory.
Recursive scan of sub-directories is also allowed.",
"find:out_of_date(\".\",\".erl\",\".jam\")</b> finds all out of date
Erlang files in the current directory"]},
{date,970203}]).
-export([files/3, out_of_date/3]).
-import(lists, [suffix/2, sublist/3, map/2, filter/2]).
%% files(Dir, ReExpr, Recursive) -> [File]
%% Find regular files starting from Dir
%% which match ReExpr.
%% If Recursive is true, recurse into all sub-directories.
%% Example: files(".", "*.erl", false) will find all Erlang files in
%% the current directory
%%
%% out_of_date(Dir, SrcExt, ObjExt) find all "out of date files" in
%% Dir.
%% Example:
%% out_of_date(".", ".erl", ".jam")
%% Finds all out of date files in the current directory
files(Dir, Re, Flag) ->
Re1 = string:re_sh_to_awk(Re),
find_files(Dir, Re1, Flag, []).
%% +type find_files(dirname(), Regexp, bool(), [filename()]) -> [filename()]
%% when Regexp = string().
find_files(Dir, Re, Flag, L) ->
case file:list_dir(Dir) of
{ok, Files} -> find_files(Files, Dir, Re, Flag, L);
{error, _} -> L
end.
%% +type find_files([filename()], dirname(), Regexp, bool(), [filename()]) ->
%% [filename()] when Regexp = string().
find_files([File|T], Dir, Re, Recursive, L) ->
FullName = Dir ++ [$/|File],
case file_type(FullName) of
regular ->
case string:re_match(FullName, Re) of
{match, _, _} ->
find_files(T, Dir, Re, Recursive, [FullName|L]);
_ ->
find_files(T, Dir, Re, Recursive, L)
end;
directory ->
case Recursive of
true ->
L1 = find_files(FullName, Re, Recursive, L),
find_files(T, Dir, Re, Recursive, L1);
false ->
find_files(T, Dir, Re, Recursive, L)
end;
error ->
find_files(T, Dir, Re, Recursive, L)
end;
find_files([], _, _, _, L) ->
L.
%% +type file_type(string()) -> regular | directory | error.
file_type(File) ->
case file:file_info(File) of
{ok, Facts} ->
case element(2, Facts) of
regular -> regular;
directory -> directory;
_ -> error
end;
_ ->
error
end.
%%______________________________________________________________________
%% out_of_date(Dir, InExtension, OutExtension)
%% scans Dir for all files with the extension "InExtension".
%% If a file with this extension is found then "OutExtension" is checked.
%%
%% returns a list of files in <Dir> where *.OutExtension is
%% "out of date" with respect to *.InExtension
%% in the sense of "make"
out_of_date(Dir, In, Out) ->
case file:list_dir(Dir) of
{ok, Files0} ->
Files1 = filter(fun(F) ->
suffix(In, F)
end, Files0),
Files2 = map(fun(F) ->
sublist(F, 1,
length(F)-length(In))
end, Files1),
filter(fun(F) -> update(F, In, Out) end,Files2);
_ ->
[]
end.
%% +type update(string(), string(), string()) -> bool().
update(File, In, Out) ->
InFile = File ++ In,
OutFile = File ++ Out,
case is_file(OutFile) of
true ->
case writeable(OutFile) of
true ->
outofdate(InFile, OutFile);
false ->
%% can't write so we can't update
false
end;
false ->
%% doesn't exist
true
end.
%% +type is_file(string()) -> bool().
is_file(File) ->
case file:file_info(File) of
{ok, _} ->
true;
_ ->
false
end.
%% +type outofdate(string(), string()) -> bool().
outofdate(In, Out) ->
case {last_modified(In), last_modified(Out)} of
{T1, T2} when T1 > T2 ->
true;
_ ->
false
end.
%% +type last_modified(string()) -> {int(), int(),int(), int(),int(), int()}
%% | 'EXIT'({last_modified, string()}).
last_modified(F) ->
case file:file_info(F) of
{ok, {_, _, _, _, Time, _, _}} ->
Time;
_ ->
exit({last_modified, F})
end.
%% +type writeable(string()) -> bool().
writeable(F) ->
case file:file_info(F) of
{ok, {_,_,read_write,_,_,_,_}} -> true;
{ok, {_,_,write ,_,_,_,_}} -> true;
_ -> false
end.


@@ -0,0 +1,52 @@
-module(ftp_client).
-doc([{author, joe},
{title, "FTP client in pure Erlang -- an FTP client as it might
have been written, rather than according to RFC 959"},
{keywords,[ftp, client]},
{date, 981014}]).
-export([connect/3, pwd/1, cd/2, ls/1, put/2, get/2, lcd/1, lpwd/0, lls/0,
quit/1]).
connect(Host, User, Password) ->
{ftp_server, Host} ! {connect,self(),User,Password},
receive
{ftp_server, Reply} -> Reply;
Other -> Other
after 10000 ->
timeout
end.
%S tag1
pwd(Handle) -> remote(Handle, pwd).
cd(Handle, Dir) -> remote(Handle, {cd, Dir}).
ls(Handle) -> remote(Handle, ls).
get(Handle, File) -> remote(Handle, {get, File}).
quit(Handle) -> remote(Handle, quit).
%E tag1
%S tag2
lcd(Dir) -> file:set_cwd(Dir), lpwd().
lpwd() -> cwd().
lls() -> element(2, file:list_dir(cwd())).
%E tag2
cwd() -> element(2, file:get_cwd()).
remote(Handle, Op) ->
Handle ! {self(), Op},
receive
{ftp_server, Any} ->
Any
after 1000 ->
timeout
end.
put(Handle, File) ->
case file:read_file(File) of
{ok, Contents} ->
remote(Handle, {put, File, Contents});
Other ->
Other
end.
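%% Hypothetical session (node name and credentials are made up):
%%   {ok, H} = ftp_client:connect(server@host, "joe", "secret"),
%%   ftp_client:ls(H),
%%   ftp_client:quit(H).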


@@ -0,0 +1,123 @@
-module(ftp_server).
%% Look in ~tony/erlang/ftpd/ftpd.erl
%% For filename stuff
-doc([{author, joe},
{title, "FTP server in pure Erlang -- an FTP server as it
might have been written, rather than according to RFC 959"},
{keywords,[ftp, server]},
{date, 981014}]).
-compile(export_all).
-export([start/0, internal/0, handler/1]).
-import(lists, [member/2, reverse/1]).
start() ->
case (catch register(ftp_server,
spawn(?MODULE, internal, []))) of
{'EXIT', _} ->
already_started;
Pid ->
ok
end.
internal() ->
case file:consult("users") of
{ok, Users} ->
process_flag(trap_exit, true),
loop(Users, 0);
_ ->
exit(no_users_allowed)
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
loop(Users, N) ->
receive
{connect, Pid, User, Password} ->
io:format("connection request from:~p ~p ~p~n",
[Pid, User, Password]),
case member({User, Password}, Users) of
true ->
Max = max_connections(),
if
N > Max ->
Pid ! {ftp_server,
{error, too_many_connections}},
loop(Users, N);
true ->
New = spawn_link(?MODULE, handler, [Pid]),
Pid ! {ftp_server, {ok, New}},
loop(Users, N + 1)
end;
false ->
Pid ! {ftp_server, {error, rejected}},
loop(Users, N)
end;
{'EXIT', Pid} ->
io:format("Handler ~p died~n", [Pid]),
loop(Users, lists:max([N-1, 0]));
Any ->
io:format("received:~p~n",[Any]),
loop(Users, N)
end.
handler(Pid) ->
receive
{Pid, quit} ->
Pid ! {ftp_server, ok};
{Pid, Op} ->
io:format("got:~p ~p~n",[Pid, Op]),
Pid ! {ftp_server, do_op(Op)},
handler(Pid)
end.
do_op({cd, Dir}) -> file:set_cwd(Dir), cwd();
do_op(ls) -> element(2, file:list_dir(cwd()));
do_op(pwd) -> cwd();
do_op({get_file, File}) -> file:read_file(File).
max_connections() -> 10.
cwd() -> element(2, file:get_cwd()).
%% This was taken from Tony
%%
%% Compose file/directory names
%%
rel_name(Name, Wd) ->
case filename:pathtype(Name) of
relative ->
rel_path(filename:join(Wd, Name));
absolute ->
rel_path(Name);
volumerelative ->
rel_path(filename:join(Wd,Name))
end.
%%
%% We sometime need a simulated root, then call abs_name
%%
abs_name(Name) ->
filename:join("/", Name).
%%
%% rel_path returns a relative path, i.e. removes
%% any root or volume-relative start components
%%
rel_path(Path) ->
rel_path(filename:split(Path),[]).
%% remove absolute or volume relative stuff
rel_path([Root|Path], RP) ->
case filename:pathtype(Root) of
relative -> rpath(Path, [Root|RP]);
_ -> rpath(Path, RP)
end.
rpath([".."|P], [_|RP]) -> rpath(P, RP);
rpath(["."|P], RP) -> rpath(P, RP);
rpath([F|P], RP) -> rpath(P, [F|RP]);
rpath([],[]) -> "";
rpath([], RP) -> filename:join(reverse(RP)).


@@ -0,0 +1,591 @@
%% THIS IS A PRE-RELEASE OF LEEX - RELEASED ONLY BECAUSE MANY PEOPLE
%% WANTED IT - THE OFFICIAL RELEASE WILL PROVIDE A DIFFERENT INCOMPATIBLE
%% AND BETTER INTERFACE - BE WARNED
%% PLEASE REPORT ALL BUGS TO THE AUTHOR.
%% Copyright (C) 1996, Ellemtel Telecommunications Systems Laboratories
%% File : leex.erl
%% Author : Robert Virding (rv@cslab.ericsson.se)
%% Purpose : A Lexical Analyser Generator for Erlang.
%% Most of the algorithms used here are taken pretty much as
%% described in the "Dragon Book" by Aho, Sethi and Ullman. Some
%% completing details were taken from "Compiler Design in C" by
%% Holub.
-module(leex).
-doc([{author,'Robert Virding'},
{title,"A Lexical Analyser Generator for Erlang"},
{keywords, [lex]},
{date,981012}]).
-copyright('Copyright (c) 1996 Ericsson Telecommunications AB').
-author('rv@cslab.ericsson.se').
-export([gen/2,format_error/1]).
-import(lists, [member/2,reverse/1,seq/2,keymember/3,keysearch/3,keysort/2,
foreach/2]).
-import(ordsets, [is_element/2,add_element/2,union/2,subtract/2]).
%%-compile([export_all]).
-record(nfa_state, {no,edges=[],accept=noaccept}).
-record(dfa_state, {no,nfa=[],trans=[],accept=noaccept}).
gen(In, Out) ->
InFile = lists:concat([In,".xrl"]),
OutFile = lists:concat([Out,".erl"]),
case parse_file(InFile) of
{ok,REAs,Actions,Code} ->
%% io:fwrite("REAs = ~p\n", [REAs]),
%% io:fwrite("Actions = ~p\n", [Actions]),
{NFA,NF} = build_combined_nfa(REAs),
io:fwrite("NFA contains ~w states, ", [size(NFA)]),
%% io:fwrite("NFA = ~p~n ", [NFA]),
{DFA0,DF0} = build_dfa(NFA, NF),
io:fwrite("DFA contains ~w states, ", [length(DFA0)]),
%% io:fwrite("DFA = ~p~n ", [DFA0]),
{DFA,DF} = minimise_dfa(DFA0, DF0),
io:fwrite("minimised to ~w states.~n", [length(DFA)]),
out_file(OutFile, Out, DFA, DF, Actions, Code);
{error,Error} ->
io:put_chars([$\n,gcc_error(InFile, Error),$\n]),
error
end.
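%% e.g. leex:gen(ebnf, ebnf_lex) would read ebnf.xrl and write
%% ebnf_lex.erl (illustrative module names).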
format_error({open,F}) -> ["error opening ",io_lib:write_string(F)];
format_error(missing_rules) -> "missing rules";
format_error(bad_rule) -> "bad rule";
format_error({regexp,E}) -> ["bad regexp `",regexp:format_error(E),"'"];
format_error({after_regexp,S}) ->
["bad code after regexp ",io_lib:write_string(S)].
gcc_error(File, {Line,Mod,Error}) ->
io_lib:format("~s:~w: ~s", [File,Line,apply(Mod, format_error, [Error])]);
gcc_error(File, {Mod,Error}) ->
io_lib:format("~s: ~s", [File,apply(Mod, format_error, [Error])]).
%% parse_file(InFile) -> {[REA],[Action],Code} | {error,Error}
%% when
%% REA = {RegExp,ActionNo};
%% Action = {ActionNo,ActionString};
%% Code = [char()].
%%
%% Read and parse the file InFile.
%% After each section of the file has been parsed we directly call the
%% next section. This is done when we detect a line we don't recognise
%% in the current section. The file format is very simple and Erlang
%% token based, we allow empty lines and Erlang style comments.
parse_file(InFile) ->
case file:open(InFile, read) of
{ok,Ifile} ->
io:fwrite("Parsing file ~s, ", [InFile]),
case parse_head(Ifile) of
{ok,REAs,Actions,Code} ->
io:fwrite("contained ~w rules.~n", [length(REAs)]),
file:close(Ifile),
{ok,REAs,Actions,Code};
Error ->
file:close(Ifile),
Error
end;
{error,R} ->
{error,{leex,{open,InFile}}}
end.
%% parse_head(File)
%% Parse the head of the file.
parse_head(Ifile) ->
parse_defs(Ifile, nextline(Ifile, 0)).
%% parse_defs(File, Line)
%% Parse the macro definition section of a file. Allow no definitions.
parse_defs(Ifile, {ok,[$D,$e,$f,$i,$n,$i,$t,$i,$o,$n,$s,$.|_Rest],L}) ->
parse_defs(Ifile, nextline(Ifile, L), []);
parse_defs(Ifile, Line) ->
parse_rules(Ifile, Line, []).
parse_defs(Ifile, {ok,Chars,L}, Ms) ->
case string:tokens(Chars, " \t\n") of
[Name,"=",Def] ->
parse_defs(Ifile, nextline(Ifile, L), [{Name,Def}|Ms]);
Other ->
parse_rules(Ifile, {ok,Chars,L}, Ms)
end;
parse_defs(Ifile, Line, Ms) ->
parse_rules(Ifile, Line, Ms).
%% parse_rules(File, Line, Macros)
%% Parse the RE rules section of the file. This must exist.
parse_rules(Ifile, {ok,[$R,$u,$l,$e,$s,$.|_Rest],L}, Ms) ->
parse_rules(Ifile, nextline(Ifile, L), Ms, [], [], 0);
parse_rules(Ifile, {ok,Other,L}, Ms) ->
{error,{L,leex,missing_rules}};
parse_rules(Ifile, {eof,L}, Ms) ->
{error,{L,leex,missing_rules}}.
collect_rule(Ifile, Chars, L0) ->
{match,St,Len} = regexp:first_match(Chars, "[^ \t]+"),
%% io:fwrite("RE = ~p~n", [string:substr(Chars, St, Len)]),
case collect_rule(Ifile, string:substr(Chars, St+Len), L0, []) of
{ok,[{':',Lc}|Toks],L1} -> {ok,string:substr(Chars, St, Len),Toks,L1};
{ok,Toks,L1} -> {error,{L0,leex,bad_rule}};
{eof,L1} -> {error,{L1,leex,bad_rule}};
{error,E,L1} -> {error,E}
end.
collect_rule(Ifile, Chars, L0, Cont0) ->
case erl_scan:tokens(Cont0, Chars, L0) of
{done,{ok,Toks,L1},Rest} -> {ok,Toks,L0};
{done,{eof,L1},Rest} -> {eof,L0};
{done,{error,E,L1},Rest} -> {error,E,L0};
{more,Cont1} ->
collect_rule(Ifile, io:get_line(Ifile, leex), L0+1, Cont1)
end.
parse_rules(Ifile, {ok,[$E,$r,$l,$a,$n,$g,$ ,$c,$o,$d,$e,$.|_Rest],L},
Ms, REAs, As, N) ->
%% Must be careful to put rules in correct order!
parse_code(Ifile, L, reverse(REAs), reverse(As));
parse_rules(Ifile, {ok,Chars,L0}, Ms, REAs, As, N) ->
%% io:fwrite("~w: ~p~n", [L0,Chars]),
case collect_rule(Ifile, Chars, L0) of
{ok,Re,Atoks,L1} ->
case parse_rule(Re, L0, Atoks, Ms, N) of
{ok,REA,A} ->
parse_rules(Ifile, nextline(Ifile, L1), Ms,
[REA|REAs], [A|As], N+1);
{error,E} -> {error,E}
end;
{error,E} -> {error,E}
end;
parse_rules(Ifile, {eof,Line}, Ms, REAs, As, N) ->
%% Must be careful to put rules in correct order!
{ok,reverse(REAs),reverse(As),[]}.
%% parse_rule(RegExpString, RegExpLine, ActionTokens, Macros, Counter)
%% Parse one regexp after performing macro substitution.
parse_rule(S, Line, [{dot,Ld}], Ms, N) ->
case parse_rule_regexp(S, Ms) of
{ok,R} ->
{ok,{R,N},{N,empty_action}};
{error,E} ->
{error,{Line,leex,{regexp,E}}}
end;
parse_rule(S, Line, Atoks, Ms, N) ->
case parse_rule_regexp(S, Ms) of
{ok,R} ->
case erl_parse:parse_exprs(Atoks) of
{ok,Aes} ->
YYtext = keymember('YYtext', 3, Atoks),
{ok,{R,N},{N,Aes,YYtext}};
{error,E} ->
{error,{Line,leex,{after_regexp,S}}}
end;
{error,E} ->
{error,{Line,leex,{regexp,E}}}
end.
parse_rule_regexp(RE0, [{M,Exp}|Ms]) ->
case regexp:gsub(RE0, "{" ++ M ++ "}", Exp) of
{ok,RE,N} -> parse_rule_regexp(RE, Ms);
{error,E} -> parse_rule_regexp(RE0, Ms)
end;
parse_rule_regexp(RE, []) ->
%% io:fwrite("RE = ~p~n", [RE]),
regexp:parse(RE).
%% parse_code(File, Line, REAs, Actions)
%% Parse the code section of the file.
parse_code(Ifile, Line, REAs, As) ->
{ok,REAs,As,io:get_chars(Ifile, leex, 102400)}.
%% nextline(InputFile, PrevLineNo) -> {ok,Chars,LineNo} | {eof,LineNo}.
%% Get the next line skipping comment lines and blank lines.
nextline(Ifile, L) ->
case io:get_line(Ifile, leex) of
eof -> {eof,L};
Chars ->
case skip(Chars, " \t\n") of
[$%|_Rest] -> nextline(Ifile, L+1);
[] -> nextline(Ifile, L+1);
Other -> {ok,Chars,L+1}
end
end.
%% skip(Str, Cs) -> lists:dropwhile(fun (C) -> member(C, Cs) end, Str).
skip([C|Str], Cs) ->
case member(C, Cs) of
true -> skip(Str, Cs);
false -> [C|Str]
end;
skip([], Cs) -> [].
%% build_combined_nfa(RegExpActionList) -> {NFA,FirstState}. Build
%% the combined NFA using Thompson's construction straight out of the
%% book. Build the separate NFAs in the same order as the rules so
%% that the accepting states have ascending state
%% numbers. Start numbering the states from 1 as we put the states
%% in a tuple with the state number as the index.
build_combined_nfa(REAs) ->
{NFA0,Firsts,Free} = build_nfa_list(REAs, [], [], 1),
F = #nfa_state{no=Free,edges=epsilon_trans(Firsts)},
{list_to_tuple(keysort(#nfa_state.no, [F|NFA0])),Free}.
build_nfa_list([{RE,Action}|REAs], NFA0, Firsts, Free0) ->
{NFA1,Free1,First} = build_nfa(RE, Free0, Action),
build_nfa_list(REAs, NFA1 ++ NFA0, [First|Firsts], Free1);
build_nfa_list([], NFA, Firsts, Free) ->
{NFA,reverse(Firsts),Free}.
epsilon_trans(Firsts) -> [ {epsilon,F} || F <- Firsts ].
%% {NFA,NextFreeState,FirstState} = build_nfa(RegExp, FreeState, Action)
%% When building the NFA states for a ??? we don't build the end
%% state, just allocate a State for it and return this state
%% number. This allows us to avoid building unnecessary states for
%% concatenation which would then have to be removed by overwriting
%% an existing state.
build_nfa(RE, FreeState, Action) ->
{NFA,N,Es} = build_nfa(RE, FreeState+1, FreeState, []),
{[#nfa_state{no=Es,accept={accept,Action}}|NFA],N,FreeState}.
%% build_nfa(RegExp, NextState, FirstState, NFA) -> {NFA,NextState,EndState}.
%% The NFA is a list of nfa_state is no predefined order. The state
%% number of the returned EndState is already allocated!
build_nfa({'or',RE1,RE2}, N0, Fs, NFA0) ->
{NFA1,N1,Es1} = build_nfa(RE1, N0+1, N0, NFA0),
{NFA2,N2,Es2} = build_nfa(RE2, N1+1, N1, NFA1),
Es = N2,
{[#nfa_state{no=Fs,edges=[{epsilon,N0},{epsilon,N1}]},
#nfa_state{no=Es1,edges=[{epsilon,Es}]},
#nfa_state{no=Es2,edges=[{epsilon,Es}]}|NFA2],
N2+1,Es};
build_nfa({concat,RE1, RE2}, N0, Fs, NFA0) ->
{NFA1,N1,Es1} = build_nfa(RE1, N0, Fs, NFA0),
{NFA2,N2,Es2} = build_nfa(RE2, N1, Es1, NFA1),
{NFA2,N2,Es2};
build_nfa({kclosure,RE}, N0, Fs, NFA0) ->
{NFA1,N1,Es1} = build_nfa(RE, N0+1, N0, NFA0),
Es = N1,
{[#nfa_state{no=Fs,edges=[{epsilon,N0},{epsilon,Es}]},
#nfa_state{no=Es1,edges=[{epsilon,N0},{epsilon,Es}]}|NFA1],
N1+1,Es};
build_nfa({pclosure,RE}, N0, Fs, NFA0) ->
{NFA1,N1,Es1} = build_nfa(RE, N0+1, N0, NFA0),
Es = N1,
{[#nfa_state{no=Fs,edges=[{epsilon,N0}]},
#nfa_state{no=Es1,edges=[{epsilon,N0},{epsilon,Es}]}|NFA1],
N1+1,Es};
build_nfa({optional,RE}, N0, Fs, NFA0) ->
{NFA1,N1,Es1} = build_nfa(RE, N0+1, N0, NFA0),
Es = N1,
{[#nfa_state{no=Fs,edges=[{epsilon,N0},{epsilon,Es}]},
#nfa_state{no=Es1,edges=[{epsilon,Es}]}|NFA1],
N1+1,Es};
build_nfa({char_class,Cc}, N, Fs, NFA) ->
{[#nfa_state{no=Fs,edges=[{char_class(Cc),N}]}|NFA],N+1,N};
build_nfa({comp_class,Cc}, N, Fs, NFA) ->
{[#nfa_state{no=Fs,edges=[{comp_class(Cc),N}]}|NFA],N+1,N};
build_nfa(C, N, Fs, NFA) when integer(C) ->
{[#nfa_state{no=Fs,edges=[{[C],N}]}|NFA],N+1,N}.
char_class(Cc) ->
lists:foldl(fun ({C1,C2}, Set) -> union(seq(C1, C2), Set);
(C, Set) -> add_element(C, Set) end, [], Cc).
comp_class(Cc) -> subtract(seq(0, 255), char_class(Cc)).
%% build_dfa(NFA, NfaFirstState) -> {DFA,DfaFirstState}.
%% Build a DFA from an NFA using "subset construction". The major
%% difference from the book is that we keep the marked and unmarked
%% DFA states in separate lists. New DFA states are added to the
%% unmarked list and states are marked by moving them to the marked
%% list. We assume that the NFA accepting state numbers are in
%% ascending order for the rules and use ordsets to keep this order.
build_dfa(NFA, Nf) ->
D = #dfa_state{no=0,nfa=eclosure([Nf], NFA)},
{build_dfa([D], 1, [], NFA),0}.
%% build_dfa([UnMarked], NextState, [Marked], NFA) -> DFA.
%% Traverse the unmarked states. Temporarily add the current unmarked
%% state to the marked list before calculating transitions; this is
%% to avoid adding too many duplicate states. Add it properly to the
%% marked list afterwards with the correct transitions.
build_dfa([U|Us0], N0, Ms, NFA) ->
{Ts,Us1,N1} = build_dfa(255, U#dfa_state.nfa, Us0, N0, [], [U|Ms], NFA),
M = U#dfa_state{trans=Ts,accept=accept(U#dfa_state.nfa, NFA)},
build_dfa(Us1, N1, [M|Ms], NFA);
build_dfa([], N, Ms, NFA) -> Ms.
%% build_dfa(Char, [NfaState], [Unmarked], NextState, [Transition], [Marked], NFA) ->
%% [Marked].
%% For each NFA state set calculate the legal transitions. N.B. we must
%% search *BOTH* the unmarked and marked lists to check if a DFA state
%% already exists. By testing characters downwards and prepending
%% transitions we get the transition lists in ascending order.
build_dfa(C, Set, Us, N, Ts, Ms, NFA) when C >= 0 ->
case eclosure(move(Set, C, NFA), NFA) of
S when S /= [] ->
case keysearch(S, #dfa_state.nfa, Us) of
{value,#dfa_state{no=T}} ->
build_dfa(C-1, Set, Us, N, [{C,T}|Ts], Ms, NFA);
false ->
case keysearch(S, #dfa_state.nfa, Ms) of
{value,#dfa_state{no=T}} ->
build_dfa(C-1, Set, Us, N, [{C,T}|Ts], Ms, NFA);
false ->
U = #dfa_state{no=N,nfa=S},
build_dfa(C-1, Set, [U|Us], N+1, [{C,N}|Ts], Ms, NFA)
end
end;
[] ->
build_dfa(C-1, Set, Us, N, Ts, Ms, NFA)
end;
build_dfa(-1, Set, Us, N, Ts, Ms, NFA) ->
{Ts,Us,N}.
%% eclosure([State], NFA) -> [State].
%% move([State], Char, NFA) -> [State].
%% These are straight out of the book. As eclosure uses ordsets then
%% the generated state sets are in ascending order.
eclosure(Sts, NFA) -> eclosure(Sts, NFA, []).
eclosure([St|Sts], NFA, Ec) ->
#nfa_state{edges=Es} = element(St, NFA),
eclosure([ N || {epsilon,N} <- Es,
nnot(is_element(N, Ec)) ] ++ Sts,
NFA, add_element(St, Ec));
eclosure([], NFA, Ec) -> Ec.
nnot(true) -> false;
nnot(false) -> true.
move(Sts, C, NFA) ->
[St || N <- Sts,
{C1,St} <- (element(N, NFA))#nfa_state.edges,
list(C1),
member(C, C1) ].
%% accept([State], NFA) -> {accept,A} | noaccept.
%% Scan down the state list until we find an accepting state.
accept([St|Sts], NFA) ->
case element(St, NFA) of
#nfa_state{accept={accept,A}} -> {accept,A};
#nfa_state{accept=noaccept} -> accept(Sts, NFA)
end;
accept([], NFA) -> noaccept.
%% minimise_dfa(DFA, DfaFirst) -> {DFA,DfaFirst}.
%% Minimise the DFA by removing equivalent states. Two states are
%% equivalent if both their transitions and their accept field are the
%% same. First repeatedly run through the DFA state list removing
%% equivalent states and updating remaining transitions with
%% remaining equivalent state numbers. When no more reductions are
%% possible then pack the remaining state numbers to get consecutive
%% states.
minimise_dfa(DFA0, Df0) ->
case min_dfa(DFA0) of
{DFA1,[]} -> %No reduction!
{DFA2,Rs} = pack_dfa(DFA1),
{min_update(DFA2, Rs),min_use(Df0, Rs)};
{DFA1,Rs} ->
minimise_dfa(min_update(DFA1, Rs), min_use(Df0, Rs))
end.
min_dfa(DFA) -> min_dfa(DFA, [], []).
min_dfa([D|DFA0], Rs0, MDFA) ->
{DFA1,Rs1} = min_delete(DFA0, D#dfa_state.trans, D#dfa_state.accept,
D#dfa_state.no, Rs0, []),
min_dfa(DFA1, Rs1, [D|MDFA]);
min_dfa([], Rs, MDFA) -> {MDFA,Rs}.
min_delete([#dfa_state{no=N,trans=T,accept=A}|DFA], T, A, NewN, Rs, MDFA) ->
min_delete(DFA, T, A, NewN, [{N,NewN}|Rs], MDFA);
min_delete([D|DFA], T, A, NewN, Rs, MDFA) ->
min_delete(DFA, T, A, NewN, Rs, [D|MDFA]);
min_delete([], T, A, NewN, Rs, MDFA) -> {MDFA,Rs}.
min_update(DFA, Rs) ->
[ D#dfa_state{trans=min_update_trans(D#dfa_state.trans, Rs)} || D <- DFA ].
min_update_trans(Tr, Rs) ->
[ {C,min_use(S, Rs)} || {C,S} <- Tr ].
min_use(Old, [{Old,New}|Reds]) -> New;
min_use(Old, [R|Reds]) -> min_use(Old, Reds);
min_use(Old, []) -> Old.
pack_dfa(DFA) -> pack_dfa(DFA, 0, [], []).
pack_dfa([D|DFA], NewN, Rs, PDFA) ->
pack_dfa(DFA, NewN+1, [{D#dfa_state.no,NewN}|Rs], [D#dfa_state{no=NewN}|PDFA]);
pack_dfa([], NewN, Rs, PDFA) -> {PDFA,Rs}.
%% out_file(FileName, Module, DFA, DfaStart, [Action], Code) -> ok | error.
out_file(OutFile, Out, DFA, DF, Actions, Code) ->
io:fwrite("Writing file ~s, ", [OutFile]),
case file:path_open([".", [code:lib_dir(),"/tools/include"]],
"leex.hrl", read) of
{ok,Ifile,Iname} ->
case file:open(OutFile, write) of
{ok,Ofile} ->
out_file(Ifile, Ofile, Out, DFA, DF, Actions, Code),
file:close(Ifile),
file:close(Ofile),
io:fwrite("ok~n"),
ok;
{error,E} ->
file:close(Ifile),
io:fwrite("open error~n"),
error
end;
{error,R} ->
io:fwrite("open error~n"),
error
end.
%% out_file(IncFile, OutFile, DFA, DfaStart, Actions, Code) -> ok.
%% Copy the include file line by line substituting special lines with
%% generated code. We cheat by only looking at the first 5
%% characters.
out_file(Ifile, Ofile, Out, DFA, DF, Actions, Code) ->
case io:get_line(Ifile, leex) of
eof -> ok;
Line ->
case string:substr(Line, 1, 5) of
"##mod" -> io:fwrite(Ofile, "-module(~w).~n", [Out]);
"##cod" -> io:put_chars(Ofile, Code);
"##dfa" -> out_dfa(Ofile, DFA, DF);
"##act" -> out_actions(Ofile, Actions);
Other -> io:put_chars(Ofile, Line)
end,
out_file(Ifile, Ofile, Out, DFA, DF, Actions, Code)
end.
out_dfa(File, DFA, DF) ->
io:fwrite(File, "yystate() -> ~w.~n~n", [DF]),
foreach(fun (S) -> out_trans(File, S) end, DFA),
io:fwrite(File, "yystate(S, Ics, Line, Tlen, Action, Alen) ->~n", []),
io:fwrite(File, " {Action,Alen,Tlen,Ics,Line,S}.~n~n", []).
out_trans(File, #dfa_state{no=N,trans=[],accept={accept,A}}) ->
io:fwrite(File, "yystate(~w, Ics, Line, Tlen, Action, Alen) ->~n", [N]),
io:fwrite(File, " {~w,Tlen,Ics,Line};~n", [A]);
out_trans(File, #dfa_state{no=N,trans=Tr,accept={accept,A}}) ->
foreach(fun (T) -> out_tran(File, N, A, T) end, pack_trans(Tr)),
io:fwrite(File, "yystate(~w, Ics, Line, Tlen, Action, Alen) ->~n", [N]),
io:fwrite(File, " {~w,Tlen,Ics,Line,~w};~n", [A,N]);
out_trans(File, #dfa_state{no=N,trans=Tr,accept=noaccept}) ->
foreach(fun (T) -> out_tran(File, N, T) end, pack_trans(Tr)),
io:fwrite(File, "yystate(~w, Ics, Line, Tlen, Action, Alen) ->~n", [N]),
io:fwrite(File, " {Action,Alen,Tlen,Ics,Line,~w};~n", [N]).
out_tran(File, N, A, {{Cf,Cl},S}) ->
out_head(File, N, io_lib:write_char(Cf), io_lib:write_char(Cl)),
out_body(File, S, "Line", "C", A);
out_tran(File, N, A, {$\n,S}) ->
out_head(File, N, "$\\n"),
out_body(File, S, "Line+1", "$\\n", A);
out_tran(File, N, A, {C,S}) ->
Char = io_lib:write_char(C),
out_head(File, N, Char),
out_body(File, S, "Line", Char, A).
out_tran(File, N, {{Cf,Cl},S}) ->
out_head(File, N, io_lib:write_char(Cf), io_lib:write_char(Cl)),
out_body(File, S, "Line", "C");
out_tran(File, N, {$\n,S}) ->
out_head(File, N, "$\\n"),
out_body(File, S, "Line+1", "$\\n");
out_tran(File, N, {C,S}) ->
Char = io_lib:write_char(C),
out_head(File, N, Char),
out_body(File, S, "Line", Char).
out_head(File, State, Char) ->
io:fwrite(File, "yystate(~w, [~s|Ics], Line, Tlen, Action, Alen) ->\n",
[State,Char]).
out_head(File, State, Min, Max) ->
io:fwrite(File, "yystate(~w, [C|Ics], Line, Tlen, Action, Alen) when C >= ~s, C =< ~s ->\n",
[State,Min,Max]).
out_body(File, Next, Line, C, Action) ->
io:fwrite(File, " yystate(~w, Ics, ~s, Tlen+1, ~w, Tlen);\n",
[Next,Line,Action]).
out_body(File, Next, Line, C) ->
io:fwrite(File, " yystate(~w, Ics, ~s, Tlen+1, Action, Alen);\n",
[Next,Line]).
%% pack_trans([{Char,State}]) -> [{Crange,State}] when
%% Crange = {Char,Char} | Char.
%% Pack the transition table into something more suitable for
%% generating code. Ranges of characters with the same State are
%% packed together, while solitary characters are left "as is". We
%% KNOW how the pattern matching compiler works so solitary
%% characters are stored before ranges. We do this using ordsets for
%% the packed table. Always break out $\n as a solitary character.
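%% e.g. pack_trans([{$a,1},{$b,1},{$c,1},{$x,2}]) ->
%% [{$x,2},{{$a,$c},1}]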
pack_trans([{C,S}|Tr]) -> pack_trans(Tr, C, C, S, []);
pack_trans([]) -> [].
pack_trans([{$\n,S1}|Tr], Cf, Cl, S, Pt) ->
pack_trans(Cf, Cl, S, add_element({$\n,S1}, pack_trans(Tr)));
pack_trans([{C,S}|Tr], Cf, Cl, S, Pt) when C == Cl + 1 ->
pack_trans(Tr, Cf, C, S, Pt);
pack_trans([{C,S1}|Tr], Cf, Cl, S, Pt) ->
pack_trans(Tr, C, C, S1, pack_trans(Cf, Cl, S, Pt));
pack_trans([], Cf, Cl, S, Pt) -> pack_trans(Cf, Cl, S, Pt).
pack_trans(Cf, Cf, S, Pt) -> add_element({Cf,S}, Pt);
pack_trans(Cf, Cl, S, Pt) when Cl == Cf + 1 ->
add_element({Cf,S}, add_element({Cl,S}, Pt));
pack_trans(Cf, Cl, S, Pt) -> add_element({{Cf,Cl},S}, Pt).
out_actions(File, As) ->
foreach(fun (A) -> out_action(File, A) end, As),
io:fwrite(File, "yyaction(_, _, _, _) -> error.~n", []).
out_action(File, {A,empty_action}) ->
io:fwrite(File, "yyaction(~w, YYlen, YYtcs, YYline) -> skip_token;~n", [A]);
out_action(File, {A,Code,YYtext}) ->
io:fwrite(File, "yyaction(~w, YYlen, YYtcs, YYline) ->~n", [A]),
if
YYtext == true ->
io:fwrite(File, " YYtext = yypre(YYtcs, YYlen),~n", []);
YYtext == false -> ok
end,
io:fwrite(File, " ~s;~n", [erl_pp:exprs(Code, 4, none)]).


@@ -0,0 +1,200 @@
%% THIS IS A PRE-RELEASE OF LEEX - RELEASED ONLY BECAUSE MANY PEOPLE
%% WANTED IT - THE OFFICIAL RELEASE WILL PROVIDE A DIFFERENT INCOMPATIBLE
%% AND BETTER INTERFACE - BE WARNED
%% PLEASE REPORT ALL BUGS TO THE AUTHOR.
##module
-export([string/1,string/2,token/2,token/3,tokens/2,tokens/3]).
-export([format_error/1]).
%% User code. This is placed here to allow extra attributes.
##code
format_error({illegal,S}) -> ["illegal characters ",io_lib:write_string(S)];
format_error({user,S}) -> S.
string(String) -> string(String, 1).
string(String, Line) -> string(String, Line, String, []).
%% string(InChars, Line, TokenChars, Tokens) ->
%% {ok,Tokens,Line} | {error,ErrorInfo,Line}.
string([], L, [], Ts) -> %No partial tokens!
{ok,yyrev(Ts),L};
string(Ics0, L0, Tcs, Ts) ->
case yystate(yystate(), Ics0, L0, 0, reject, 0) of
{A,Alen,Ics1,L1} -> %Accepting end state
string_cont(Ics1, L1, yyaction(A, Alen, Tcs, L1), Ts);
{A,Alen,Ics1,L1,S1} -> %After an accepting state
string_cont(Ics1, L1, yyaction(A, Alen, Tcs, L1), Ts);
{reject,Alen,Tlen,Ics1,L1,S1} ->
{error,{L1,?MODULE,{illegal,yypre(Tcs, Tlen+1)}},L1};
{A,Alen,Tlen,Ics1,L1,S1} ->
string_cont(yysuf(Tcs, Alen), L1, yyaction(A, Alen, Tcs, L1), Ts)
end.
%% string_cont(RestChars, Line, Token, Tokens)
%% Test for and remove the end token wrapper.
string_cont(Rest, Line, {token,T}, Ts) ->
string(Rest, Line, Rest, [T|Ts]);
string_cont(Rest, Line, {end_token,T}, Ts) ->
string(Rest, Line, Rest, [T|Ts]);
string_cont(Rest, Line, skip_token, Ts) ->
string(Rest, Line, Rest, Ts);
string_cont(Rest, Line, {error,S}, Ts) ->
{error,{Line,?MODULE,{user,S}},Line}.
%% token(Continuation, Chars, Line) ->
%% {more,Continuation} | {done,ReturnVal,RestChars}.
%% Must be careful when re-entering to append the latest characters to the
%% after characters in an accept.
token(Cont, Chars) -> token(Cont, Chars, 1).
token([], Chars, Line) ->
token(Chars, Line, yystate(), Chars, 0, reject, 0);
token({Line,State,Tcs,Tlen,Action,Alen}, Chars, _) ->
token(Chars, Line, State, Tcs ++ Chars, Tlen, Action, Alen).
%% token(InChars, Line, State, TokenChars, TokenLen, Accept) ->
%% {more,Continuation} | {done,ReturnVal,RestChars}.
token(Ics0, L0, S0, Tcs, Tlen0, A0, Alen0) ->
case yystate(S0, Ics0, L0, Tlen0, A0, Alen0) of
{A1,Alen1,Ics1,L1} -> %Accepting end state
token_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, L1));
{A1,Alen1,[],L1,S1} -> %After an accepting state
{more,{L1,S1,Tcs,Alen1,A1,Alen1}};
{A1,Alen1,Ics1,L1,S1} ->
token_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, L1));
{A1,Alen1,Tlen1,[],L1,S1} -> %After a non-accepting state
{more,{L1,S1,Tcs,Tlen1,A1,Alen1}};
{reject,Alen1,Tlen1,eof,L1,S1} ->
{done,{eof,L1},[]};
{reject,Alen1,Tlen1,Ics1,L1,S1} ->
{done,{error,{L1,?MODULE,{illegal,yypre(Tcs, Tlen1+1)}},L1},Ics1};
{A1,Alen1,Tlen1,Ics1,L1,S1} ->
token_cont(yysuf(Tcs, Alen1), L1, yyaction(A1, Alen1, Tcs, L1))
end.
%% tokens_cont(RestChars, Line, Token)
%% Test if we have detected the end token, if so return done else continue.
token_cont(Rest, Line, {token,T}) ->
{done,{ok,T,Line},Rest};
token_cont(Rest, Line, {end_token,T}) ->
{done,{ok,T,Line},Rest};
token_cont(Rest, Line, skip_token) ->
token(Rest, Line, yystate(), Rest, 0, reject, 0);
token_cont(Rest, Line, {error,S}) ->
{done,{error,{Line,?MODULE,{user,S}},Line},Rest}.
%% tokens(Continuation, Chars, Line) ->
%% {more,Continuation} | {done,ReturnVal,RestChars}.
%% Must be careful when re-entering to append the latest characters to the
%% after characters in an accept.
tokens(Cont, Chars) -> tokens(Cont, Chars, 1).
tokens([], Chars, Line) ->
tokens(Chars, Line, yystate(), Chars, 0, [], reject, 0);
tokens({tokens,Line,State,Tcs,Tlen,Ts,Action,Alen}, Chars, _) ->
tokens(Chars, Line, State, Tcs ++ Chars, Tlen, Ts, Action, Alen);
tokens({skip_tokens,Line,State,Tcs,Tlen,Error,Action,Alen}, Chars, _) ->
skip_tokens(Chars, Line, State, Tcs ++ Chars, Tlen, Error, Action, Alen).
%% tokens(InChars, Line, State, TokenChars, TokenLen, Tokens, Accept) ->
%% {more,Continuation} | {done,ReturnVal,RestChars}.
tokens(Ics0, L0, S0, Tcs, Tlen0, Ts, A0, Alen0) ->
case yystate(S0, Ics0, L0, Tlen0, A0, Alen0) of
{A1,Alen1,Ics1,L1} -> %Accepting end state
tokens_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, L1), Ts);
{A1,Alen1,[],L1,S1} -> %After an accepting state
{more,{tokens,L1,S1,Tcs,Alen1,Ts,A1,Alen1}};
{A1,Alen1,Ics1,L1,S1} ->
tokens_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, L1), Ts);
{A1,Alen1,Tlen1,[],L1,S1} -> %After a non-accepting state
{more,{tokens,L1,S1,Tcs,Tlen1,Ts,A1,Alen1}};
{reject,Alen1,Tlen1,eof,L1,S1} ->
{done,if Ts == [] -> {eof,L1};
true -> {ok,yyrev(Ts),L1} end,[]};
{reject,Alen1,Tlen1,Ics1,L1,S1} ->
skip_tokens(yysuf(Tcs, Tlen1+1), L1,
{L1,?MODULE,{illegal,yypre(Tcs, Tlen1+1)}});
{A1,Alen1,Tlen1,Ics1,L1,S1} ->
tokens_cont(yysuf(Tcs, Alen1), L1, yyaction(A1, Alen1, Tcs, L1), Ts)
end.
%% tokens_cont(RestChars, Line, Token, Tokens)
%% Test if we have detected the end token, if so return done else continue.
tokens_cont(Rest, Line, {token,T}, Ts) ->
tokens(Rest, Line, yystate(), Rest, 0, [T|Ts], reject, 0);
tokens_cont(Rest, Line, {end_token,T}, Ts) ->
{done,{ok,yyrev(Ts, [T]),Line},Rest};
tokens_cont(Rest, Line, skip_token, Ts) ->
tokens(Rest, Line, yystate(), Rest, 0, Ts, reject, 0);
tokens_cont(Rest, Line, {error,S}, Ts) ->
skip_tokens(Rest, Line, {Line,?MODULE,{user,S}}).
%% skip_tokens(InChars, Line, Error) -> {done,ReturnVal,RestChars}.
%% Skip tokens until an end token, junk everything and return the error.
%%skip_tokens(Ics, Line, Error) -> {done,{error,Error,Line},Ics}.
skip_tokens(Ics, Line, Error) ->
skip_tokens(Ics, Line, yystate(), Ics, 0, Error, reject, 0).
%% skip_tokens(InChars, Line, State, TokenChars, TokenLen, Tokens, Accept) ->
%% {more,Continuation} | {done,ReturnVal,RestChars}.
skip_tokens(Ics0, L0, S0, Tcs, Tlen0, Error, A0, Alen0) ->
case yystate(S0, Ics0, L0, Tlen0, A0, Alen0) of
{A1,Alen1,Ics1,L1} -> %Accepting end state
skip_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, L1), Error);
{A1,Alen1,[],L1,S1} -> %After an accepting state
{more,{skip_tokens,L1,S1,Tcs,Alen1,Error,A1,Alen1}};
{A1,Alen1,Ics1,L1,S1} ->
skip_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, L1), Error);
{A1,Alen1,Tlen1,[],L1,S1} -> %After a non-accepting state
{more,{skip_tokens,L1,S1,Tcs,Tlen1,Error,A1,Alen1}};
{reject,Alen1,Tlen1,eof,L1,S1} ->
{done,{error,Error,L1},[]};
{reject,Alen1,Tlen1,Ics1,L1,S1} ->
skip_tokens(yysuf(Tcs, Tlen1), L1, Error);
{A1,Alen1,Tlen1,Ics1,L1,S1} ->
skip_cont(yysuf(Tcs, Alen1), L1, yyaction(A1, Alen1, Tcs, L1), Error)
end.
%% skip_cont(RestChars, Line, Token, Error)
%% Test if we have detected the end token, if so return done else continue.
skip_cont(Rest, Line, {token,T}, Error) ->
skip_tokens(Rest, Line, yystate(), Rest, 0, Error, reject, 0);
skip_cont(Rest, Line, {end_token,T}, Error) ->
{done,{error,Error,Line},Rest};
skip_cont(Rest, Line, {error,S}, Error) ->
skip_tokens(Rest, Line, yystate(), Rest, 0, Error, reject, 0);
skip_cont(Rest, Line, skip_token, Error) ->
skip_tokens(Rest, Line, yystate(), Rest, 0, Error, reject, 0).
yyrev(L) -> yyrev(L, []).
yyrev([H|T], Acc) -> yyrev(T, [H|Acc]);
yyrev([], Acc) -> Acc.
yypre([H|T], N) when N > 0 -> [H|yypre(T, N-1)];
yypre(L, N) -> [].
yysuf([H|T], N) when N > 0 -> yysuf(T, N-1);
yysuf(L, 0) -> L.
%% Generated state transition function.
##dfa
%% Generated action function.
##actions


@@ -0,0 +1,95 @@
-module(lin).
-doc([{author,'Joe Armstrong'},
{title,"Linear algebra utilities."},
{keywords, [linear,algebra]},
{date,981103}]).
-export([pow/3, inv/2, solve/2, str2int/1, int2str/1, gcd/2]).
%% pow(A, B, M) => (A^B) mod M
%% examples pow(9726,3533,11413) = 5761
%% pow(5971,6597,11413) = 9726
pow(A, 1, M) ->
A rem M;
pow(A, 2, M) ->
A*A rem M;
pow(A, B, M) ->
B1 = B div 2,
B2 = B - B1,
%% B2 = B1 or B1+1
P = pow(A, B1, M),
case B2 of
B1 -> (P*P) rem M;
_ -> (P*P*A) rem M
end.
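%% Illustrative check of the square-and-multiply recursion (values taken
%% from the examples above): B is split into B1 + B2 with B2 - B1 =< 1,
%% so A^B mod M = (A^B1 * A^B2) mod M and the exponent halves each call.
%% 1> lin:pow(9726, 3533, 11413).
%% 5761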
%% inv(A, B) = C | no_inverse
%% computes C such that
%% A*C mod B = 1
%% computes A^-1 mod B
%% examples inv(28, 75) = 67.
%% inv(3533, 11200) = 6597
%% inv(6597, 11200) = 3533
inv(A, B) ->
case solve(A, B) of
{X, Y} ->
if X < 0 -> X + B;
true -> X
end;
_ ->
no_inverse
end.
%% solve(A, B) => {X, Y} | insoluble
%% solve the linear congruence
%% A * X - B * Y = 1
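%% A worked example (checked by hand): solve(28, 75) = {-8, -3}, since
%% 28*(-8) - 75*(-3) = -224 + 225 = 1; inv(28, 75) then normalises the
%% negative X to -8 + 75 = 67, matching the example above.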
%S tag1
solve(A, B) ->
case catch s(A,B) of
insoluble -> insoluble;
{X, Y} ->
case A * X - B * Y of
1 -> {X, Y};
Other -> error
end
end.
s(A, 0) -> throw(insoluble);
s(A, 1) -> {0, -1};
s(A, -1) -> {0, 1};
s(A, B) ->
K1 = A div B,
K2 = A - K1*B,
{Tmp, X} = s(B, -K2),
{X, K1 * X - Tmp}.
%E tag1
%% converts a string to a base 256 integer
%% converts a base 256 integer to a string
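%% Round-trip example: str2int("AB") = 65*256 + 66 = 16706, and
%% int2str(16706) = "AB".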
%S tag2
str2int(Str) -> str2int(Str, 0).
str2int([H|T], N) -> str2int(T, N*256+H);
str2int([], N) -> N.
int2str(N) -> int2str(N, []).
int2str(N, L) when N =< 0 -> L;
int2str(N, L) ->
N1 = N div 256,
H = N - N1 * 256,
int2str(N1, [H|L]).
%E tag2
%% greatest common divisor
gcd(A, B) when A < B -> gcd(B, A);
gcd(A, 0) -> A;
gcd(A, B) ->
gcd(B, A rem B).

View File

@@ -0,0 +1,92 @@
-module(primes).
-doc([{author,'Joe Armstrong'},
{title,"Prime number utilities."},
{keywords, [prime, numbers]},
{date,981103}]).
-export([make/1, make_prime/1, is_prime/1]).
-compile(export_all).
%% make a prime with at least K decimal digits
%% Here we use Bertrand's postulate, which states that for every N > 3
%% there is a prime P satisfying N < P < 2N - 2
%% This was proved by Tchebychef in 1850 (Erdos improved this proof
%% in 1932)
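%% e.g. for N = 10 the postulate guarantees a prime P with 10 < P < 18
%% (11, 13 and 17 all qualify), so the search below always terminates.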
%S tag4
make_prime(K) when K > 0 ->
new_seed(),
N = make(K),
if N > 3 ->
io:format("Generating a ~w digit prime ",[K]),
MaxTries = N - 3,
P1 = make_prime(MaxTries, N+1),
io:format("~n",[]),
P1;
true ->
make_prime(K)
end.
make_prime(0, _) ->
exit(impossible);
make_prime(K, P) ->
io:format(".",[]),
case is_prime(P) of
true -> P;
false -> make_prime(K-1, P+1)
end.
%E tag4
%% make(N) -> a random integer with N digits.
%S tag1
make(N) -> new_seed(), make(N, 0).
make(0, D) -> D;
make(N, D) ->
make(N-1, D*10 + (random:uniform(10)-1)).
%E tag1
%% Fermat's little theorem says that if
%% N is a prime and if A < N then
%% A^N mod N = A
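%% e.g. lin:pow(3, 7, 7) = 3, consistent with 7 being prime, while for
%% the composite 9, lin:pow(2, 9, 9) = 8 =/= 2, so 9 is rejected.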
%S tag3
is_prime(D) ->
new_seed(),
is_prime(D, 100).
is_prime(D, Ntests) ->
N = length(integer_to_list(D)) -1,
is_prime(Ntests, D, N).
is_prime(0, _, _) -> true;
is_prime(Ntest, N, Len) ->
K = random:uniform(Len),
%% A is a random number less than N
A = make(K),
if
A < N ->
case lin:pow(A,N,N) of
A -> is_prime(Ntest-1,N,Len);
_ -> false
end;
true ->
is_prime(Ntest, N, Len)
end.
%E tag3
new_seed() ->
{_,_,X} = erlang:now(),
{H,M,S} = time(),
H1 = H * X rem 32767,
M1 = M * X rem 32767,
S1 = S * X rem 32767,
put(random_seed, {H1,M1,S1}).

View File

@@ -0,0 +1,56 @@
-module(rsa_key).
-export([make_sig/1, make_sig/2]).
make_sig(Who, Len) when Len > 79 ->
{Public, Private} = make_sig(Len),
file:write_file(Who ++ ".pub", term_to_binary(Public)),
file:write_file(Who ++ ".pri", term_to_binary(Private)),
{keyfilecreated,for,Who}.
%% The "book" says ...
%% 1. Bob generates two primes p and q
%% 2. Bob computes n = pq and phi(n) = (p-1)*(q-1)
%% 3. Bob chooses a random b (0 < b < phi(n)) such that
%% gcd(b, phi(n)) = 1
%% 4. Bob computes a = b^(-1) mod phi(n) using the Euclidean algorithm
%% 5. Bob publishes n and b in a directory as his public key.
%S tag1
make_sig(Len) ->
%% generate two <Len> digit prime numbers
P = primes:make_prime(Len),
io:format("P = ~p~n", [P]),
Q = primes:make_prime(Len),
io:format("Q = ~p~n", [Q]),
N = P*Q,
io:format("N = ~p~n", [N]),
Phi = (P-1)*(Q-1),
%% now make B such that B < Phi and gcd(B, Phi) = 1
B = b(Phi),
io:format("Public key (B) = ~p~n", [B]),
A = lin:inv(B, Phi),
io:format("Private key (A) = ~p~n", [A]),
{{B,N},{A,N}}.
b(Phi) ->
io:format("Generating a public key B "),
K = length(integer_to_list(Phi)) - 1,
B = b(1, K, Phi),
io:format("~n", []),
B.
b(Try, K, Phi) ->
io:format("."),
B = primes:make(K),
if
B < Phi ->
case lin:gcd(B, Phi) of
1 -> B;
_ -> b(Try+1, K, Phi)
end;
true ->
b(Try, K, Phi)
end.
%E tag1
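%% Illustrative use of the generated pair (hypothetical helpers, not part
%% of the original module and left unexported): with public key {B,N} and
%% private key {A,N}, any message whose integer value is below N
%% round-trips, because M^(B*A) mod N = M.
encrypt(Msg, {B,N}) -> lin:pow(lin:str2int(Msg), B, N).
decrypt(Cipher, {A,N}) -> lin:int2str(lin:pow(Cipher, A, N)).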

2
tests/erlang/examples-2.0/sos Executable file
View File

@@ -0,0 +1,2 @@
#!/bin/sh
erl -boot /home/joe/erl/example_programs-2.0/examples-2.0/sos -environment `printenv` -load $1

View File

@@ -0,0 +1,344 @@
-module(sos).
-doc([{author,joe},
{title,"Simple OS written entirely in Erlang"},
{keywords, [os]},
{htmldoc, "sos.html"},
{date,981012}]).
-export([main/0, % starts the system
load_module/1, %
log_error/1,
make_server/3,
cast/2,
rpc/2,
change_behaviour/2,
keep_alive/2,
make_global/2,
on_exit/2,
on_halt/1,
stop_system/1,
every/3,
spawn_fun/1,
spawn_link_fun/1,
lookup/2,
map/2,
reverse/1,
read/0,
write/1,
env/1,
make_scripts/0
]).
-export([internal_call/1]).
main() ->
make_server(io,
fun start_io/0, fun handle_io/2),
make_server(code,
const([init,erl_prim_loader]),
fun handle_code/2),
make_server(error_logger,
const(0), fun handle_error_logger/2),
make_server(halt_demon,
const([]), fun handle_halt_demon/2),
make_server(env,
fun start_env/0, fun handle_env/2),
load_module(error_handler),
Mod = get_module_name(),
load_module(Mod),
run(Mod).
run(Mod) ->
Pid = spawn_link(Mod, main, []),
on_exit(Pid, fun(Why) -> stop_system(Why) end).
load_module(Mod) -> rpc(code, {load_module, Mod}).
handle_code({load_module, Mod}, Mods) ->
case member(Mod, Mods) of
true ->
{already_loaded, Mods};
false ->
case primLoad(Mod) of
ok ->
{{ok,Mod}, [Mod|Mods]};
Error ->
{Error, Mods}
end
end.
primLoad(Module) ->
Str = atom_to_list(Module),
case erl_prim_loader:get_file(Str ++ ".jam") of
{ok, Bin, FullName} ->
case erlang:load_module(Module, Bin) of
{module, Module} ->
ok;
{module, _} ->
{error, wrong_module_in_binary};
Other ->
{error, {bad_object_code, Module}}
end;
error ->
{error, {cannot_locate, Module}}
end.
log_error(Error) -> cast(error_logger, {log, Error}).
handle_error_logger({log, Error}, N) ->
erlang:display({error, Error}),
{ok, N+1}.
%S tag4
on_halt(Fun) -> cast(halt_demon,{on_halt,Fun}).
stop_system(Why) -> cast(halt_demon,{stop_system,Why}).
%E tag4
handle_halt_demon({on_halt, Fun}, Funs) ->
{ok, [Fun|Funs]};
handle_halt_demon({stop_system, Why}, Funs) ->
case Why of
normal -> true;
_ -> erlang:display({stopping_system,Why})
end,
map(fun(F) -> F() end, Funs),
erlang:halt(),
{ok, []}.
%S tag5
read() -> rpc(io, read).
write(X) -> rpc(io, {write, X}).
%E tag5
start_io() ->
Port = open_port({fd,0,1}, [eof, binary]),
process_flag(trap_exit, true),
{false, Port}.
handle_io(read, {true, Port}) ->
{eof, {true, Port}};
handle_io(read, {false, Port}) ->
receive
{Port, {data, Bytes}} ->
{{ok, Bytes}, {false, Port}};
{Port, eof} ->
{eof, {true,Port}};
{'EXIT', Port, badsig} ->
handle_io(read, {false, Port});
{'EXIT', Port, Why} ->
{eof, {true, Port}}
end;
handle_io({write,X}, {Flag,Port}) ->
Port ! {self(), {command, X}},
{ok, {Flag, Port}}.
env(Key) -> rpc(env, {lookup, Key}).
handle_env({lookup, Key}, Dict) ->
{lookup(Key, Dict), Dict}.
start_env() ->
Env = case init:get_argument(environment) of
{ok, [L]} ->
L;
error ->
fatal({missing, '-environment ...'})
end,
map(fun split_env/1, Env).
split_env(Str) -> split_env(Str, []).
split_env([$=|T], L) -> {reverse(L), T};
split_env([], L) -> {reverse(L), []};
split_env([H|T], L) -> split_env(T, [H|L]).
make_server(Name, FunD, FunH) ->
make_global(Name,
fun() ->
Data = FunD(),
server_loop(Name, Data, FunH)
end).
server_loop(Name, Data, Fun) ->
receive
{rpc, Pid, Q} ->
case (catch Fun(Q, Data)) of
{'EXIT', Why} ->
Pid ! {Name, exit, Why},
server_loop(Name, Data, Fun);
{Reply, Data1} ->
Pid ! {Name, Reply},
server_loop(Name, Data1, Fun)
end;
{cast, Pid, Q} ->
case (catch Fun(Q, Data)) of
{'EXIT', Why} ->
exit(Pid, Why),
server_loop(Name, Data, Fun);
Data1 ->
server_loop(Name, Data1, Fun)
end;
{eval, Fun1} ->
server_loop(Name, Data, Fun1)
end.
rpc(Name, Q) ->
Name ! {rpc, self(), Q},
receive
{Name, Reply} ->
Reply;
{Name, exit, Why} ->
exit(Why)
end.
cast(Name, Q) ->
Name ! {cast, self(), Q}.
change_behaviour(Name, Fun) ->
Name ! {eval, Fun}.
const(C) -> fun() -> C end.
keep_alive(Name, Fun) ->
Pid = make_global(Name, Fun),
on_exit(Pid,
fun(Exit) -> keep_alive(Name, Fun) end).
make_global(Name, Fun) ->
case whereis(Name) of
undefined ->
Self = self(),
Pid = spawn_fun(fun() ->
make_global(Self,Name,Fun)
end),
receive
{Pid, ack} ->
Pid
end;
Pid ->
Pid
end.
make_global(Pid, Name, Fun) ->
    case catch register(Name, self()) of
{'EXIT', _} ->
Pid ! {self(), ack};
_ ->
Pid ! {self(), ack},
Fun()
end.
spawn_fun({'fun',Mod,Arity,Chksum,Env}) ->
spawn(?MODULE, internal_call,
[[Mod,Arity,Chksum,[],Env]]).
spawn_link_fun({'fun',Mod,Arity,Chksum,Env}) ->
    spawn_link(?MODULE, internal_call,
               [[Mod,Arity,Chksum,[],Env]]).
internal_call([Mod|Args]) ->
apply(Mod, module_lambdas, Args).
on_exit(Pid, Fun) ->
spawn_fun(fun() ->
process_flag(trap_exit, true),
link(Pid),
receive
{'EXIT', Pid, Why} ->
Fun(Why)
end
end).
every(Pid, Time, Fun) ->
spawn_fun(fun() ->
process_flag(trap_exit, true),
link(Pid),
every_loop(Pid, Time, Fun)
end).
every_loop(Pid, Time, Fun) ->
receive
{'EXIT', Pid, Why} ->
true
after Time ->
Fun(),
every_loop(Pid, Time, Fun)
end.
get_module_name() ->
case init:get_argument(load) of
{ok, [[Arg]]} ->
module_name(Arg);
error ->
fatal({missing, '-load Mod'})
end.
%S tag7
lookup(Key, [{Key,Val}|_]) -> {found, Val};
lookup(Key, [_|T]) -> lookup(Key, T);
lookup(Key, []) -> not_found.
member(X, [X|_]) -> true;
member(H, [_|T]) -> member(H, T);
member(_, []) -> false.
map(F, [H|T]) -> [F(H)|map(F, T)];
map(F, []) -> [].
reverse(X) -> reverse(X, []).
reverse([H|T], L) -> reverse(T, [H|L]);
reverse([], L) -> L.
module_name(Str) ->
case (catch list_to_atom(Str)) of
{'EXIT', _} ->
log_error({bad_module_name,Str}),
stop_system(bad_start_module);
Mod -> Mod
end.
%E tag7
fatal(Term) ->
log_error({fatal, Term}),
stop_system({fatal, Term}).
%S tag8
make_scripts() ->
{ok, Cwd} = file:get_cwd(),
Script =
{script,{"sos","1.0"},
[{preLoaded,[init,erl_prim_loader]},
{progress,preloaded},
{path,
[".",
Cwd,
"$ROOT/lib/" ++ lib_location(kernel) ++ "/ebin",
"$ROOT/lib/" ++ lib_location(stdlib) ++ "/ebin"]},
{primLoad,
[erl_open_port,
erlang,
error_handler,
sos
]},
{kernel_load_completed},
{progress,kernel_load_completed},
{progress,started},
{apply,{sos,main,[]}}
]},
file:write_file("sos.boot", term_to_binary(Script)),
{ok, Stream} = file:open("sos", write),
io:format(Stream, "#!/bin/sh~nerl -boot ~s/sos "
" -environment `printenv` -load $1~n",
[Cwd]),
file:close(Stream),
unix:cmd("chmod a+x sos"),
true.
lib_location(Lib) ->
filename:basename(code:lib_dir(Lib)).
%E tag8

View File

@@ -0,0 +1,7 @@
-module(sos_err1).
-doc(none).
-export([main/0]).
main() ->
listsforeaack:abc(123),
sos:write("Hello world\n").

View File

@@ -0,0 +1,7 @@
-module(sos_err2).
-doc(none).
-export([main/0]).
main() ->
lists:abc(123),
sos:write("Hello world\n").

View File

@@ -0,0 +1,6 @@
-module(sos_test1).
-doc(none).
-export([main/0]).
main() ->
sos:write("Hello world\n").

View File

@@ -0,0 +1,7 @@
-module(sos_test2).
-doc(none).
-export([main/0]).
main() ->
    X = lists:reverse("Hello world"),
sos:write([X,"\n"]).

View File

@@ -0,0 +1,16 @@
-module(sos_test3).
-doc(none).
-export([main/0]).
-import(sos, [read/0, write/1]).
main() ->
loop().
loop() ->
case read() of
eof ->
true;
{ok, X} ->
write([X]),
loop()
end.

View File

@@ -0,0 +1,8 @@
-module(sos_test4).
-doc(none).
-export([main/0]).
main() ->
sos:write("I will crash now\n"),
1 = 2,
sos:write("This line will not be printed\n").

View File

@@ -0,0 +1,8 @@
Suffix .erl.jam ->
c:c($>).
Suffix .tex.dvi ->
unix:cmd("latex $>.tex").
Suffix .ehtml.html ->
ehtml:file("$>").

View File

@@ -0,0 +1,17 @@
%% This is a macro
OBJS = a.jam, b.jam.
OBJS += ,c.jam.
include("suffix_rules").
all when a.dvi $(OBJS).
a.dvi when a.tex ->
io:format("touching a.dvi~n"),
unix:cmd("touch a.dvi"),
io:format("done~n").

View File

@@ -0,0 +1,13 @@
%% This is a macro
OBJS = abc.jam, a.jam, b.jam.
include("$(MAKEDIR)/suffix_rules").
all when $(OBJS) a.dvi.
a.dvi when a.tex ->
unix:cmd("touch a.dvi").

View File

@@ -0,0 +1,52 @@
%%
%% Make the tk library
%%
Jamdir=../ebin.
CC = gcc.
tk when $(Jamdir)/tk.jam, $(Jamdir)/tkfocus.jam,
tkbutton.jam, tkentry.jam,
tkscrlbar.jam, tkdialog.jam, tklib.jam ->
X = 12,
Y = 14,
tk:stop(X, Y, X+Y).
$(Jamdir)/tk.jam when tk.erl, tk.hrl ->
c:c(tk, [{outdir, "$(Jamdir)"}]).
$(Jamdir)/tkfocus.jam when tkfocus.erl, tk.hrl ->
c:c(tkfocus, [{outdir, "$(Jamdir)"}]).
$(Jamdir)/tkbutton.jam when tkbutton.erl, tk.hrl ->
c:c(tkbutton, [{outdir, "$(Jamdir)"}]).
$(Jamdir)/tkentry.jam when tkentry.erl, tk.hrl ->
c:c(tkentry, [{outdir, "$(Jamdir)"}]).
tkscrlbar.jam when tkscrlbar.erl, tk.hrl ->
c:c(tkscrlbar, [{outdir, "$(Jamdir)"}]).
tkdialog.jam when tkdialog.erl ->
c:c(tkdialog, [{outdir, "$(Jamdir)"}]).
tklib.jam when tklib.erl ->
io:format("compiling: $@~n"),
c:c(tklib, [{outdir, "$(Jamdir)"}]).
foo.o when foo.c ->
unix:cmd("$(CC) -o $@ -c foo.c").
foo when bar ->
X = case Jamdir of
"../ebin" -> 1;
".." -> 2;
"." -> 3
end,
io:format("this = ~w", [X]).

View File

@@ -0,0 +1,7 @@
all when a.jam ->
p,q,r.
all -> a,b,c.
all,b,c when a,b,c.

View File

@@ -0,0 +1,22 @@
\begin{titlepage}
\begin{center}
\vspace*{1.0cm}
{\Huge\bf
The Erlang Cookbook\\
}
\vskip 1.0 true cm
\vskip 3.0 true cm
{\large\bf
Joe Armstrong\\
}
\vskip 1.0 true cm
\end{center}
\eject
\end{titlepage}

View File

@@ -0,0 +1,66 @@
-module(topological_sort).
%% Copyright (C) 1998, Ericsson Computer Science Laboratory
-doc([{author,'Joe Armstrong'},
{title,"Topological sort of a partial order."},
{keywords, [topological,sort,partial,order]},
{date,981102}]).
-export([sort/1, test/0]).
-import(lists, [map/2, member/2, filter/2]).
%% -type sort([{X,Y}]) -> {ok, [X]} | {cycle, [{X,Y}]}
%% topological_sort:sort(Pairs)
%% A partial order on the set S is a set of pairs {Xi,Xj} such that
%% some relation between Xi and Xj is obeyed.
%% A topological sort of a partial order is a sequence of elements
%% [X1, X2, X3, ...] such that whenever {Xi, Xj} is in the partial
%% order, Xi appears before Xj in the sequence (i.e. i < j)
test() ->
Pairs = [{1,2},{2,4},{4,6},{2,10},{4,8},{6,3},{1,3},
{3,5},{5,8},{7,5},{7,9},{9,4},{9,10}],
sort(Pairs).
%% [7,1,9,2,4,6,3,5,8,10]
%S tag1
sort(Pairs) ->
iterate(Pairs, [], all(Pairs)).
iterate([], L, All) ->
{ok, remove_duplicates(L ++ subtract(All, L))};
iterate(Pairs, L, All) ->
case subtract(lhs(Pairs), rhs(Pairs)) of
[] ->
{cycle, Pairs};
Lhs ->
iterate(remove_pairs(Lhs, Pairs), L ++ Lhs, All)
end.
all(L) -> lhs(L) ++ rhs(L).
lhs(L) -> map(fun({X,_}) -> X end, L).
rhs(L) -> map(fun({_,Y}) -> Y end, L).
%% subtract(L1, L2) -> all the elements in L1 which are not in L2
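%% e.g. subtract([1,2,3,4], [2,4]) = [1,3]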
subtract(L1, L2) -> filter(fun(X) -> not member(X, L2) end, L1).
remove_duplicates([H|T]) ->
case member(H, T) of
true -> remove_duplicates(T);
false -> [H|remove_duplicates(T)]
end;
remove_duplicates([]) ->
[].
%% remove_pairs(L1, L2) -> L2' where L1 = [X], L2 = [{X,Y}]
%% removes all pairs from L2 where the first element
%% of each pair is a member of L1
remove_pairs(L1, L2) -> filter(fun({X,_}) -> not member(X, L1) end, L2).
%E tag1

View File

@@ -0,0 +1,39 @@
-module(transitive).
%% Copyright (C) 1997, Ericsson Telecom AB
-doc([{author,'Joe Armstrong'},
{title,"Transitive closure of a graph."},
{keywords, [transitive,closure]},
{date,981102}]).
%% warning slow on big graphs
-export([closure/2]).
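%% e.g. closure([1], [{1,2},{2,3}]) = [3,2,1]
%% (the set of reachable nodes, most recently reached first)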
%S tag1
closure(RootSet, Pairs) ->
closure_list(RootSet, Pairs, RootSet).
closure(Start, [], L) ->
L;
closure(Start, Pairs, Reachable) ->
{Next, Rest} = next(Start, Pairs),
closure_list(Next, Rest, Next ++ Reachable).
closure_list([], Pairs, Reachable) ->
Reachable;
closure_list([H|T], Pairs, Reachable) ->
Reachable1 = closure(H, Pairs, Reachable),
closure_list(T, Pairs, Reachable1).
next(Start, Pairs) ->
next(Start, Pairs, [], []).
next(Start, [], Reach, NoReach) ->
{Reach, NoReach};
next(Start, [{Start,Next}|T], Reach, NoReach) ->
next(Start, T, [Next|Reach], NoReach);
next(Start, [H|T], Reach, NoReach) ->
next(Start, T, Reach, [H|NoReach]).
%E tag1

View File

@@ -0,0 +1,3 @@
{"joe", "zapme"}.
{"ftp","ftp"}.
{"jane","hospan"}.