author     Jose Antonio Marquez    2012-01-27 12:05:17 -0800
committer  Jose Antonio Marquez    2012-01-27 12:05:17 -0800
commit     3a754133dbc138390503341fd2e9beba3e43aa4b (patch)
tree       cdeae7d7dd9a30d7b4fab5afb7efad68d4ec7508 /imports/codemirror/mode/rust
parent     b89a7ee8b956c96a1dcee995ea840feddc5d4b27 (diff)
download   ninja-3a754133dbc138390503341fd2e9beba3e43aa4b.tar.gz

Merged old FileIO
Diffstat (limited to 'imports/codemirror/mode/rust')

-rwxr-xr-x  imports/codemirror/mode/rust/index.html   48
-rwxr-xr-x  imports/codemirror/mode/rust/rust.js      411

2 files changed, 459 insertions(+), 0 deletions(-)
diff --git a/imports/codemirror/mode/rust/index.html b/imports/codemirror/mode/rust/index.html
new file mode 100755
index 00000000..a84c61e5
--- /dev/null
+++ b/imports/codemirror/mode/rust/index.html
@@ -0,0 +1,48 @@
<!doctype html>
<html>
  <head>
    <title>CodeMirror: Rust mode</title>
    <link rel="stylesheet" href="../../lib/codemirror.css">
    <script src="../../lib/codemirror.js"></script>
    <script src="rust.js"></script>
    <link rel="stylesheet" href="../../doc/docs.css">
    <style type="text/css">.CodeMirror {border-top: 1px solid black; border-bottom: 1px solid black;}</style>
  </head>
  <body>
    <h1>CodeMirror: Rust mode</h1>

    <div><textarea id="code" name="code">
// Demo code.

type foo<T> = int;
tag bar {
    some(int, foo<float>);
    none;
}

fn check_crate(x: int) {
    let v = 10;
    alt foo {
      1 to 3 {
        print_foo();
        if x {
            blah() + 10;
        }
      }
      (x, y) { "bye" }
      _ { "hi" }
    }
}
</textarea></div>

    <script>
      var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
        lineNumbers: true,
        matchBrackets: true,
        tabMode: "indent"
      });
    </script>

    <p><strong>MIME types defined:</strong> <code>text/x-rustsrc</code>.</p>
  </body>
</html>
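Side note (not part of this commit): the demo page above does not pass a mode option to CodeMirror, but it does list the MIME type the mode defines. A minimal sketch of selecting the mode explicitly by that MIME type, assuming codemirror.js and rust.js are already loaded on the page:

    var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
      mode: "text/x-rustsrc",  // MIME type registered by rust.js
      lineNumbers: true,
      matchBrackets: true
    });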
diff --git a/imports/codemirror/mode/rust/rust.js b/imports/codemirror/mode/rust/rust.js
new file mode 100755
index 00000000..5ab964c1
--- /dev/null
+++ b/imports/codemirror/mode/rust/rust.js
@@ -0,0 +1,411 @@
CodeMirror.defineMode("rust", function() {
  var indentUnit = 4, altIndentUnit = 2;
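  // Keyword tables for value and type positions. The names reflect the
  // pre-1.0 Rust syntax in use when this mode was written (alt, tag, ret,
  // native, ...).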
  var valKeywords = {
    "if": "if-style", "while": "if-style", "else": "else-style",
    "do": "else-style", "ret": "else-style", "fail": "else-style",
    "break": "atom", "cont": "atom", "const": "let", "resource": "fn",
    "let": "let", "fn": "fn", "for": "for", "alt": "alt", "obj": "fn",
    "lambda": "fn", "type": "type", "tag": "tag", "mod": "mod",
    "as": "op", "true": "atom", "false": "atom", "assert": "op", "check": "op",
    "claim": "op", "native": "ignore", "unsafe": "ignore", "import": "else-style",
    "export": "else-style", "copy": "op", "log": "op", "log_err": "op",
    "use": "op", "bind": "op"
  };
  var typeKeywords = function() {
    var keywords = {"fn": "fn", "block": "fn", "obj": "obj"};
    var atoms = "bool uint int i8 i16 i32 i64 u8 u16 u32 u64 float f32 f64 str char".split(" ");
    for (var i = 0, e = atoms.length; i < e; ++i) keywords[atoms[i]] = "atom";
    return keywords;
  }();
  var operatorChar = /[+\-*&%=<>!?|\.@]/;

  // Tokenizer

  // Used as scratch variable to communicate multiple values without
  // consing up tons of objects.
  var tcat, content;
  function r(tc, style) {
    tcat = tc;
    return style;
  }

  function tokenBase(stream, state) {
    var ch = stream.next();
    if (ch == '"') {
      state.tokenize = tokenString;
      return state.tokenize(stream, state);
    }
    if (ch == "'") {
      tcat = "atom";
      if (stream.eat("\\")) {
        if (stream.skipTo("'")) { stream.next(); return "string"; }
        else { return "error"; }
      } else {
        stream.next();
        return stream.eat("'") ? "string" : "error";
      }
    }
    if (ch == "/") {
      if (stream.eat("/")) { stream.skipToEnd(); return "comment"; }
      if (stream.eat("*")) {
        state.tokenize = tokenComment(1);
        return state.tokenize(stream, state);
      }
    }
    if (ch == "#") {
      if (stream.eat("[")) { tcat = "open-attr"; return null; }
      stream.eatWhile(/\w/);
      return r("macro", "meta");
    }
    if (ch == ":" && stream.match(":<")) {
      return r("op", null);
    }
    if (ch.match(/\d/) || (ch == "." && stream.eat(/\d/))) {
      var flp = false;
      if (!stream.match(/^x[\da-f]+/i) && !stream.match(/^b[01]+/)) {
        stream.eatWhile(/\d/);
        if (stream.eat(".")) { flp = true; stream.eatWhile(/\d/); }
        if (stream.match(/^e[+\-]?\d+/i)) { flp = true; }
      }
      if (flp) stream.match(/^f(?:32|64)/);
      else stream.match(/^[ui](?:8|16|32|64)/);
      return r("atom", "number");
    }
    if (ch.match(/[()\[\]{}:;,]/)) return r(ch, null);
    if (ch == "-" && stream.eat(">")) return r("->", null);
    if (ch.match(operatorChar)) {
      stream.eatWhile(operatorChar);
      return r("op", null);
    }
    stream.eatWhile(/\w/);
    content = stream.current();
    if (stream.match(/^::\w/)) {
      stream.backUp(1);
      return r("prefix", "variable-2");
    }
    if (state.keywords.propertyIsEnumerable(content))
      return r(state.keywords[content], content.match(/true|false/) ? "atom" : "keyword");
    return r("name", "variable");
  }

  function tokenString(stream, state) {
    var ch, escaped = false;
    while (ch = stream.next()) {
      if (ch == '"' && !escaped) {
        state.tokenize = tokenBase;
        return r("atom", "string");
      }
      escaped = !escaped && ch == "\\";
    }
    // Hack to not confuse the parser when a string is split in
    // pieces.
    return r("op", "string");
  }

  function tokenComment(depth) {
    return function(stream, state) {
      var lastCh = null, ch;
      while (ch = stream.next()) {
        if (ch == "/" && lastCh == "*") {
          if (depth == 1) {
            state.tokenize = tokenBase;
            break;
          } else {
            state.tokenize = tokenComment(depth - 1);
            return state.tokenize(stream, state);
          }
        }
        if (ch == "*" && lastCh == "/") {
          state.tokenize = tokenComment(depth + 1);
          return state.tokenize(stream, state);
        }
        lastCh = ch;
      }
      return "comment";
    };
  }

  // Parser

  var cx = {state: null, stream: null, marked: null, cc: null};
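  // pass() schedules combinators on the parser's stack without consuming the
  // current token; cont() does the same and reports the token as consumed.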
  function pass() {
    for (var i = arguments.length - 1; i >= 0; i--) cx.cc.push(arguments[i]);
  }
  function cont() {
    pass.apply(null, arguments);
    return true;
  }

  function pushlex(type, info) {
    var result = function() {
      var state = cx.state;
      state.lexical = {indented: state.indented, column: cx.stream.column(),
                       type: type, prev: state.lexical, info: info};
    };
    result.lex = true;
    return result;
  }
  function poplex() {
    var state = cx.state;
    if (state.lexical.prev) {
      if (state.lexical.type == ")")
        state.indented = state.lexical.indented;
      state.lexical = state.lexical.prev;
    }
  }
  function typecx() { cx.state.keywords = typeKeywords; }
  function valcx() { cx.state.keywords = valKeywords; }
  poplex.lex = typecx.lex = valcx.lex = true;

  function commasep(comb, end) {
    function more(type) {
      if (type == ",") return cont(comb, more);
      if (type == end) return cont();
      return cont(more);
    }
    return function(type) {
      if (type == end) return cont();
      return pass(comb, more);
    };
  }

  function block(type) {
    if (type == "}") return cont();
    if (type == "let") return cont(pushlex("stat", "let"), letdef1, poplex, block);
    if (type == "fn") return cont(pushlex("stat"), fndef, poplex, block);
    if (type == "type") return cont(pushlex("stat"), tydef, endstatement, poplex, block);
    if (type == "tag") return cont(pushlex("stat"), tagdef, poplex, block);
    if (type == "mod") return cont(pushlex("stat"), mod, poplex, block);
    if (type == "open-attr") return cont(pushlex("]"), commasep(expression, "]"), poplex)