adder-script

A Python-like language for executing untrusted code in browsers and Node.js.

// CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: https://codemirror.net/LICENSE

(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
  "use strict";

  function wordRegexp(words) {
    return new RegExp("^((" + words.join(")|(") + "))\\b");
  }

  var wordOperators = wordRegexp(["and", "or", "not", "is"]);
  var commonKeywords = ["break", "continue", "def", "elif", "else",
                        "for", "if", "pass", "return", "while", "in"];
  var commonBuiltins = "Alert False Input Math NaN None Random True __ABOUT__ __VERSION__ _sec_test all and any bin bool break callable chr cmp continue def delete dict dir elif else equal exist float for if in int is len list not or ord pass print range repr return reversed set str type while".split(' ');
  var combinedUnique = Array.from(new Set(commonKeywords.concat(commonBuiltins)));
  CodeMirror.registerHelper("hintWords", "adder", combinedUnique);

  function top(state) {
    return state.scopes[state.scopes.length - 1];
  }

  CodeMirror.defineMode("adder", function(conf, parserConf) {
    var ERRORCLASS = "error";

    var delimiters = parserConf.delimiters || parserConf.singleDelimiters || /^[\(\)\[\]\{\}@,:`=;\.\\]/;
    //               (Backwards-compatibility with old, cumbersome config system)
    var operators = [parserConf.singleOperators, parserConf.doubleOperators, parserConf.doubleDelimiters, parserConf.tripleDelimiters,
                     parserConf.operators || /^([-+*/%\/&|^]=?|[<>=]+|\/\/=?|\*\*=?|!=|[~!@])/]
    for (var i = 0; i < operators.length; i++) if (!operators[i]) operators.splice(i--, 1)

    var hangingIndent = parserConf.hangingIndent || conf.indentUnit;

    var myKeywords = commonKeywords, myBuiltins = commonBuiltins;
    if (parserConf.extra_keywords != undefined)
      myKeywords = myKeywords.concat(parserConf.extra_keywords);

    if (parserConf.extra_builtins != undefined)
      myBuiltins = myBuiltins.concat(parserConf.extra_builtins);

    var py3 = !(parserConf.version && Number(parserConf.version) < 3)
    if (py3) {
      // since http://legacy.adder.org/dev/peps/pep-0465/ @ is also an operator
      var identifiers = parserConf.identifiers || /^[_A-Za-z\u00A1-\uFFFF][_A-Za-z0-9\u00A1-\uFFFF]*/;
      myKeywords = myKeywords.concat(["nonlocal", "False", "True", "None", "NaN"]);
      myBuiltins = myBuiltins.concat(["ascii", "bytes", "exec", "print"]);
      var stringPrefixes = new RegExp("^(([rbuf]|(br)|(fr))?('{3}|\"{3}|['\"]))", "i");
    } else {
      var identifiers = parserConf.identifiers || /^[_A-Za-z][_A-Za-z0-9]*/;
      myKeywords = myKeywords.concat(["exec", "print"]);
      myBuiltins = myBuiltins.concat(["apply", "basestring", "buffer", "cmp", "coerce",
                                      "execfile", "file", "intern", "long", "raw_input",
                                      "reduce", "reload", "unichr", "unicode", "xrange",
                                      "False", "True", "None"]);
      var stringPrefixes = new RegExp("^(([rubf]|(ur)|(br))?('{3}|\"{3}|['\"]))", "i");
    }
    var keywords = wordRegexp(myKeywords);
    var builtins = wordRegexp(myBuiltins);

    // tokenizers
    function tokenBase(stream, state) {
      var sol = stream.sol() && state.lastToken != "\\"
      if (sol) state.indent = stream.indentation()
      // Handle scope changes
      if (sol && top(state).type == "py") {
        var scopeOffset = top(state).offset;
        if (stream.eatSpace()) {
          var lineOffset = stream.indentation();
          if (lineOffset > scopeOffset)
            pushPyScope(state);
          else if (lineOffset < scopeOffset && dedent(stream, state) && stream.peek() != "#")
            state.errorToken = true;
          return null;
        } else {
          var style = tokenBaseInner(stream, state);
          if (scopeOffset > 0 && dedent(stream, state))
            style += " " + ERRORCLASS;
          return style;
        }
      }
      return tokenBaseInner(stream, state);
    }

    function tokenBaseInner(stream, state) {
      if (stream.eatSpace()) return null;

      // Handle Comments
      if (stream.match(/^#.*/)) return "comment";

      // Handle Number Literals
      if (stream.match(/^[0-9\.]/, false)) {
        var floatLiteral = false;
        // Floats
        if (stream.match(/^[\d_]*\.\d+(e[\+\-]?\d+)?/i)) { floatLiteral = true; }
        if (stream.match(/^[\d_]+\.\d*/)) { floatLiteral = true; }
        if (stream.match(/^\.\d+/)) { floatLiteral = true; }
        if (floatLiteral) {
          // Float literals may be "imaginary"
          stream.eat(/J/i);
          return "number";
        }
        // Integers
        var intLiteral = false;
        // Hex
        if (stream.match(/^0x[0-9a-f_]+/i)) intLiteral = true;
        // Binary
        if (stream.match(/^0b[01_]+/i)) intLiteral = true;
        // Octal
        if (stream.match(/^0o[0-7_]+/i)) intLiteral = true;
        // Decimal
        if (stream.match(/^[1-9][\d_]*(e[\+\-]?[\d_]+)?/)) {
          // Decimal literals may be "imaginary"
          stream.eat(/J/i);
          // TODO - Can you have imaginary longs?
          intLiteral = true;
        }
        // Zero by itself with no other piece of number.
        if (stream.match(/^0(?![\dx])/i)) intLiteral = true;
        if (intLiteral) {
          // Integer literals may be "long"
          stream.eat(/L/i);
          return "number";
        }
      }

      // Handle Strings
      if (stream.match(stringPrefixes)) {
        var isFmtString = stream.current().toLowerCase().indexOf('f') !== -1;
        if (!isFmtString) {
          state.tokenize = tokenStringFactory(stream.current());
          return state.tokenize(stream, state);
        } else {
          state.tokenize = formatStringFactory(stream.current(), state.tokenize);
          return state.tokenize(stream, state);
        }
      }

      for (var i = 0; i < operators.length; i++)
        if (stream.match(operators[i])) return "operator"

      if (stream.match(delimiters)) return "punctuation";

      if (state.lastToken == "." && stream.match(identifiers))
        return "property";

      if (stream.match(keywords) || stream.match(wordOperators))
        return "keyword";

      if (stream.match(builtins))
        return "builtin";

      if (stream.match(/^(self|cls)\b/))
        return "variable-2";

      if (stream.match(identifiers)) {
        if (state.lastToken == "def" || state.lastToken == "class")
          return "def";
        return "variable";
      }

      // Handle non-detected items
      stream.next();
      return ERRORCLASS;
    }

    function formatStringFactory(delimiter, tokenOuter) {
      while ("rubf".indexOf(delimiter.charAt(0).toLowerCase()) >= 0)
        delimiter = delimiter.substr(1);

      var singleline = delimiter.length == 1;
      var OUTCLASS = "string";

      function tokenFString(stream, state) {
        // inside f-str Expression
        if (stream.match(delimiter)) {
          // expression ends prematurely, but very common in editing
          // Could show error to remind users to close brace here
          state.tokenize = tokenString
          return OUTCLASS;
        } else if (stream.match('{')) {
          // starting brace, if not eaten below
          return "punctuation";
        } else if (stream.match('}')) {
          // return to regular inside string state
          state.tokenize = tokenString
          return "punctuation";
        } else {
          // use tokenBaseInner to parse the expression
          return tokenBaseInner(stream, state);
        }
      }

      function tokenString(stream, state) {
        while (!stream.eol()) {
          stream.eatWhile(/[^'"\{\}\\]/);
          if (stream.eat("\\")) {
            stream.next();
            if (singleline && stream.eol())
              return OUTCLASS;
          } else if (stream.match(delimiter)) {
            state.tokenize = tokenOuter;
            return OUTCLASS;
          } else if (stream.match('{{')) {
            // ignore {{ in f-str
            return OUTCLASS;
          } else if (stream.match('{', false)) {
            // switch to nested mode
            state.tokenize = tokenFString
            if (stream.current()) {
              return OUTCLASS;
            } else {
              // need to return something, so eat the starting {
              stream.next();
              return "punctuation";
            }
          } else if (stream.match('}}')) {
            return OUTCLASS;
          } else if (stream.match('}')) {
            // single } in f-string is an error
            return ERRORCLASS;
          } else {
            stream.eat(/['"]/);
          }
        }
        if (singleline) {
          if (parserConf.singleLineStringErrors)
            return ERRORCLASS;
          else
            state.tokenize = tokenOuter;
        }
        return OUTCLASS;
      }
      tokenString.isString = true;
      return tokenString;
    }

    function tokenStringFactory(delimiter) {
      while ("rubf".indexOf(delimiter.charAt(0).toLowerCase()) >= 0)
        delimiter = delimiter.substr(1);

      var singleline = delimiter.length == 1;
      var OUTCLASS = "string";

      function tokenString(stream, state) {
        while (!stream.eol()) {
          stream.eatWhile(/[^'"\\]/);
          if (stream.eat("\\")) {
            stream.next();
            if (singleline && stream.eol())
              return OUTCLASS;
          } else if (stream.match(delimiter)) {
            state.tokenize = tokenBase;
            return OUTCLASS;
          } else {
            stream.eat(/['"]/);
          }
        }
        if (singleline) {
          if (parserConf.singleLineStringErrors)
            return ERRORCLASS;
          else
            state.tokenize = tokenBase;
        }
        return OUTCLASS;
      }
      tokenString.isString = true;
      return tokenString;
    }

    function pushPyScope(state) {
      while (top(state).type != "py") state.scopes.pop()
      state.scopes.push({offset: top(state).offset + conf.indentUnit,
                         type: "py",
                         align: null})
    }

    function pushBracketScope(stream, state, type) {
      var align = stream.match(/^([\s\[\{\(]|#.*)*$/, false) ? null : stream.column() + 1
      state.scopes.push({offset: state.indent + hangingIndent,
                         type: type,
                         align: align})
    }

    function dedent(stream, state) {
      var indented = stream.indentation();
      while (state.scopes.length > 1 && top(state).offset > indented) {
        if (top(state).type != "py") return true;
        state.scopes.pop();
      }
      return top(state).offset != indented;
    }

    function tokenLexer(stream, state) {
      if (stream.sol()) state.beginningOfLine = true;

      var style = state.tokenize(stream, state);
      var current = stream.current();

      // Handle decorators
      if (state.beginningOfLine && current == "@")
        return stream.match(identifiers, false) ? "meta" : py3 ? "operator" : ERRORCLASS;

      if (/\S/.test(current)) state.beginningOfLine = false;

      if ((style == "variable" || style == "builtin")
          && state.lastToken == "meta")
        style = "meta";

      // Handle scope changes.
      if (current == "pass" || current == "return")
        state.dedent += 1;

      if (current == "lambda") state.lambda = true;
      if (current == ":" && !state.lambda && top(state).type == "py")
        pushPyScope(state);

      if (current.length == 1 && !/string|comment/.test(style)) {
        var delimiter_index = "[({".indexOf(current);
        if (delimiter_index != -1)
          pushBracketScope(stream, state, "])}".slice(delimiter_index, delimiter_index + 1));

        delimiter_index = "])}".indexOf(current);
        if (delimiter_index != -1) {
          if (top(state).type == current) state.indent = state.scopes.pop().offset - hangingIndent
          else return ERRORCLASS;
        }
      }
      if (state.dedent > 0 && stream.eol() && top(state).type == "py") {
        if (state.scopes.length > 1) state.scopes.pop();
        state.dedent -= 1;
      }

      return style;
    }

    var external = {
      startState: function(basecolumn) {
        return {
          tokenize: tokenBase,
          scopes: [{offset: basecolumn || 0, type: "py", align: null}],
          indent: basecolumn || 0,
          lastToken: null,
          lambda: false,
          dedent: 0
        };
      },

      token: function(stream, state) {
        var addErr = state.errorToken;
        if (addErr) state.errorToken = false;
        var style = tokenLexer(stream, state);

        if (style && style != "comment")
          state.lastToken = (style == "keyword" || style == "punctuation") ? stream.current() : style;
        if (style == "punctuation") style = null;

        if (stream.eol() && state.lambda)
          state.lambda = false;
        return addErr ? style + " " + ERRORCLASS : style;
      },

      indent: function(state, textAfter) {
        if (state.tokenize != tokenBase)
          return state.tokenize.isString ? CodeMirror.Pass : 0;

        var scope = top(state), closing = scope.type == textAfter.charAt(0)
        if (scope.align != null)
          return scope.align - (closing ? 1 : 0)
        else
          return scope.offset - (closing ? hangingIndent : 0)
      },

      electricInput: /^\s*[\}\]\)]$/,
      closeBrackets: {triples: "'\""},
      lineComment: "#",
      fold: "indent"
    };
    return external;
  });

  CodeMirror.defineMIME("text/x-adder", "adder");
});
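For context, here is a minimal sketch of how this mode could be wired into an editor. It assumes the standard CodeMirror 5 API (CodeMirror.fromTextArea, editor.setValue) and a bundler environment where require is available; the require path for the mode file, the textarea id, and the sample snippet are hypothetical illustrations, not documented parts of the adder-script package.

// Minimal usage sketch (assumptions noted above).
var CodeMirror = require("codemirror");          // CodeMirror 5
require("adder-script/mode/adder");              // hypothetical path to this mode file

// Attach an editor to an existing <textarea id="code"> (id is hypothetical),
// using the MIME type registered by CodeMirror.defineMIME at the end of the file.
var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
  mode: "text/x-adder",   // or simply "adder"
  indentUnit: 4,          // drives the indentation-based scope tracking (pushPyScope)
  lineNumbers: true
});

// A small snippet built only from keywords and builtins the mode highlights
// ("def", "if", "else", "print", "Alert", "Input"); semantics are illustrative.
editor.setValue([
  "def greet(name):",
  "    if name:",
  "        print('hello ' + name)",
  "    else:",
  "        Alert('no name given')",
  "",
  "greet(Input())"
].join("\n"));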