Show More
Commit Description:
merge
Commit Description:
merge
References:
File last commit:
Show/Diff file:
Action:
node_modules/levn/lib/parse.js
| 102 lines
| 3.3 KiB
| application/javascript
| JavascriptLexer
|
r789 | // Generated by LiveScript 1.2.0 | |||
(function(){ | ||||
var reject, special, tokenRegex; | ||||
reject = require('prelude-ls').reject; | ||||
// Pop and return the next token, which must be exactly `op`.
// Throws a descriptive Error when the head of the stream does not match.
function consumeOp(tokens, op){
  if (tokens[0] !== op) {
    throw new Error("Expected '" + op + "', but got '" + tokens[0] + "' instead in " + JSON.stringify(tokens) + ".");
  }
  return tokens.shift();
}
// Pop and return the next token only when it equals `op`.
// Leaves the stream untouched (and returns undefined) otherwise.
function maybeConsumeOp(tokens, op){
  if (tokens[0] !== op) {
    return;
  }
  return tokens.shift();
}
// Parse a comma-separated sequence of elements, optionally wrapped in
// delimiters[0] ... delimiters[1] (e.g. '[' and ']'). Commas between
// elements are optional. Returns the parsed elements as an array.
function consumeList(tokens, delimiters, hasDelimiters){
  if (hasDelimiters) {
    consumeOp(tokens, delimiters[0]);
  }
  var items = [];
  for (;;) {
    // Stop at end of stream or at the closing delimiter.
    if (!tokens.length || tokens[0] === delimiters[1]) {
      break;
    }
    items.push(consumeElement(tokens));
    maybeConsumeOp(tokens, ',');
  }
  if (hasDelimiters) {
    consumeOp(tokens, delimiters[1]);
  }
  return items;
}
// Parse an array literal; delimiters '[' and ']' are required only
// when hasDelimiters is true.
function consumeArray(tokens, hasDelimiters){
  var brackets = ['[', ']'];
  return consumeList(tokens, brackets, hasDelimiters);
}
// Parse a tuple literal; delimiters '(' and ')' are required only
// when hasDelimiters is true.
function consumeTuple(tokens, hasDelimiters){
  var parens = ['(', ')'];
  return consumeList(tokens, parens, hasDelimiters);
}
// Parse a key:value field list into a plain object. Braces are
// consumed only when hasDelimiters is true; without them the loop
// runs until the token stream is exhausted. Commas are optional.
function consumeFields(tokens, hasDelimiters){
  if (hasDelimiters) {
    consumeOp(tokens, '{');
  }
  var fields = {};
  while (tokens.length) {
    // A closing brace only terminates the list in delimited form.
    if (hasDelimiters && tokens[0] === '}') {
      break;
    }
    var key = tokens.shift();
    consumeOp(tokens, ':');
    fields[key] = consumeElement(tokens);
    maybeConsumeOp(tokens, ',');
  }
  if (hasDelimiters) {
    consumeOp(tokens, '}');
  }
  return fields;
}
// Parse a single element: a delimited array, tuple, or field set when
// the head token opens one, otherwise the raw head token itself.
function consumeElement(tokens){
  var head = tokens[0];
  if (head === '[') {
    return consumeArray(tokens, true);
  }
  if (head === '(') {
    return consumeTuple(tokens, true);
  }
  if (head === '{') {
    return consumeFields(tokens, true);
  }
  return tokens.shift();
}
// Parse the whole token stream against `types`, a list of type
// descriptors of shape {type, structure} (only types[0] is inspected
// here). Returns the parsed value, or throws when tokens are left over.
function consumeTopLevel(tokens, types){
  var ref$, type, structure, origTokens, result, finalResult, x$, y$;
  ref$ = types[0], type = ref$.type, structure = ref$.structure;
  // Untouched copy of the stream, kept for the delimiter-wrapping
  // retry below and for the error message.
  origTokens = tokens.concat();
  // A single structured type (array/tuple/fields, or bare Array/Object)
  // may be written without its outer delimiters; detect which opener,
  // if any, is actually present via tokens[0].
  if (types.length === 1 && (structure || (type === 'Array' || type === 'Object'))) {
    result = structure === 'array' || type === 'Array'
    ? consumeArray(tokens, tokens[0] === '[')
    : structure === 'tuple'
    ? consumeTuple(tokens, tokens[0] === '(')
    : consumeFields(tokens, tokens[0] === '{');
    // If the first pass left tokens unconsumed, re-parse the original
    // stream with explicit delimiters synthesized around it.
    // NOTE(review): the comma expressions mutate origTokens in place
    // (unshift/push), so origTokens is no longer pristine afterwards —
    // presumably intentional, as it feeds straight into consumeElement.
    finalResult = tokens.length ? consumeElement(structure === 'array' || type === 'Array'
    ? (x$ = origTokens, x$.unshift('['), x$.push(']'), x$)
    : (y$ = origTokens, y$.unshift('('), y$.push(')'), y$)) : result;
  } else {
    finalResult = consumeElement(tokens);
  }
  // Unconsumed tokens mean the input did not fully parse as `types`.
  if (tokens.length && origTokens.length) {
    throw new Error("Unable to parse " + JSON.stringify(origTokens) + " of type " + JSON.stringify(types) + ".");
  } else {
    return finalResult;
  }
}
// Structural single-character tokens, written as a regex literal so the
// escaped source string can be spliced into the character classes below.
special = /\[\]\(\)}{:,/.source;
// Tokenizer. The alternatives are: double-quoted string | single-quoted
// string | #...# literal (presumably dates — confirm against the levn
// type coercion) | /regex/flags | one special char | a run of
// non-space, non-special chars | whitespace. Every alternative except
// whitespace is a capture group, so String#split keeps the matched
// tokens and drops only the whitespace.
tokenRegex = RegExp('("(?:[^"]|\\\\")*")|(\'(?:[^\']|\\\\\')*\')|(#.*#)|(/(?:\\\\/|[^/])*/[gimy]*)|([' + special + '])|([^\\s' + special + ']+)|\\s*');
module.exports = function(string, types){ | ||||
var tokens, node; | ||||
tokens = reject(function(it){ | ||||
return !it || /^\s+$/.test(it); | ||||
}, string.split(tokenRegex)); | ||||
node = consumeTopLevel(tokens, types); | ||||
if (!node) { | ||||
throw new Error("Error parsing '" + string + "'."); | ||||
} | ||||
return node; | ||||
}; | ||||
}).call(this); | ||||