App-Mxpress-PDF
view release on metacpan or search on metacpan
public/javascripts/ace/mode-jsoniq.js view on Meta::CPAN
"'skip'",
"'sliding'",
"'some'",
"'stable'",
"'start'",
"'stemming'",
"'stop'",
"'strict'",
"'strip'",
"'structured-item'",
"'switch'",
"'text'",
"'then'",
"'thesaurus'",
"'times'",
"'to'",
"'treat'",
"'try'",
"'tumbling'",
"'type'",
"'typeswitch'",
"'union'",
"'unique'",
"'unordered'",
"'updating'",
"'uppercase'",
"'using'",
"'validate'",
"'value'",
"'variable'",
"'version'",
"'weight'",
"'when'",
"'where'",
"'while'",
"'wildcards'",
"'window'",
"'with'",
"'without'",
"'word'",
"'words'",
"'xquery'",
"'zero-digit'",
"'{'",
"'{{'",
"'|'",
"'}'",
"'}}'"
];
},{}],"/node_modules/xqlint/lib/lexers/jsoniq_lexer.js":[function(_dereq_,module,exports){
'use strict';
// JSONiq lexer module (from xqlint): wires the generated JSONiqTokenizer to
// the generic Lexer driver using the Rules table defined below.
var JSONiqTokenizer = _dereq_('./JSONiqTokenizer').JSONiqTokenizer;
var Lexer = _dereq_('./lexer').Lexer;
// Pipe-separated list of JSONiq keyword names.
// NOTE(review): this line is truncated in this rendering ("copy-n..." with no
// closing quote); `keys` is used as an array below (indexOf/map), so the
// original presumably ends with something like '...'.split('|') — verify
// against the upstream xqlint source before editing.
var keys = 'NaN|after|allowing|ancestor|ancestor-or-self|and|append|array|as|ascending|at|attribute|base-uri|before|boundary-space|break|by|case|cast|castable|catch|child|collation|comment|constraint|construction|contains|context|continue|copy|copy-n...
// Rule entries matching quoted keyword terminals ("'after'", ...) and
// highlighting them as keywords (used in the `start` state).
var keywords = keys.map(function(val) { return { name: '\'' + val + '\'', token: 'keyword' }; });
// Same terminal names, but rendered as plain text and popping the current
// state — used where a keyword-shaped name is really an NCName (e.g. after a
// module or option declaration).
var ncnames = keys.map(function(val) { return { name: '\'' + val + '\'', token: 'text', next: function(stack){ stack.pop(); } }; });
// Ace highlighting token-class aliases used throughout the Rules table.
var cdata = 'constant.language';
var number = 'constant';
var xmlcomment = 'comment';
var pi = 'xml-pe';
var pragma = 'constant.buildin';
// Quote a literal terminal: the tokenizer reports literal terminals wrapped
// in single quotes, e.g. "'{'".
var n = function(name){
return '\'' + name + '\'';
};
// State machine driving the highlighter: one rule list per lexer state.
// Each rule matches a tokenizer terminal by name, assigns an Ace token class
// (a string, or a function of the matched text), and may push/pop states on
// the shared `stack` via its optional `next` callback.
var Rules = {
// Default expression-level state.
start: [
{ name: n('(#'), token: pragma, next: function(stack){ stack.push('Pragma'); } },
{ name: n('(:'), token: 'comment', next: function(stack){ stack.push('Comment'); } },
{ name: n('(:~'), token: 'comment.doc', next: function(stack){ stack.push('CommentDoc'); } },
{ name: n('<!--'), token: xmlcomment, next: function(stack){ stack.push('XMLComment'); } },
{ name: n('<?'), token: pi, next: function(stack) { stack.push('PI'); } },
{ name: n('\'\''), token: 'string', next: function(stack){ stack.push('AposString'); } },
{ name: n('"'), token: 'string', next: function(stack){ stack.push('QuotString'); } },
{ name: 'Annotation', token: 'support.function' },
{ name: 'ModuleDecl', token: 'keyword', next: function(stack){ stack.push('Prefix'); } },
{ name: 'OptionDecl', token: 'keyword', next: function(stack){ stack.push('_EQName'); } },
{ name: 'AttrTest', token: 'support.type' },
{ name: 'Variable', token: 'variable' },
{ name: n('<![CDATA['), token: cdata, next: function(stack){ stack.push('CData'); } },
{ name: 'IntegerLiteral', token: number },
{ name: 'DecimalLiteral', token: number },
{ name: 'DoubleLiteral', token: number },
{ name: 'Operator', token: 'keyword.operator' },
// A name is a keyword if it appears in `keys`, otherwise a function name.
{ name: 'EQName', token: function(val) { return keys.indexOf(val) !== -1 ? 'keyword' : 'support.function'; } },
{ name: n('('), token: 'lparen' },
{ name: n(')'), token: 'rparen' },
{ name: 'Tag', token: 'meta.tag', next: function(stack){ stack.push('StartTag'); } },
// '}' closes a nested enclosed expression: only pop if there is an outer state.
{ name: n('}'), token: 'text', next: function(stack){ if(stack.length > 1) { stack.pop(); } } },
{ name: n('{'), token: 'text', next: function(stack){ stack.push('start'); } } //, next: function(stack){ if(stack.length > 1) { stack.pop(); } } }
].concat(keywords),
// Expecting a single EQName (e.g. after an option declaration), then return.
_EQName: [
{ name: 'EQName', token: 'text', next: function(stack) { stack.pop(); } }
].concat(ncnames),
// Expecting a namespace prefix (after a module declaration), then return.
Prefix: [
{ name: 'NCName', token: 'text', next: function(stack) { stack.pop(); } }
].concat(ncnames),
// Inside an opening XML tag, scanning attributes until '>' or '/>'.
StartTag: [
{ name: n('>'), token: 'meta.tag', next: function(stack){ stack.push('TagContent'); } },
{ name: 'QName', token: 'entity.other.attribute-name' },
{ name: n('='), token: 'text' },
{ name: n('\'\''), token: 'string', next: function(stack){ stack.push('AposAttr'); } },
{ name: n('"'), token: 'string', next: function(stack){ stack.push('QuotAttr'); } },
{ name: n('/>'), token: 'meta.tag.r', next: function(stack){ stack.pop(); } }
],
// Element content between an opening and closing tag.
TagContent: [
{ name: 'ElementContentChar', token: 'text' },
{ name: n('<![CDATA['), token: cdata, next: function(stack){ stack.push('CData'); } },
{ name: n('<!--'), token: xmlcomment, next: function(stack){ stack.push('XMLComment'); } },
{ name: 'Tag', token: 'meta.tag', next: function(stack){ stack.push('StartTag'); } },
{ name: 'PredefinedEntityRef', token: 'constant.language.escape' },
{ name: 'CharRef', token: 'constant.language.escape' },
// '{{' / '}}' are escaped literal braces; a lone '{' opens an enclosed expression.
{ name: n('{{'), token: 'text' },
{ name: n('}}'), token: 'text' },
{ name: n('{'), token: 'text', next: function(stack){ stack.push('start'); } },
// End tag closes both TagContent and the enclosing StartTag state.
{ name: 'EndTag', token: 'meta.tag', next: function(stack){ stack.pop(); stack.pop(); } }
],
// Single-quoted attribute value.
AposAttr: [
{ name: n('\'\''), token: 'string', next: function(stack){ stack.pop(); } },
{ name: 'EscapeApos', token: 'constant.language.escape' },
{ name: 'AposAttrContentChar', token: 'string' },
{ name: 'PredefinedEntityRef', token: 'constant.language.escape' },
{ name: 'CharRef', token: 'constant.language.escape' },
{ name: n('{{'), token: 'string' },
{ name: n('}}'), token: 'string' },
{ name: n('{'), token: 'text', next: function(stack){ stack.push('start'); } }
],
// Double-quoted attribute value.
QuotAttr: [
{ name: n('\"'), token: 'string', next: function(stack){ stack.pop(); } },
{ name: 'EscapeQuot', token: 'constant.language.escape' },
{ name: 'QuotAttrContentChar', token: 'string' },
{ name: 'PredefinedEntityRef', token: 'constant.language.escape' },
{ name: 'CharRef', token: 'constant.language.escape' },
{ name: n('{{'), token: 'string' },
{ name: n('}}'), token: 'string' },
{ name: n('{'), token: 'text', next: function(stack){ stack.push('start'); } }
],
// Pragma: (# ... #)
Pragma: [
{ name: 'PragmaContents', token: pragma },
{ name: n('#'), token: pragma },
{ name: n('#)'), token: pragma, next: function(stack){ stack.pop(); } }
],
// Ordinary comment (: ... :) — nests, hence the extra push on '(:'.
Comment: [
{ name: 'CommentContents', token: 'comment' },
{ name: n('(:'), token: 'comment', next: function(stack){ stack.push('Comment'); } },
{ name: n(':)'), token: 'comment', next: function(stack){ stack.pop(); } }
],
// Documentation comment (:~ ... :) — also nests.
CommentDoc: [
{ name: 'DocCommentContents', token: 'comment.doc' },
{ name: 'DocTag', token: 'comment.doc.tag' },
{ name: n('(:'), token: 'comment.doc', next: function(stack){ stack.push('CommentDoc'); } },
{ name: n(':)'), token: 'comment.doc', next: function(stack){ stack.pop(); } }
],
// XML comment <!-- ... -->
XMLComment: [
{ name: 'DirCommentContents', token: xmlcomment },
{ name: n('-->'), token: xmlcomment, next: function(stack){ stack.pop(); } }
],
// CDATA section <![CDATA[ ... ]]>
CData: [
{ name: 'CDataSectionContents', token: cdata },
{ name: n(']]>'), token: cdata, next: function(stack){ stack.pop(); } }
],
// Processing instruction <? ... ?>
PI: [
{ name: 'DirPIContents', token: pi },
{ name: n('?'), token: pi },
{ name: n('?>'), token: pi, next: function(stack){ stack.pop(); } }
],
// Single-quoted string literal.
AposString: [
{ name: n('\'\''), token: 'string', next: function(stack){ stack.pop(); } },
{ name: 'PredefinedEntityRef', token: 'constant.language.escape' },
{ name: 'CharRef', token: 'constant.language.escape' },
{ name: 'EscapeApos', token: 'constant.language.escape' },
{ name: 'AposChar', token: 'string' }
],
// Double-quoted (JSON-style) string literal with JSON escapes.
QuotString: [
{ name: n('"'), token: 'string', next: function(stack){ stack.pop(); } },
{ name: 'JSONPredefinedCharRef', token: 'constant.language.escape' },
{ name: 'JSONCharRef', token: 'constant.language.escape' },
{ name: 'JSONChar', token: 'string' }
]
};
// Factory: each call returns a fresh, independent lexer instance.
exports.JSONiqLexer = function(){ return new Lexer(JSONiqTokenizer, Rules); };
},{"./JSONiqTokenizer":"/node_modules/xqlint/lib/lexers/JSONiqTokenizer.js","./lexer":"/node_modules/xqlint/lib/lexers/lexer.js"}],"/node_modules/xqlint/lib/lexers/lexer.js":[function(_dereq_,module,exports){
'use strict';
/**
 * Event handler fed to a generated tokenizer: collects the tokens it emits
 * for one line of input. The tokenizer reports terminals and whitespace as
 * [begin, end) offsets into the original text.
 * @param {string} code - The line of source text being tokenized.
 */
var TokenHandler = function(code) {
    var input = code;
    // Tokens collected so far, in emission order: { name, value }.
    this.tokens = [];
    // Clear collected tokens so this handler can be reused for another pass
    // over the same input. (A no-op `input = input;` self-assignment that was
    // here has been removed; `input` is never reassigned elsewhere.)
    this.reset = function() {
        this.tokens = [];
    };
    // Nonterminal boundaries carry no highlighting information; ignore them.
    this.startNonterminal = function() {};
    this.endNonterminal = function() {};
    /**
     * Record a terminal token covering input[begin, end).
     * @param {string} name - Terminal name as reported by the tokenizer.
     * @param {number} begin - Start offset (inclusive).
     * @param {number} end - End offset (exclusive).
     */
    this.terminal = function(name, begin, end) {
        this.tokens.push({
            name: name,
            value: input.substring(begin, end)
        });
    };
    // Whitespace is recorded as an ordinary token named 'WS' so the full line
    // can be reconstructed from the token stream.
    this.whitespace = function(begin, end) {
        this.tokens.push({
            name: 'WS',
            value: input.substring(begin, end)
        });
    };
};
exports.Lexer = function(Tokenizer, Rules) {
this.tokens = [];
this.getLineTokens = function(line, state) {
state = (state === 'start' || !state) ? '["start"]' : state;
var stack = JSON.parse(state);
var h = new TokenHandler(line);
( run in 2.067 seconds using v1.01-cache-2.11-cpan-39bf76dae61 )