[fix] use lsp-client 6.1
This commit is contained in:
parent
d5c5673c96
commit
776d5013d0
4 changed files with 470 additions and 471 deletions
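For orientation, the demo-side wiring this commit moves to with lsp-client 6.1 looks roughly like the sketch below (distilled from the changes that follow; the WebSocket address, the document URI and the `view` editor instance are placeholders, not values from this repo):

// Sketch only — assumes an existing EditorView named `view` and a running language server
lsp.simpleWebSocketTransport("ws://localhost:9093")            // placeholder address
    .then(transport => {
        // 6.1 style: extensions are passed to the client, connect() is a separate step
        const client = new lsp.LSPClient({ extensions: lsp.languageServerExtensions() });
        client.connect(transport);
        // the per-document editor extension now comes from client.plugin(uri, languageID)
        const ext = client.plugin("file:///example.xq", "xquery");   // placeholder URI
        view.dispatch({ effects: lsp.StateEffect.appendConfig.of(ext) });
    });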
@@ -6,17 +6,22 @@ import { lineNumbers, highlightActiveLineGutter, highlightSpecialChars,
 keymap, dropCursor,EditorView } from '@codemirror/view';
 import { openSearchPanel, highlightSelectionMatches, searchKeymap } from '@codemirror/search';
 import { openLintPanel, lintGutter, lintKeymap, linter, setDiagnostics, } from "@codemirror/lint"
 import { indentWithTab, history, defaultKeymap, historyKeymap } from '@codemirror/commands';
 import { foldGutter, indentOnInput, indentUnit, bracketMatching, foldKeymap,
 syntaxHighlighting, defaultHighlightStyle , StreamLanguage } from '@codemirror/language';
 import { closeBrackets, autocompletion, closeBracketsKeymap, completionKeymap } from '@codemirror/autocomplete';
+import { LSPClient, LSPPlugin, languageServerSupport, languageServerExtensions } from "@codemirror/lsp-client";
 import { xQuery } from "@codemirror/legacy-modes/mode/xquery"
-import { LSPClient, LSPPlugin, languageServerSupport } from "@codemirror/lsp-client";
 // Language
 import { xml } from "@codemirror/lang-xml";

@@ -105,4 +110,6 @@ function listCommands(view) {
 return commands;
 };
-export { EditorView, EditorState, StateEffect, openSearchPanel, openLintPanel, languageServerSupport, baseExts, simpleWebSocketTransport, linter, LSPPlugin, setDiagnostics, LSPClient, debouncedChangeListener, listCommands };
+export { baseExts, EditorView, EditorState, StateEffect, LSPPlugin, LSPClient,
+  openSearchPanel, openLintPanel, languageServerSupport, languageServerExtensions,
+  simpleWebSocketTransport, linter, setDiagnostics, debouncedChangeListener, listCommands };
@@ -25710,416 +25710,6 @@ var lsp = (function (exports) {
 ];
 const completionKeymapExt = /*@__PURE__*/Prec.highest(/*@__PURE__*/keymap.computeN([completionConfig], state => state.facet(completionConfig).defaultKeymap ? [completionKeymap] : []));
-// The keywords object is set to the result of this self executing
-// function. Each keyword is a property of the keywords object whose
-// value is {type: atype, style: astyle}
-var keywords = function(){
[… roughly 400 further removed lines: the bundled legacy XQuery stream mode — the keyword/type/operator/axis-specifier tables and the tokenBase, tokenComment, tokenString, tokenVariable, tokenTag, tokenAttribute, tokenXMLComment, tokenCDATA and tokenPreProcessing tokenizers, plus the xQuery mode object. The identical block is re-inserted further down in this file (hunk at -29973 below) …]
 /**
 * marked v15.0.12 - a markdown parser
 * Copyright (c) 2011-2025, Christopher Jeffrey. (MIT Licensed)
@@ -29952,6 +29542,35 @@ ${text}</tr>
 { key: "Escape", run: closeReferencePanel },
 ];
+
+function toSeverity(sev) {
+    return sev == 1 ? "error" : sev == 2 ? "warning" : sev == 3 ? "info" : "hint";
+}
+function serverDiagnostics() {
+    return {
+        clientCapabilities: { textDocument: { publishDiagnostics: { versionSupport: true } } },
+        notificationHandlers: {
+            "textDocument/publishDiagnostics": (client, params) => {
+                let file = client.workspace.getFile(params.uri);
+                if (!file || params.version != null && params.version != file.version)
+                    return false;
+                const view = file.getView(), plugin = view && LSPPlugin.get(view);
+                if (!view || !plugin)
+                    return false;
+                view.dispatch(setDiagnostics(view.state, params.diagnostics.map(item => {
+                    var _a;
+                    return ({
+                        from: plugin.unsyncedChanges.mapPos(plugin.fromPosition(item.range.start, plugin.syncedDoc)),
+                        to: plugin.unsyncedChanges.mapPos(plugin.fromPosition(item.range.end, plugin.syncedDoc)),
+                        severity: toSeverity((_a = item.severity) !== null && _a !== void 0 ? _a : 1),
+                        message: item.message,
+                    });
+                })));
+                return true;
+            }
+        }
+    };
+}
+
 /**
 Returns an extension that enables the [LSP
 plugin](https://codemirror.net/6/docs/ref/#lsp-client.LSPPlugin) as well as LSP based
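The new serverDiagnostics() handler above maps LSP severity codes onto @codemirror/lint severities via toSeverity(). A small illustration with a made-up publishDiagnostics payload (the field shapes follow the LSP notification; the values are invented):

// Hypothetical notification params, for illustration only
const params = {
    uri: "file:///example.xq",
    version: 3,
    diagnostics: [{
        range: { start: { line: 0, character: 4 }, end: { line: 0, character: 9 } },
        severity: 2,        // LSP: 1 = error, 2 = warning, 3 = info, 4 = hint
        message: "unused variable"
    }]
};
console.log(params.diagnostics.map(d => toSeverity(d.severity)));   // ["warning"]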
@@ -29973,6 +29592,431 @@ ${text}</tr>
 signatureHelp(),
 ];
 }
+/**
+This function bundles all the extensions defined in this package,
+in a way that can be passed to the
+[`extensions`](https://codemirror.net/6/docs/ref/#lsp-client.LSPClientConfig.extensions) option to
+`LSPClient`.
+*/
+function languageServerExtensions() {
+    return [
+        serverCompletion(),
+        hoverTooltips(),
+        keymap.of([...formatKeymap, ...renameKeymap, ...jumpToDefinitionKeymap, ...findReferencesKeymap]),
+        signatureHelp(),
+        serverDiagnostics()
+    ];
+}
+
[… roughly 400 further added lines: the legacy XQuery stream mode removed in the hunk at -25710 above (the keyword/type/operator/axis-specifier tables, the tokenizer functions and the xQuery mode object), re-inserted verbatim at this point in the bundle …]
 // return promise with socket map or reject if no connect
 function simpleWebSocketTransport(uri) {
@@ -30064,6 +30108,7 @@ ${text}</tr>
 exports.StateEffect = StateEffect;
 exports.baseExts = baseExts;
 exports.debouncedChangeListener = debouncedChangeListener;
+exports.languageServerExtensions = languageServerExtensions;
 exports.languageServerSupport = languageServerSupport;
 exports.linter = linter;
 exports.listCommands = listCommands;

File diff suppressed because one or more lines are too long
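languageServerSupport stays exported alongside the new languageServerExtensions, so the older one-call setup used by the demo before this commit keeps working — a minimal sketch, assuming a connected client and an existing EditorView named `view` (the URI is a placeholder):

// One-call alternative: returns the LSP plugin plus the LSP-based editor features for one document
const ext = lsp.languageServerSupport(client, "file:///example.xq", "xquery");   // placeholder URI
view.dispatch({ effects: lsp.StateEffect.appendConfig.of(ext) });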
@@ -62,25 +62,15 @@ function connect() {
 const file = $("iFile").value;
 lsp.simpleWebSocketTransport(server)
 .then(transport => {
-transport.subscribe(incoming);
 transport.socket.onclose = (event) => connectStatus(false);
 transport.socket.oneror = (event) => $("msg").innerText = "sock error!";
-client = new lsp.LSPClient().connect(transport);
+client = new lsp.LSPClient({extensions: lsp.languageServerExtensions()});
+client.connect(transport);
 $("popConnect").hidePopover();
 connectStatus(true);
-let extLsp = lsp.languageServerSupport(client, file, "xquery");
-extLint = lsp.linter(null,{ autoPanel: true });
-const doc = view.state.doc.toString();
-const exts=[ extLsp, extLint,
-lsp.debouncedChangeListener({
-delay: 750,
-onChange: (content, state) => {
-console.log('Debounced change detected:'+content);
-client.sync();
-}})
-];
-view.dispatch({ effects: lsp.StateEffect.appendConfig.of(exts) })
+let extLsp =client.plugin( file, "xquery");
+view.dispatch({ effects: lsp.StateEffect.appendConfig.of(extLsp) })
 })
 .catch(r => { connectStatus(false); alert("connection failed: " + server) });
@@ -96,19 +86,6 @@ function connectStatus(bool) {
 }
 };
-
-function incoming(msg) {
-    const rpc = JSON.parse(msg);
-    log(rpc);
-    switch (rpc.method) {
-        case "textDocument/publishDiagnostics":
-            diags(rpc.params);
-            break;
-        default:
-            return;
-    }
-};
-
 function log(rpc) {
 if (rpc.id) return
@@ -118,33 +95,3 @@ function log(rpc) {
 $("traffic").insertBefore(li, $("traffic").firstChild)
 };
-
-function diags(params) {
-    console.log("--", params)
-    let plugin = lsp.LSPPlugin.get(view);
-    const severities = ["error", "warning", "info", "hint"]
-    //
-    const diagnostics = params.diagnostics
-        .map(({ range, message, severity, code }) => ({
-            from: plugin.fromPosition(range.start, view.state.doc),
-            to: plugin.fromPosition(range.end, view.state.doc),
-            severity: severities[severity - 1],
-            message: ((typeof code === 'undefined') ? "" : `[${code}] `) + message,
-        }))
-        .filter(
-            ({ from, to }) =>
-                from !== null &&
-                to !== null &&
-                from !== undefined &&
-                to !== undefined,
-        )
-        .sort((a, b) => {
-            switch (true) {
-                case a.from < b.from:
-                    return -1;
-                case a.from > b.from:
-                    return 1;
-            }
-            return 0;
-        });
-    view.dispatch(lsp.setDiagnostics(view.state, diagnostics));
-};