// @abaplint/core — abaplint Core API (compiled JavaScript output)
// Source: lsp/semantic.ts (approx. 133 lines, 6.18 kB)
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.SemanticHighlighting = void 0;
// LSP type definitions; LServer.SemanticTokenTypes supplies the standard token type names.
const LServer = require("vscode-languageserver-types");
const position_1 = require("../position");
// VirtualPosition marks tokens whose statements are skipped in semanticTokensRange below.
const virtual_position_1 = require("../virtual_position");
const tokens_1 = require("../abap/1_lexer/tokens");
const nodes_1 = require("../abap/nodes");
const Statements = require("../abap/2_statements/statements");
const _lsp_utils_1 = require("./_lsp_utils");
// Custom (non-LSP-standard) TextMate scopes registered as extra token types in the legend.
const SOURCE_ABAP = "source.abap";
const BLOCK_ABAP = "storage.type.block.abap";
/**
 * Provides LSP semantic tokens (textDocument/semanticTokens) for ABAP files.
 * The legend is built lazily and shared across instances via static fields
 * (tokenTypes array + tokenTypeMap name->index lookup).
 */
class SemanticHighlighting {
    constructor(reg) {
        this.reg = reg;
        // Ensure the shared legend exists before any token request arrives.
        SemanticHighlighting.initLegend();
    }
    // https://code.visualstudio.com/api/language-extensions/semantic-highlight-guide#semantic-token-scope-map
    // https://microsoft.github.io/language-server-protocol/specifications/specification-3-17/#semanticTokenTypes
    /** Returns the legend advertised to the client; index positions must match encodeTokens(). */
    static semanticTokensLegend() {
        this.initLegend();
        return {
            tokenTypes: SemanticHighlighting.tokenTypes,
            tokenModifiers: [], // no modifiers are emitted, see encodeTokens()
        };
    }
    /** Builds the legend exactly once; an empty tokenTypes array doubles as the "uninitialized" flag. */
    static initLegend() {
        if (SemanticHighlighting.tokenTypes.length === 0) {
            SemanticHighlighting.tokenTypeMap = {};
            // Two custom TextMate scopes first, then every standard LSP token type.
            SemanticHighlighting.tokenTypeMap[SOURCE_ABAP] = SemanticHighlighting.tokenTypes.length;
            SemanticHighlighting.tokenTypes.push(SOURCE_ABAP);
            SemanticHighlighting.tokenTypeMap[BLOCK_ABAP] = SemanticHighlighting.tokenTypes.length;
            SemanticHighlighting.tokenTypes.push(BLOCK_ABAP);
            for (const t in LServer.SemanticTokenTypes) {
                SemanticHighlighting.tokenTypeMap[t] = SemanticHighlighting.tokenTypes.length;
                SemanticHighlighting.tokenTypes.push(t);
            }
        }
    }
    // https://microsoft.github.io/language-server-protocol/specifications/specification-3-17/#textDocument_semanticTokens
    /**
     * Returns encoded semantic tokens for every statement overlapping the requested range.
     * @param range {textDocument: {uri}, start: {line, character}, end: {line, character}} (0-based LSP positions)
     * @returns {data: number[]} in LSP relative encoding, empty when the file is unknown
     */
    semanticTokensRange(range) {
        const file = _lsp_utils_1.LSPUtils.getABAPFile(this.reg, range.textDocument.uri);
        if (file === undefined) {
            return { data: [] };
        }
        // abaplint positions are 1-based, LSP positions are 0-based.
        const rangeStartPosition = new position_1.Position(range.start.line + 1, range.start.character + 1);
        const rangeEndPosition = new position_1.Position(range.end.line + 1, range.end.character + 1);
        const tokens = [];
        for (const s of file.getStatements()) {
            if (s.getFirstToken().getStart() instanceof virtual_position_1.VirtualPosition) {
                // statements at virtual positions have no real location in this document
                continue;
            }
            else if (s.getFirstToken().getStart().isAfter(rangeEndPosition)) {
                // statements are in document order: nothing further can overlap the range
                break;
            }
            else if (s.getLastToken().getEnd().isBefore(rangeStartPosition)) {
                continue;
            }
            const statementInstance = s.get();
            for (const t of s.getTokenNodes()) {
                const tokenInstance = t.get();
                // default classification for unmatched tokens
                let tokenType = LServer.SemanticTokenTypes.keyword;
                if (tokenInstance instanceof tokens_1.Punctuation) {
                    tokenType = SOURCE_ABAP;
                }
                else if (statementInstance instanceof Statements.Public
                    || statementInstance instanceof Statements.Private
                    || statementInstance instanceof Statements.Protected
                    || statementInstance instanceof Statements.ClassDefinition
                    || statementInstance instanceof Statements.ClassImplementation
                    || statementInstance instanceof Statements.MethodImplementation
                    || statementInstance instanceof Statements.EndMethod
                    || statementInstance instanceof Statements.EndClass
                    || statementInstance instanceof Statements.Interface
                    || statementInstance instanceof Statements.EndInterface
                    || statementInstance instanceof Statements.Form
                    || statementInstance instanceof Statements.EndForm) {
                    // block-delimiting statements get the custom block scope for all their tokens
                    tokenType = BLOCK_ABAP;
                }
                else if (tokenInstance instanceof tokens_1.StringToken
                    || tokenInstance instanceof tokens_1.StringTemplate
                    || tokenInstance instanceof tokens_1.StringTemplateBegin
                    || tokenInstance instanceof tokens_1.StringTemplateEnd
                    || tokenInstance instanceof tokens_1.StringTemplateMiddle) {
                    tokenType = LServer.SemanticTokenTypes.string;
                }
                else if (tokenInstance instanceof tokens_1.Comment) {
                    tokenType = LServer.SemanticTokenTypes.comment;
                }
                else if (t instanceof nodes_1.TokenNodeRegex) {
                    // regex-matched token nodes (identifiers etc.) fall back to plain source
                    tokenType = SOURCE_ABAP;
                }
                const token = t.getFirstToken();
                tokens.push({
                    line: token.getStart().getRow() - 1,
                    startChar: token.getStart().getCol() - 1,
                    length: token.getStr().length,
                    tokenType: tokenType,
                    tokenModifiers: [],
                });
            }
        }
        return { data: this.encodeTokens(tokens) };
    }
    /**
     * Encodes tokens into the flat LSP relative format, 5 integers per token:
     * [deltaLine, deltaStartChar, length, tokenTypeIndex, tokenModifierBits].
     * deltaStartChar is relative to the previous token's start only when both
     * tokens are on the same line; otherwise it is the absolute column.
     */
    encodeTokens(tokens) {
        const ret = [];
        let prevLine = undefined;
        let prevChar = undefined;
        for (const t of tokens) {
            // first token carries an absolute line, the rest carry line deltas
            ret.push(prevLine === undefined ? t.line : t.line - prevLine);
            // compare against undefined, not truthiness: prevChar may legitimately be 0
            if (prevLine === t.line && prevChar !== undefined) {
                ret.push(t.startChar - prevChar);
            }
            else {
                ret.push(t.startChar); // new line (or first token): absolute column
            }
            ret.push(t.length);
            ret.push(SemanticHighlighting.tokenTypeMap[t.tokenType]);
            ret.push(0); // no modifier logic implemented yet
            prevLine = t.line;
            prevChar = t.startChar;
        }
        return ret;
    }
}
// Static backing store for the legend; empty means "not yet initialized"
// (populated lazily by SemanticHighlighting.initLegend()).
SemanticHighlighting.tokenTypes = [];
exports.SemanticHighlighting = SemanticHighlighting;
//# sourceMappingURL=semantic.js.map