// Document analysis module.
|
|
// Call make(tokenize_mod, parse_mod, index_mod) to get an analysis object.
|
|
|
|
var json = use('json')
|
|
|
|
// Create an analysis module bound to the tokenize, parse, and index functions.
//
// tokenize_mod: function(src, uri) -> result object with a .tokens list
// parse_mod:    function(tokens, src, uri, tokenize_mod) -> ast (may carry .errors)
// index_mod:    object with index_ast(ast, tokens, uri), or null to skip indexing
var make = function(tokenize_mod, parse_mod, index_mod) {

    // Tokenize, parse, and index a document, storing the result in docs[uri].
    //
    // docs:   document store keyed by uri
    // uri:    document identifier
    // params: {src: source text, version: document version}
    //
    // Returns the stored document record:
    //   {uri, text, version, tokens, ast, errors, index}
    // Every stage failure is captured as a diagnostic or degrades to an
    // empty/null field rather than propagating out of update().
    var update = function(docs, uri, params) {
        var src = params.src
        var version = params.version

        var tok_result = null
        var ast = null
        var errors = []
        var doc = null

        var do_tokenize = function() {
            tok_result = tokenize_mod(src, uri)
        } disruption {
            // A tokenizer crash leaves tok_result null; surface a generic
            // diagnostic anchored at the top of the file.
            errors = [{message: "Tokenize failed", line: 1, column: 1}]
        }

        var do_parse = function() {
            ast = parse_mod(tok_result.tokens, src, uri, tokenize_mod)
        } disruption {
            // parse_mod may set errors on ast even on partial failure.
            // FIX: if the parser crashed outright (ast still null),
            // previously no diagnostic was recorded at all — the document
            // looked error-free despite a total parse failure. Report a
            // generic parse error in that case, mirroring do_tokenize.
            if (ast == null) {
                errors = [{message: "Parse failed", line: 1, column: 1}]
            }
        }

        do_tokenize()

        if (tok_result != null) {
            do_parse()

            // Prefer the parser's own diagnostics when it produced any.
            if (ast != null && ast.errors != null) {
                errors = ast.errors
            }
        }

        var idx = null

        var do_index = function() {
            idx = index_mod.index_ast(ast, (tok_result != null) ? tok_result.tokens : [], uri)
        } disruption {
            // Indexing failure is non-fatal: the document is still usable
            // without an index (idx stays null).
        }

        if (ast != null && index_mod != null) {
            do_index()
        }

        doc = {
            uri: uri,
            text: src,
            version: version,
            tokens: (tok_result != null) ? tok_result.tokens : [],
            ast: ast,
            errors: errors,
            index: idx
        }
        docs[uri] = doc
        return doc
    }

    // Remove a document from the store.
    var remove = function(docs, uri) {
        delete docs[uri]
    }

    // Convert a document's parse errors to LSP diagnostics.
    // Stored error positions are 1-based; LSP ranges are 0-based, so both
    // line and column are shifted down by one (missing positions default
    // to 0).
    var diagnostics = function(doc) {
        var result = []
        var _i = 0
        var e = null
        var line = null
        var col = null
        while (_i < length(doc.errors)) {
            e = doc.errors[_i]
            line = (e.line != null) ? e.line - 1 : 0
            col = (e.column != null) ? e.column - 1 : 0
            result[] = {
                range: {
                    start: {line: line, character: col},
                    // Highlight a single character; the error record does
                    // not carry an end position.
                    end: {line: line, character: col + 1}
                },
                severity: 1,  // 1 = Error in the LSP DiagnosticSeverity scale
                source: "pit",
                message: e.message
            }
            _i = _i + 1
        }
        return result
    }

    // Find the token covering a given 0-based line/column, or null.
    // The four checks cover: a single-line token containing the column,
    // a position on a line strictly inside a multi-line token, and the
    // last/first line of a multi-line token. Column ends are inclusive
    // (to_column >= col).
    var token_at = function(doc, line, col) {
        var tokens = doc.tokens
        var _i = 0
        var tok = null
        while (_i < length(tokens)) {
            tok = tokens[_i]
            // Single-line token containing the column.
            if (tok.from_row == line && tok.from_column <= col && tok.to_column >= col) {
                return tok
            }
            // Line strictly inside a multi-line token.
            if (tok.from_row < line && tok.to_row > line) {
                return tok
            }
            // Last line of a multi-line token.
            if (tok.from_row < line && tok.to_row == line && tok.to_column >= col) {
                return tok
            }
            // First line of a multi-line token.
            if (tok.from_row == line && tok.to_row > line && tok.from_column <= col) {
                return tok
            }
            _i = _i + 1
        }
        return null
    }

    return {
        update: update,
        remove: remove,
        diagnostics: diagnostics,
        token_at: token_at
    }
}
|
|
|
|
// Module export: callers bind their own tokenize/parse/index modules via make().
return make