[mlir:LSP] Switch document sync mode to Incremental

This is much more efficient than full document sync, as it only requires
sending small chunks of a file. It also works around a command-ordering
issue in newer versions of VS Code (full document updates being sent after
other requests, such as code completion).

Differential Revision: https://reviews.llvm.org/D126032
River Riddle 2022-05-17 15:16:24 -07:00
parent e52a38c8f1
commit 6187178e83
12 changed files with 425 additions and 82 deletions
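For context, an incremental textDocument/didChange carries a list of content
changes, each with an optional (line, character) range plus replacement text;
applying them in order reproduces the new document. Below is a minimal,
self-contained sketch of that semantics. It deliberately uses hypothetical
stand-in types rather than the MLIR LSP classes, and assumes ASCII content so
that columns map one-to-one to byte offsets.

#include <cstddef>
#include <string>
#include <vector>

// Hypothetical stand-ins for the protocol types, for illustration only.
struct Position { int line = 0, character = 0; };
struct Change { Position start, end; std::string text; };

// Convert a (line, character) position to a byte offset into `doc`.
static size_t toOffset(const std::string &doc, Position pos) {
  size_t offset = 0;
  for (int i = 0; i < pos.line; ++i)
    offset = doc.find('\n', offset) + 1;
  return offset + pos.character;
}

// Apply a sequence of range edits; each edit is expressed against the
// document produced by the previous one.
static void applyChanges(std::string &doc, const std::vector<Change> &changes) {
  for (const Change &c : changes) {
    size_t start = toOffset(doc, c.start), end = toOffset(doc, c.end);
    doc.replace(start, end - start, c.text);
  }
}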

View File

@ -11,6 +11,8 @@
//===----------------------------------------------------------------------===//
#include "Protocol.h"
#include "Logging.h"
#include "mlir/Support/LogicalResult.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/StringSwitch.h"
@ -18,6 +20,7 @@
#include "llvm/Support/Format.h"
#include "llvm/Support/FormatVariadic.h"
#include "llvm/Support/JSON.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/Path.h"
#include "llvm/Support/raw_ostream.h"
@ -462,6 +465,36 @@ bool mlir::lsp::fromJSON(const llvm::json::Value &value,
// DidChangeTextDocumentParams
//===----------------------------------------------------------------------===//
LogicalResult
TextDocumentContentChangeEvent::applyTo(std::string &contents) const {
// If there is no range, the full document changed.
if (!range) {
contents = text;
return success();
}
// Try to map the replacement range to the content.
llvm::SourceMgr tmpScrMgr;
tmpScrMgr.AddNewSourceBuffer(llvm::MemoryBuffer::getMemBuffer(contents),
SMLoc());
SMRange rangeLoc = range->getAsSMRange(tmpScrMgr);
if (!rangeLoc.isValid())
return failure();
contents.replace(rangeLoc.Start.getPointer() - contents.data(),
rangeLoc.End.getPointer() - rangeLoc.Start.getPointer(),
text);
return success();
}
LogicalResult TextDocumentContentChangeEvent::applyTo(
ArrayRef<TextDocumentContentChangeEvent> changes, std::string &contents) {
for (const auto &change : changes)
if (failed(change.applyTo(contents)))
return failure();
return success();
}
bool mlir::lsp::fromJSON(const llvm::json::Value &value,
TextDocumentContentChangeEvent &result,
llvm::json::Path path) {
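As a rough usage sketch of the applyTo entry points added above: the member
form applies a single change, and the static overload walks a list in order.
The field accesses below (range, text, start/end, line/character) match the
protocol structs in this patch, but the snippet is illustrative only and
assumes the internal Protocol.h header is available.

#include "Protocol.h"                    // lsp-server-support internal header
#include "mlir/Support/LogicalResult.h"

using namespace mlir;

// Insert " op<test.op>" at line 0, column 18 of `contents`.
static LogicalResult applyOneEdit(std::string &contents) {
  lsp::TextDocumentContentChangeEvent change;
  lsp::Range editRange;
  editRange.start.line = 0;
  editRange.start.character = 18;
  editRange.end = editRange.start;
  change.range = editRange; // leaving `range` unset replaces the whole document
  change.text = " op<test.op>";
  return change.applyTo(contents);
}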

View File

@ -35,6 +35,8 @@
#include <vector>
namespace mlir {
struct LogicalResult;
namespace lsp {
enum class ErrorCode {
@ -322,6 +324,18 @@ struct Range {
bool contains(Range range) const {
return start <= range.start && range.end <= end;
}
/// Convert this range into a source range in the main file of the given
/// source manager.
SMRange getAsSMRange(llvm::SourceMgr &mgr) const {
SMLoc startLoc = start.getAsSMLoc(mgr);
SMLoc endLoc = end.getAsSMLoc(mgr);
// Check that the start and end locations are valid.
if (!startLoc.isValid() || !endLoc.isValid() ||
startLoc.getPointer() > endLoc.getPointer())
return SMRange();
return SMRange(startLoc, endLoc);
}
};
/// Add support for JSON serialization.
@ -431,6 +445,12 @@ bool fromJSON(const llvm::json::Value &value,
//===----------------------------------------------------------------------===//
struct TextDocumentContentChangeEvent {
/// Try to apply this change to the given contents string.
LogicalResult applyTo(std::string &contents) const;
/// Try to apply a set of changes to the given contents string.
static LogicalResult applyTo(ArrayRef<TextDocumentContentChangeEvent> changes,
std::string &contents);
/// The range of the document that changed.
Optional<Range> range;
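Position::getAsSMLoc is not shown in this diff; as a hint to how getAsSMRange
above resolves LSP coordinates, here is a hypothetical helper with the mapping
one would expect, using llvm::SourceMgr::FindLocForLineAndColumn (LSP positions
are 0-based, while SourceMgr lines and columns are 1-based). This is an
assumption about the existing helper, not its actual implementation.

#include "llvm/Support/SMLoc.h"
#include "llvm/Support/SourceMgr.h"

// Map a 0-based (line, character) pair to an SMLoc in the main buffer.
static llvm::SMLoc positionToSMLoc(llvm::SourceMgr &mgr, int line,
                                   int character) {
  return mgr.FindLocForLineAndColumn(mgr.getMainFileID(), line + 1,
                                     character + 1);
}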

View File

@ -115,7 +115,7 @@ void LSPServer::onInitialize(const InitializeParams &params,
{"textDocumentSync",
llvm::json::Object{
{"openClose", true},
{"change", (int)TextDocumentSyncKind::Full},
{"change", (int)TextDocumentSyncKind::Incremental},
{"save", true},
}},
{"completionProvider",
@ -160,9 +160,8 @@ void LSPServer::onShutdown(const NoParams &, Callback<std::nullptr_t> reply) {
void LSPServer::onDocumentDidOpen(const DidOpenTextDocumentParams &params) {
PublishDiagnosticsParams diagParams(params.textDocument.uri,
params.textDocument.version);
server.addOrUpdateDocument(params.textDocument.uri, params.textDocument.text,
params.textDocument.version,
diagParams.diagnostics);
server.addDocument(params.textDocument.uri, params.textDocument.text,
params.textDocument.version, diagParams.diagnostics);
// Publish any recorded diagnostics.
publishDiagnostics(diagParams);
@ -179,15 +178,10 @@ void LSPServer::onDocumentDidClose(const DidCloseTextDocumentParams &params) {
PublishDiagnosticsParams(params.textDocument.uri, *version));
}
void LSPServer::onDocumentDidChange(const DidChangeTextDocumentParams &params) {
// TODO: We currently only support full document updates, we should refactor
// to avoid this.
if (params.contentChanges.size() != 1)
return;
PublishDiagnosticsParams diagParams(params.textDocument.uri,
params.textDocument.version);
server.addOrUpdateDocument(
params.textDocument.uri, params.contentChanges.front().text,
params.textDocument.version, diagParams.diagnostics);
server.updateDocument(params.textDocument.uri, params.contentChanges,
params.textDocument.version, diagParams.diagnostics);
// Publish any recorded diagnostics.
publishDiagnostics(diagParams);

View File

@ -1248,6 +1248,12 @@ public:
/// Return the current version of this text file.
int64_t getVersion() const { return version; }
/// Update the file to the new version using the provided set of content
/// changes. Returns failure if the update was unsuccessful.
LogicalResult update(const lsp::URIForFile &uri, int64_t newVersion,
ArrayRef<lsp::TextDocumentContentChangeEvent> changes,
std::vector<lsp::Diagnostic> &diagnostics);
//===--------------------------------------------------------------------===//
// LSP Queries
//===--------------------------------------------------------------------===//
@ -1268,6 +1274,10 @@ public:
lsp::PDLLViewOutputResult getPDLLViewOutput(lsp::PDLLViewOutputKind kind);
private:
/// Initialize the text file from the given file contents.
void initialize(const lsp::URIForFile &uri, int64_t newVersion,
std::vector<lsp::Diagnostic> &diagnostics);
/// Find the PDL document that contains the given position, and update the
/// position to be anchored at the start of the found chunk instead of the
/// beginning of the file.
@ -1277,7 +1287,7 @@ private:
std::string contents;
/// The version of this file.
int64_t version;
int64_t version = 0;
/// The number of lines in the file.
int64_t totalNumLines = 0;
@ -1285,6 +1295,9 @@ private:
/// The chunks of this file. The order of these chunks is the order in which
/// they appear in the text file.
std::vector<std::unique_ptr<PDLTextFileChunk>> chunks;
/// The extra set of include directories for this file.
std::vector<std::string> extraIncludeDirs;
};
} // namespace
@ -1292,38 +1305,22 @@ PDLTextFile::PDLTextFile(const lsp::URIForFile &uri, StringRef fileContents,
int64_t version,
const std::vector<std::string> &extraDirs,
std::vector<lsp::Diagnostic> &diagnostics)
: contents(fileContents.str()), version(version) {
// Split the file into separate PDL documents.
// TODO: Find a way to share the split file marker with other tools. We don't
// want to use `splitAndProcessBuffer` here, but we do want to make sure this
// marker doesn't go out of sync.
SmallVector<StringRef, 8> subContents;
StringRef(contents).split(subContents, "// -----");
chunks.emplace_back(std::make_unique<PDLTextFileChunk>(
/*lineOffset=*/0, uri, subContents.front(), extraDirs, diagnostics));
: contents(fileContents.str()), extraIncludeDirs(extraDirs) {
initialize(uri, version, diagnostics);
}
uint64_t lineOffset = subContents.front().count('\n');
for (StringRef docContents : llvm::drop_begin(subContents)) {
unsigned currentNumDiags = diagnostics.size();
auto chunk = std::make_unique<PDLTextFileChunk>(
lineOffset, uri, docContents, extraDirs, diagnostics);
lineOffset += docContents.count('\n');
// Adjust locations used in diagnostics to account for the offset from the
// beginning of the file.
for (lsp::Diagnostic &diag :
llvm::drop_begin(diagnostics, currentNumDiags)) {
chunk->adjustLocForChunkOffset(diag.range);
if (!diag.relatedInformation)
continue;
for (auto &it : *diag.relatedInformation)
if (it.location.uri == uri)
chunk->adjustLocForChunkOffset(it.location.range);
}
chunks.emplace_back(std::move(chunk));
LogicalResult
PDLTextFile::update(const lsp::URIForFile &uri, int64_t newVersion,
ArrayRef<lsp::TextDocumentContentChangeEvent> changes,
std::vector<lsp::Diagnostic> &diagnostics) {
if (failed(lsp::TextDocumentContentChangeEvent::applyTo(changes, contents))) {
lsp::Logger::error("Failed to update contents of {0}", uri.file());
return failure();
}
totalNumLines = lineOffset;
// If the file contents were properly changed, reinitialize the text file.
initialize(uri, newVersion, diagnostics);
return success();
}
void PDLTextFile::getLocationsOf(const lsp::URIForFile &uri,
@ -1454,6 +1451,45 @@ PDLTextFile::getPDLLViewOutput(lsp::PDLLViewOutputKind kind) {
return result;
}
void PDLTextFile::initialize(const lsp::URIForFile &uri, int64_t newVersion,
std::vector<lsp::Diagnostic> &diagnostics) {
version = newVersion;
chunks.clear();
// Split the file into separate PDL documents.
// TODO: Find a way to share the split file marker with other tools. We don't
// want to use `splitAndProcessBuffer` here, but we do want to make sure this
// marker doesn't go out of sync.
SmallVector<StringRef, 8> subContents;
StringRef(contents).split(subContents, "// -----");
chunks.emplace_back(std::make_unique<PDLTextFileChunk>(
/*lineOffset=*/0, uri, subContents.front(), extraIncludeDirs,
diagnostics));
uint64_t lineOffset = subContents.front().count('\n');
for (StringRef docContents : llvm::drop_begin(subContents)) {
unsigned currentNumDiags = diagnostics.size();
auto chunk = std::make_unique<PDLTextFileChunk>(
lineOffset, uri, docContents, extraIncludeDirs, diagnostics);
lineOffset += docContents.count('\n');
// Adjust locations used in diagnostics to account for the offset from the
// beginning of the file.
for (lsp::Diagnostic &diag :
llvm::drop_begin(diagnostics, currentNumDiags)) {
chunk->adjustLocForChunkOffset(diag.range);
if (!diag.relatedInformation)
continue;
for (auto &it : *diag.relatedInformation)
if (it.location.uri == uri)
chunk->adjustLocForChunkOffset(it.location.range);
}
chunks.emplace_back(std::move(chunk));
}
totalNumLines = lineOffset;
}
PDLTextFileChunk &PDLTextFile::getChunkFor(lsp::Position &pos) {
if (chunks.size() == 1)
return *chunks.front();
@ -1496,9 +1532,9 @@ lsp::PDLLServer::PDLLServer(const Options &options)
: impl(std::make_unique<Impl>(options)) {}
lsp::PDLLServer::~PDLLServer() = default;
void lsp::PDLLServer::addOrUpdateDocument(
const URIForFile &uri, StringRef contents, int64_t version,
std::vector<Diagnostic> &diagnostics) {
void lsp::PDLLServer::addDocument(const URIForFile &uri, StringRef contents,
int64_t version,
std::vector<Diagnostic> &diagnostics) {
// Build the set of additional include directories.
std::vector<std::string> additionalIncludeDirs = impl->options.extraDirs;
const auto &fileInfo = impl->compilationDatabase.getFileInfo(uri.file());
@ -1508,6 +1544,20 @@ void lsp::PDLLServer::addOrUpdateDocument(
uri, contents, version, additionalIncludeDirs, diagnostics);
}
void lsp::PDLLServer::updateDocument(
const URIForFile &uri, ArrayRef<TextDocumentContentChangeEvent> changes,
int64_t version, std::vector<Diagnostic> &diagnostics) {
// Check that we actually have a document for this uri.
auto it = impl->files.find(uri.file());
if (it == impl->files.end())
return;
// Try to update the document. If we fail, erase the file from the server. A
// failed update generally means we've fallen out of sync somewhere.
if (failed(it->second->update(uri, version, changes, diagnostics)))
impl->files.erase(it);
}
Optional<int64_t> lsp::PDLLServer::removeDocument(const URIForFile &uri) {
auto it = impl->files.find(uri.file());
if (it == impl->files.end())

View File

@ -27,6 +27,7 @@ struct Hover;
struct Location;
struct Position;
struct SignatureHelp;
struct TextDocumentContentChangeEvent;
class URIForFile;
/// This class implements all of the PDLL related functionality necessary for a
@ -50,12 +51,16 @@ public:
PDLLServer(const Options &options);
~PDLLServer();
/// Add or update the document, with the provided `version`, at the given URI.
/// Any diagnostics emitted for this document should be added to
/// `diagnostics`.
void addOrUpdateDocument(const URIForFile &uri, StringRef contents,
int64_t version,
std::vector<Diagnostic> &diagnostics);
/// Add the document, with the provided `version`, at the given URI. Any
/// diagnostics emitted for this document should be added to `diagnostics`.
void addDocument(const URIForFile &uri, StringRef contents, int64_t version,
std::vector<Diagnostic> &diagnostics);
/// Update the document, with the provided `version`, at the given URI. Any
/// diagnostics emitted for this document should be added to `diagnostics`.
void updateDocument(const URIForFile &uri,
ArrayRef<TextDocumentContentChangeEvent> changes,
int64_t version, std::vector<Diagnostic> &diagnostics);
/// Remove the document with the given uri. Returns the version of the removed
/// document, or None if the uri did not have a corresponding document within

View File

@ -89,7 +89,7 @@ void LSPServer::onInitialize(const InitializeParams &params,
{"textDocumentSync",
llvm::json::Object{
{"openClose", true},
{"change", (int)TextDocumentSyncKind::Full},
{"change", (int)TextDocumentSyncKind::Incremental},
{"save", true},
}},
{"definitionProvider", true},
@ -119,9 +119,8 @@ void LSPServer::onShutdown(const NoParams &, Callback<std::nullptr_t> reply) {
void LSPServer::onDocumentDidOpen(const DidOpenTextDocumentParams &params) {
PublishDiagnosticsParams diagParams(params.textDocument.uri,
params.textDocument.version);
server.addOrUpdateDocument(params.textDocument.uri, params.textDocument.text,
params.textDocument.version,
diagParams.diagnostics);
server.addDocument(params.textDocument.uri, params.textDocument.text,
params.textDocument.version, diagParams.diagnostics);
// Publish any recorded diagnostics.
publishDiagnostics(diagParams);
@ -138,15 +137,10 @@ void LSPServer::onDocumentDidClose(const DidCloseTextDocumentParams &params) {
PublishDiagnosticsParams(params.textDocument.uri, *version));
}
void LSPServer::onDocumentDidChange(const DidChangeTextDocumentParams &params) {
// TODO: We currently only support full document updates, we should refactor
// to avoid this.
if (params.contentChanges.size() != 1)
return;
PublishDiagnosticsParams diagParams(params.textDocument.uri,
params.textDocument.version);
server.addOrUpdateDocument(
params.textDocument.uri, params.contentChanges.front().text,
params.textDocument.version, diagParams.diagnostics);
server.updateDocument(params.textDocument.uri, params.contentChanges,
params.textDocument.version, diagParams.diagnostics);
// Publish any recorded diagnostics.
publishDiagnostics(diagParams);

View File

@ -12,6 +12,7 @@
#include "../lsp-server-support/Logging.h"
#include "../lsp-server-support/Protocol.h"
#include "../lsp-server-support/SourceMgrUtils.h"
#include "mlir/Support/LogicalResult.h"
#include "llvm/ADT/IntervalMap.h"
#include "llvm/ADT/PointerUnion.h"
#include "llvm/ADT/StringMap.h"
@ -244,6 +245,12 @@ public:
/// Return the current version of this text file.
int64_t getVersion() const { return version; }
/// Update the file to the new version using the provided set of content
/// changes. Returns failure if the update was unsuccessful.
LogicalResult update(const lsp::URIForFile &uri, int64_t newVersion,
ArrayRef<lsp::TextDocumentContentChangeEvent> changes,
std::vector<lsp::Diagnostic> &diagnostics);
//===--------------------------------------------------------------------===//
// Definitions and References
//===--------------------------------------------------------------------===//
@ -268,6 +275,10 @@ public:
const lsp::Position &hoverPos);
private:
/// Initialize the text file from the given file contents.
void initialize(const lsp::URIForFile &uri, int64_t newVersion,
std::vector<lsp::Diagnostic> &diagnostics);
/// The full string contents of the file.
std::string contents;
@ -281,7 +292,7 @@ private:
llvm::SourceMgr sourceMgr;
/// The record keeper containing the parsed tablegen constructs.
llvm::RecordKeeper recordKeeper;
std::unique_ptr<llvm::RecordKeeper> recordKeeper;
/// The index of the parsed file.
TableGenIndex index;
@ -296,12 +307,6 @@ TableGenTextFile::TableGenTextFile(
const std::vector<std::string> &extraIncludeDirs,
std::vector<lsp::Diagnostic> &diagnostics)
: contents(fileContents.str()), version(version) {
auto memBuffer = llvm::MemoryBuffer::getMemBufferCopy(contents, uri.file());
if (!memBuffer) {
lsp::Logger::error("Failed to create memory buffer for file", uri.file());
return;
}
// Build the set of include directories for this file.
llvm::SmallString<32> uriDirectory(uri.file());
llvm::sys::path::remove_filename(uriDirectory);
@ -309,6 +314,37 @@ TableGenTextFile::TableGenTextFile(
includeDirs.insert(includeDirs.end(), extraIncludeDirs.begin(),
extraIncludeDirs.end());
// Initialize the file.
initialize(uri, version, diagnostics);
}
LogicalResult
TableGenTextFile::update(const lsp::URIForFile &uri, int64_t newVersion,
ArrayRef<lsp::TextDocumentContentChangeEvent> changes,
std::vector<lsp::Diagnostic> &diagnostics) {
if (failed(lsp::TextDocumentContentChangeEvent::applyTo(changes, contents))) {
lsp::Logger::error("Failed to update contents of {0}", uri.file());
return failure();
}
// If the file contents were properly changed, reinitialize the text file.
initialize(uri, newVersion, diagnostics);
return success();
}
void TableGenTextFile::initialize(const lsp::URIForFile &uri,
int64_t newVersion,
std::vector<lsp::Diagnostic> &diagnostics) {
version = newVersion;
sourceMgr = llvm::SourceMgr();
recordKeeper = std::make_unique<llvm::RecordKeeper>();
// Build a buffer for this file.
auto memBuffer = llvm::MemoryBuffer::getMemBuffer(contents, uri.file());
if (!memBuffer) {
lsp::Logger::error("Failed to create memory buffer for file", uri.file());
return;
}
sourceMgr.setIncludeDirs(includeDirs);
sourceMgr.AddNewSourceBuffer(std::move(memBuffer), SMLoc());
@ -327,7 +363,7 @@ TableGenTextFile::TableGenTextFile(
ctx->diagnostics.push_back(*lspDiag);
},
&handlerContext);
bool failedToParse = llvm::TableGenParseFile(sourceMgr, recordKeeper);
bool failedToParse = llvm::TableGenParseFile(sourceMgr, *recordKeeper);
// Process all of the include files.
lsp::gatherIncludeFiles(sourceMgr, parsedIncludes);
@ -335,7 +371,7 @@ TableGenTextFile::TableGenTextFile(
return;
// If we successfully parsed the file, we can now build the index.
index.initialize(recordKeeper);
index.initialize(*recordKeeper);
}
//===----------------------------------------------------------------------===//
@ -417,9 +453,9 @@ lsp::TableGenServer::TableGenServer(const Options &options)
: impl(std::make_unique<Impl>(options)) {}
lsp::TableGenServer::~TableGenServer() = default;
void lsp::TableGenServer::addOrUpdateDocument(
const URIForFile &uri, StringRef contents, int64_t version,
std::vector<Diagnostic> &diagnostics) {
void lsp::TableGenServer::addDocument(const URIForFile &uri, StringRef contents,
int64_t version,
std::vector<Diagnostic> &diagnostics) {
// Build the set of additional include directories.
std::vector<std::string> additionalIncludeDirs = impl->options.extraDirs;
const auto &fileInfo = impl->compilationDatabase.getFileInfo(uri.file());
@ -429,6 +465,20 @@ void lsp::TableGenServer::addOrUpdateDocument(
uri, contents, version, additionalIncludeDirs, diagnostics);
}
void lsp::TableGenServer::updateDocument(
const URIForFile &uri, ArrayRef<TextDocumentContentChangeEvent> changes,
int64_t version, std::vector<Diagnostic> &diagnostics) {
// Check that we actually have a document for this uri.
auto it = impl->files.find(uri.file());
if (it == impl->files.end())
return;
// Try to update the document. If we fail, erase the file from the server. A
// failed update generally means we've fallen out of sync somewhere.
if (failed(it->second->update(uri, version, changes, diagnostics)))
impl->files.erase(it);
}
Optional<int64_t> lsp::TableGenServer::removeDocument(const URIForFile &uri) {
auto it = impl->files.find(uri.file());
if (it == impl->files.end())

View File

@ -21,6 +21,7 @@ struct DocumentLink;
struct Hover;
struct Location;
struct Position;
struct TextDocumentContentChangeEvent;
class URIForFile;
/// This class implements all of the TableGen related functionality necessary
@ -44,12 +45,16 @@ public:
TableGenServer(const Options &options);
~TableGenServer();
/// Add or update the document, with the provided `version`, at the given URI.
/// Any diagnostics emitted for this document should be added to
/// `diagnostics`.
void addOrUpdateDocument(const URIForFile &uri, StringRef contents,
int64_t version,
std::vector<Diagnostic> &diagnostics);
/// Add the document, with the provided `version`, at the given URI. Any
/// diagnostics emitted for this document should be added to `diagnostics`.
void addDocument(const URIForFile &uri, StringRef contents, int64_t version,
std::vector<Diagnostic> &diagnostics);
/// Update the document, with the provided `version`, at the given URI. Any
/// diagnostics emitted for this document should be added to `diagnostics`.
void updateDocument(const URIForFile &uri,
ArrayRef<TextDocumentContentChangeEvent> changes,
int64_t version, std::vector<Diagnostic> &diagnostics);
/// Remove the document with the given uri. Returns the version of the removed
/// document, or None if the uri did not have a corresponding document within

View File

@ -26,7 +26,7 @@
// CHECK-NEXT: ]
// CHECK-NEXT: },
// CHECK-NEXT: "textDocumentSync": {
// CHECK-NEXT: "change": 1,
// CHECK-NEXT: "change": 2,
// CHECK-NEXT: "openClose": true,
// CHECK-NEXT: "save": true
// CHECK-NEXT: }

View File

@ -0,0 +1,96 @@
// RUN: mlir-pdll-lsp-server -lit-test < %s | FileCheck -strict-whitespace %s
{"jsonrpc":"2.0","id":0,"method":"initialize","params":{"processId":123,"rootPath":"pdll","capabilities":{},"trace":"off"}}
// -----
{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{
"uri":"test:///foo.pdll",
"languageId":"pdll",
"version":1,
"text":"Pattern => replace with ;"
}}}
// CHECK: "method": "textDocument/publishDiagnostics",
// CHECK-NEXT: "params": {
// CHECK-NEXT: "diagnostics": [
// CHECK-NEXT: {
// CHECK-NEXT: "category": "Parse Error",
// CHECK-NEXT: "message": "expected expression",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "end": {
// CHECK-NEXT: "character": 23,
// CHECK-NEXT: "line": 0
// CHECK-NEXT: },
// CHECK-NEXT: "start": {
// CHECK-NEXT: "character": 19,
// CHECK-NEXT: "line": 0
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "severity": 1,
// CHECK-NEXT: "source": "pdll"
// CHECK-NEXT: }
// CHECK-NEXT: ],
// CHECK-NEXT: "uri": "test:///foo.pdll",
// CHECK-NEXT: "version": 1
// CHECK-NEXT: }
// -----
{"jsonrpc":"2.0","method":"textDocument/didChange","params":{"textDocument":{
"uri":"test:///foo.pdll",
"version":2
}, "contentChanges": [{
"range":{
"start":{"line":0,"character":18},
"end":{"line":0,"character":18}
},
"text": " op<test.op>"
}]}}
// CHECK: "method": "textDocument/publishDiagnostics",
// CHECK-NEXT: "params": {
// CHECK-NEXT: "diagnostics": [
// CHECK-NEXT: {
// CHECK-NEXT: "category": "Parse Error",
// CHECK-NEXT: "message": "expected expression",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "end": {
// CHECK-NEXT: "character": 37,
// CHECK-NEXT: "line": 0
// CHECK-NEXT: },
// CHECK-NEXT: "start": {
// CHECK-NEXT: "character": 36,
// CHECK-NEXT: "line": 0
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "severity": 1,
// CHECK-NEXT: "source": "pdll"
// CHECK-NEXT: }
// CHECK-NEXT: ],
// CHECK-NEXT: "uri": "test:///foo.pdll",
// CHECK-NEXT: "version": 2
// CHECK-NEXT: }
// -----
{"jsonrpc":"2.0","method":"textDocument/didChange","params":{"textDocument":{
"uri":"test:///foo.pdll",
"version":3
}, "contentChanges": [
{
"range":{
"start":{"line":0,"character":30},
"end":{"line":0,"character":30}
},
"text": "(values: ValueRange)"
},
{
"range":{
"start":{"line":0,"character":56},
"end":{"line":0,"character":57}
},
"text": "values;"
}
]}}
// CHECK: "method": "textDocument/publishDiagnostics",
// CHECK-NEXT: "params": {
// CHECK-NEXT: "diagnostics": [],
// CHECK-NEXT: "uri": "test:///foo.pdll",
// CHECK-NEXT: "version": 3
// CHECK-NEXT: }
// -----
{"jsonrpc":"2.0","id":3,"method":"shutdown"}
// -----
{"jsonrpc":"2.0","method":"exit"}

View File

@ -12,7 +12,7 @@
// CHECK-NEXT: "hoverProvider": true,
// CHECK-NEXT: "referencesProvider": true,
// CHECK-NEXT: "textDocumentSync": {
// CHECK-NEXT: "change": 1,
// CHECK-NEXT: "change": 2,
// CHECK-NEXT: "openClose": true,
// CHECK-NEXT: "save": true
// CHECK-NEXT: }

View File

@ -0,0 +1,96 @@
// RUN: tblgen-lsp-server -lit-test < %s | FileCheck -strict-whitespace %s
{"jsonrpc":"2.0","id":0,"method":"initialize","params":{"processId":123,"rootPath":"tablegen","capabilities":{},"trace":"off"}}
// -----
{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{
"uri":"test:///foo.td",
"languageId":"tablegen",
"version":1,
"text":"class Foo<>;"
}}}
// CHECK: "method": "textDocument/publishDiagnostics",
// CHECK-NEXT: "params": {
// CHECK-NEXT: "diagnostics": [
// CHECK-NEXT: {
// CHECK-NEXT: "category": "Parse Error",
// CHECK-NEXT: "message": "Unknown token when expecting a type",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "end": {
// CHECK-NEXT: "character": 11,
// CHECK-NEXT: "line": 0
// CHECK-NEXT: },
// CHECK-NEXT: "start": {
// CHECK-NEXT: "character": 10,
// CHECK-NEXT: "line": 0
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "severity": 1,
// CHECK-NEXT: "source": "tablegen"
// CHECK-NEXT: }
// CHECK-NEXT: ],
// CHECK-NEXT: "uri": "test:///foo.td",
// CHECK-NEXT: "version": 1
// CHECK-NEXT: }
// -----
{"jsonrpc":"2.0","method":"textDocument/didChange","params":{"textDocument":{
"uri":"test:///foo.td",
"version":2
}, "contentChanges": [{
"range":{
"start":{"line":0,"character":10},
"end":{"line":0,"character":10}
},
"text": "int"
}]}}
// CHECK: "method": "textDocument/publishDiagnostics",
// CHECK-NEXT: "params": {
// CHECK-NEXT: "diagnostics": [
// CHECK-NEXT: {
// CHECK-NEXT: "category": "Parse Error",
// CHECK-NEXT: "message": "Expected identifier in declaration",
// CHECK-NEXT: "range": {
// CHECK-NEXT: "end": {
// CHECK-NEXT: "character": 14,
// CHECK-NEXT: "line": 0
// CHECK-NEXT: },
// CHECK-NEXT: "start": {
// CHECK-NEXT: "character": 13,
// CHECK-NEXT: "line": 0
// CHECK-NEXT: }
// CHECK-NEXT: },
// CHECK-NEXT: "severity": 1,
// CHECK-NEXT: "source": "tablegen"
// CHECK-NEXT: }
// CHECK-NEXT: ],
// CHECK-NEXT: "uri": "test:///foo.td",
// CHECK-NEXT: "version": 2
// CHECK-NEXT: }
// -----
{"jsonrpc":"2.0","method":"textDocument/didChange","params":{"textDocument":{
"uri":"test:///foo.td",
"version":3
}, "contentChanges": [
{
"range":{
"start":{"line":0,"character":13},
"end":{"line":0,"character":13}
},
"text": " i"
},
{
"range":{
"start":{"line":0,"character":15},
"end":{"line":0,"character":17}
},
"text": "> { int x = i; }"
}
]}}
// CHECK: "method": "textDocument/publishDiagnostics",
// CHECK-NEXT: "params": {
// CHECK-NEXT: "diagnostics": [],
// CHECK-NEXT: "uri": "test:///foo.td",
// CHECK-NEXT: "version": 3
// CHECK-NEXT: }
// -----
{"jsonrpc":"2.0","id":3,"method":"shutdown"}
// -----
{"jsonrpc":"2.0","method":"exit"}