libnixf: remove DiagnosticEngine (#302)
inclyc authored Jan 19, 2024
1 parent ff6c872 commit 62d3a0a
Showing 8 changed files with 133 additions and 159 deletions.
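
In short, the dedicated DiagnosticEngine sink is gone: callers now own a plain std::vector<Diagnostic> and pass it by reference, and the lexer and parser append diagnostics to it directly with emplace_back. A rough before/after sketch of a call site (illustrative only, not taken verbatim from the diff):

// Before: diagnostics were collected by a separate engine object.
// DiagnosticEngine DE;
// std::shared_ptr<Node> AST = nixf::parse(Src, DE);

// After: the caller supplies the vector; diagnostics are appended to it.
std::vector<nixf::Diagnostic> Diags;
std::shared_ptr<nixf::Node> AST = nixf::parse(Src, Diags);
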
6 changes: 3 additions & 3 deletions libnixf/include/nixf/Basic/Diagnostic.h
@@ -142,10 +142,10 @@ class Diagnostic : public PartialDiagnostic {
[[nodiscard]] virtual const char *sname() const { return sname(kind()); }

Note &note(Note::NoteKind Kind, RangeTy Range) {
return *Notes.emplace_back(std::make_unique<Note>(Kind, Range));
return Notes.emplace_back(Kind, Range);
}

std::vector<std::unique_ptr<Note>> &notes() { return Notes; }
std::vector<Note> &notes() { return Notes; }

Diagnostic &fix(Fix F) {
Fixes.emplace_back(std::move(F));
@@ -161,7 +161,7 @@ class Diagnostic : public PartialDiagnostic {
/// Location of this diagnostic
RangeTy Range;

std::vector<std::unique_ptr<Note>> Notes;
std::vector<Note> Notes;
std::vector<Fix> Fixes;
};

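Notes are now stored by value rather than through std::unique_ptr, so note() can simply return the reference that std::vector::emplace_back itself returns (guaranteed since C++17), and each note no longer costs a separate heap allocation. A minimal sketch of building a diagnostic under the new storage, mirroring the patterns used in Parser.cpp further down (Loc and SomeRange are placeholder variables):

Diagnostic &D = Diags.emplace_back(Diagnostic::DK_Expected, RangeTy(Loc));
D << std::string("an expression");       // streamed message argument
D.note(Note::NK_ToMachThis, SomeRange);  // note constructed in place inside the Diagnostic
D.fix(Fix::mkInsertion(Loc, " expr"));

One thing to keep in mind with value storage: a Diagnostic& obtained from emplace_back is invalidated if the vector reallocates, so it should only be used before further diagnostics are appended to the same vector.
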
27 changes: 0 additions & 27 deletions libnixf/include/nixf/Basic/DiagnosticEngine.h

This file was deleted.

9 changes: 6 additions & 3 deletions libnixf/include/nixf/Parse/Parser.h
@@ -7,14 +7,17 @@

#include <memory>
#include <string_view>
#include <vector>

namespace nixf {

class Node;
class DiagnosticEngine;
class Diagnostic;

/// \brief Parse a string.
/// \param Diag Diagnostics will be written here.
std::shared_ptr<Node> parse(std::string_view Src, DiagnosticEngine &Diag);
/// \param Src The string to parse.
/// \param Diags Diagnostics will be appended to this vector.
std::shared_ptr<Node> parse(std::string_view Src,
std::vector<Diagnostic> &Diags);

} // namespace nixf
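
With the engine gone, the public entry point takes the diagnostic vector directly. A small end-to-end caller might look like this (a sketch based on the headers above; message() is used the same way as in the tests at the bottom of this commit):

#include "nixf/Basic/Diagnostic.h"
#include "nixf/Parse/Parser.h"

#include <iostream>
#include <memory>
#include <string>
#include <vector>

int main() {
  std::vector<nixf::Diagnostic> Diags;
  std::shared_ptr<nixf::Node> AST = nixf::parse("{ a = 1; }", Diags);
  for (nixf::Diagnostic &D : Diags)
    std::cout << std::string(D.message()) << "\n"; // diagnostic format string
  return Diags.empty() ? 0 : 1;
}
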
7 changes: 3 additions & 4 deletions libnixf/src/Parse/Lexer.cpp
@@ -1,6 +1,5 @@
#include "Lexer.h"

#include "nixf/Basic/DiagnosticEngine.h"
#include "nixf/Basic/Range.h"

#include <cassert>
@@ -124,7 +123,7 @@ bool Lexer::consumeComments() {
if (eof()) {
// There is no '*/' to terminate comments
Diagnostic &Diag =
Diags.diag(DK::DK_UnterminatedBComment, {cur(), cur()});
Diags.emplace_back(DK::DK_UnterminatedBComment, RangeTy{cur()});
Diag.note(NK::NK_BCommentBegin, *BeginRange);
Diag.fix(Fix::mkInsertion(cur(), "*/"));
return true;
@@ -164,7 +163,7 @@ bool Lexer::lexFloatExp() {
// [0-9]+
if (!consumeManyDigits()) {
// not matching [0-9]+, error
Diags.diag(DK::DK_FloatNoExp, curRange()) << std::string(1, *ECh);
Diags.emplace_back(DK::DK_FloatNoExp, curRange()) << std::string(1, *ECh);
return false;
}
}
@@ -204,7 +203,7 @@ void Lexer::lexNumbers() {
// Checking that if the float token has leading zeros.
std::string_view Prefix = Src.substr(Ch->begin().Offset, 2);
if (Prefix.starts_with("0") && Prefix != "0.")
Diags.diag(DK::DK_FloatLeadingZero, *Ch) << std::string(Prefix);
Diags.emplace_back(DK::DK_FloatLeadingZero, *Ch) << std::string(Prefix);
} else {
Tok = tok_int;
}
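
The lexer now appends diagnostics to the same vector with emplace_back, and a streamed operand is stored alongside the message rather than spliced into it, which is what the FloatLeadingZero test below verifies. A hedged illustration of what lands in the vector for the input "00.33" (the message text is copied from that test; Lexer itself is an internal class declared in src/Parse/Lexer.h):

std::vector<nixf::Diagnostic> Diags;
nixf::Lexer L("00.33", Diags);
L.lex();
// Diags[0].message(): "float begins with extra zeros `{}` is nixf extension"
// Diags[0].args()[0]: "00"   (presumably substituted for the `{}` placeholder when rendered)
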
7 changes: 3 additions & 4 deletions libnixf/src/Parse/Lexer.h
@@ -9,6 +9,7 @@

#include "Token.h"

#include "nixf/Basic/Diagnostic.h"
#include "nixf/Basic/Range.h"

#include <cassert>
@@ -19,11 +20,9 @@

namespace nixf {

class DiagnosticEngine;

class Lexer {
const std::string_view Src;
DiagnosticEngine &Diags;
std::vector<Diagnostic> &Diags;

Point Cur;

@@ -120,7 +119,7 @@ class Lexer {
}

public:
Lexer(std::string_view Src, DiagnosticEngine &Diags)
Lexer(std::string_view Src, std::vector<Diagnostic> &Diags)
: Src(Src), Diags(Diags), Cur() {}

/// Reset the cursor at source \p offset (zero-based indexing)
27 changes: 14 additions & 13 deletions libnixf/src/Parse/Parser.cpp
@@ -6,7 +6,6 @@
#include "Token.h"

#include "nixf/Basic/Diagnostic.h"
#include "nixf/Basic/DiagnosticEngine.h"
#include "nixf/Basic/Range.h"
#include "nixf/Parse/Nodes.h"
#include "nixf/Parse/Parser.h"
@@ -24,8 +23,9 @@ namespace {
using namespace nixf;
using namespace nixf::tok;

Diagnostic &diagNullExpr(DiagnosticEngine &Diag, Point Loc, std::string As) {
Diagnostic &D = Diag.diag(Diagnostic::DK_Expected, RangeTy(Loc));
Diagnostic &diagNullExpr(std::vector<Diagnostic> &Diags, Point Loc,
std::string As) {
Diagnostic &D = Diags.emplace_back(Diagnostic::DK_Expected, RangeTy(Loc));
D << ("an expression as " + std::move(As));
D.fix(Fix::mkInsertion(Loc, " expr"));
return D;
@@ -43,7 +43,7 @@ class Parser {
private:
std::string_view Src;
Lexer Lex;
DiagnosticEngine &Diag;
std::vector<Diagnostic> &Diags;

std::deque<Token> LookAheadBuf;
std::optional<Token> LastToken;
@@ -119,8 +119,8 @@ class Parser {
}

public:
Parser(std::string_view Src, DiagnosticEngine &Diag)
: Src(Src), Lex(Src, Diag), Diag(Diag) {
Parser(std::string_view Src, std::vector<Diagnostic> &Diags)
: Src(Src), Lex(Src, Diags), Diags(Diags) {
pushState(PS_Expr);
}

@@ -136,13 +136,13 @@ class Parser {
auto ExprState = withState(PS_Expr);
auto Expr = parseExpr();
if (!Expr)
diagNullExpr(Diag, LastToken->end(), "interpolation");
diagNullExpr(Diags, LastToken->end(), "interpolation");
if (peek().kind() == tok_r_curly) {
consume(); // }
} else {
// expected "}" for interpolation
Diagnostic &D =
Diag.diag(Diagnostic::DK_Expected, RangeTy(LastToken->end()));
Diagnostic &D = Diags.emplace_back(Diagnostic::DK_Expected,
RangeTy(LastToken->end()));
D << std::string(tok::spelling(tok_r_curly));
D.note(Note::NK_ToMachThis, TokDollarCurly.range())
<< std::string(tok::spelling(tok_dollar_curly));
@@ -248,8 +248,8 @@ class Parser {
},
std::move(Parts));
}
Diagnostic &D =
Diag.diag(Diagnostic::DK_Expected, RangeTy(LastToken->end()));
Diagnostic &D = Diags.emplace_back(Diagnostic::DK_Expected,
RangeTy(LastToken->end()));
D << QuoteSpel;
D.note(Note::NK_ToMachThis, Quote.range()) << QuoteSpel;
D.fix(Fix::mkInsertion(LastToken->end(), QuoteSpel));
@@ -311,8 +311,9 @@

namespace nixf {

std::shared_ptr<Node> parse(std::string_view Src, DiagnosticEngine &Diag) {
Parser P(Src, Diag);
std::shared_ptr<Node> parse(std::string_view Src,
std::vector<Diagnostic> &Diags) {
Parser P(Src, Diags);
return P.parse();
}

56 changes: 28 additions & 28 deletions libnixf/test/Parse/Lexer.cpp
@@ -2,7 +2,7 @@

#include "Lexer.h"

#include "nixf/Basic/DiagnosticEngine.h"
#include "nixf/Basic/Diagnostic.h"

#include <cstddef>

@@ -23,99 +23,99 @@ auto collect(Lexer &L, Token (Lexer::*Ptr)()) {
}

struct LexerTest : testing::Test {
DiagnosticEngine Diag;
std::vector<Diagnostic> Diags;
std::stringstream SS;
};

TEST_F(LexerTest, Integer) {
Lexer Lexer("1", Diag);
Lexer Lexer("1", Diags);
auto P = Lexer.lex();
ASSERT_EQ(P.kind(), tok_int);
ASSERT_TRUE(Diag.diags().empty());
ASSERT_TRUE(Diags.empty());
}

TEST_F(LexerTest, Integer2) {
Lexer Lexer("1123123", Diag);
Lexer Lexer("1123123", Diags);
auto P = Lexer.lex();
ASSERT_EQ(P.kind(), tok_int);
ASSERT_TRUE(Diag.diags().empty());
ASSERT_TRUE(Diags.empty());
}

TEST_F(LexerTest, Integer4) {
Lexer Lexer("00023121123123", Diag);
Lexer Lexer("00023121123123", Diags);
auto P = Lexer.lex();
ASSERT_EQ(P.kind(), tok_int);
ASSERT_TRUE(Diag.diags().empty());
ASSERT_TRUE(Diags.empty());
}

TEST_F(LexerTest, Integer5) {
Lexer Lexer("00023121123123", Diag);
Lexer Lexer("00023121123123", Diags);
auto P = Lexer.lex();
ASSERT_EQ(P.kind(), tok_int);
ASSERT_TRUE(Diag.diags().empty());
ASSERT_TRUE(Diags.empty());
}

TEST_F(LexerTest, Trivia1) {
std::string Trivia("\r\n /* */# line comment\n\f \v\r \n");
std::string Src = Trivia + "3";
Lexer Lexer(Src, Diag);
Lexer Lexer(Src, Diags);
auto P = Lexer.lex();
ASSERT_EQ(P.kind(), tok_int);
ASSERT_EQ(P.view(), "3");
ASSERT_TRUE(Diag.diags().empty());
ASSERT_TRUE(Diags.empty());
}

TEST_F(LexerTest, TriviaLComment) {
Lexer Lexer(R"(# single line comment
3
)",
Diag);
Diags);
auto P = Lexer.lex();
ASSERT_EQ(P.kind(), tok_int);
ASSERT_EQ(P.view(), "3");
ASSERT_TRUE(Diag.diags().empty());
ASSERT_TRUE(Diags.empty());
}

TEST_F(LexerTest, TriviaBComment) {
const char *Src = R"(/* block comment
aaa
*/)";
Lexer Lexer(Src, Diag);
Lexer Lexer(Src, Diags);
auto P = Lexer.lex();
ASSERT_EQ(P.kind(), tok_eof);
ASSERT_EQ(P.view(), "");
ASSERT_TRUE(Diag.diags().empty());
ASSERT_TRUE(Diags.empty());
}

TEST_F(LexerTest, TriviaBComment2) {
const char *Src = R"(/* block comment
aaa
)";
Lexer Lexer(Src, Diag);
Lexer Lexer(Src, Diags);
auto P = Lexer.lex();
ASSERT_EQ(P.kind(), tok_eof);
ASSERT_EQ(P.view(), "");
ASSERT_TRUE(!Diag.diags().empty());
ASSERT_TRUE(!Diags.empty());

ASSERT_EQ(std::string(Diag.diags()[0]->message()), "unterminated /* comment");
ASSERT_EQ(std::string(Diag.diags()[0]->notes()[0]->message()),
ASSERT_EQ(std::string(Diags[0].message()), "unterminated /* comment");
ASSERT_EQ(std::string(Diags[0].notes()[0].message()),
"/* comment begins at here");
}

TEST_F(LexerTest, FloatLeadingZero) {
Lexer Lexer("00.33", Diag);
Lexer Lexer("00.33", Diags);
auto P = Lexer.lex();
ASSERT_EQ(P.kind(), tok_float);
ASSERT_EQ(P.view(), "00.33");
ASSERT_FALSE(Diag.diags().empty());
ASSERT_EQ(std::string(Diag.diags()[0]->message()),
ASSERT_FALSE(Diags.empty());
ASSERT_EQ(std::string(Diags[0].message()),
"float begins with extra zeros `{}` is nixf extension");
ASSERT_EQ(std::string(Diag.diags()[0]->args()[0]), "00");
ASSERT_EQ(std::string(Diags[0].args()[0]), "00");
}

TEST_F(LexerTest, lexString) {
Lexer Lexer(R"("aa bb \\ \t \" \n ${}")", Diag);
Lexer Lexer(R"("aa bb \\ \t \" \n ${}")", Diags);
const TokenKind Match[] = {
tok_dquote, // '"'
tok_string_part, // 'aa bb '
Expand All @@ -140,7 +140,7 @@ TEST_F(LexerTest, lexString) {

TEST_F(LexerTest, lexIDPath) {
// FIXME: test pp//a to see that we can lex this as Update(pp, a)
Lexer Lexer(R"(id pa/t)", Diag);
Lexer Lexer(R"(id pa/t)", Diags);
const TokenKind Match[] = {
tok_id, // id
tok_path_fragment, // pa/
Expand All @@ -155,7 +155,7 @@ TEST_F(LexerTest, lexIDPath) {

TEST_F(LexerTest, lexKW) {
// FIXME: test pp//a to see that we can lex this as Update(pp, a)
Lexer Lexer(R"(if then)", Diag);
Lexer Lexer(R"(if then)", Diags);
const TokenKind Match[] = {
tok_kw_if, // if
tok_kw_then, // then
Expand All @@ -168,7 +168,7 @@ TEST_F(LexerTest, lexKW) {
}

TEST_F(LexerTest, lexURI) {
Lexer Lexer(R"(https://github.com/inclyc/libnixf)", Diag);
Lexer Lexer(R"(https://github.com/inclyc/libnixf)", Diags);
auto Tokens = collect(Lexer, &Lexer::lex);
const TokenKind Match[] = {tok_uri};
for (size_t I = 0; I < sizeof(Match) / sizeof(TokenKind); I++) {