author    Gabriel Dos Reis <gdr@axiomatics.org>  2017-01-20 05:10:04 -0800
committer Gabriel Dos Reis <gdr@axiomatics.org>  2017-01-20 05:10:04 -0800
commit    13819bd8e6828e5aadecad77804b25651fd8d9ae (patch)
tree      0e1b3fd70e172822ba9c8b3336f8e5213e80bed2 /src
parent    8988a27cf5f414263d0796a323f0d033265271e7 (diff)
Format boot tokens.
Diffstat (limited to 'src')
-rw-r--r--  src/syntax/Parser.cxx  40
1 file changed, 34 insertions(+), 6 deletions(-)
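
Before the diff itself: the change replaces the generic operator<< for Token with two helpers. format_text re-reads a token's spelling from the source fragment it came from, and format_token prints that spelling for integer, floating-point, string, and identifier tokens, falling back to the stored value for everything else. Below is a minimal, self-contained sketch of the slicing idea in format_text; the Position, Token, and Fragment types here are simplified stand-ins invented for illustration, not the OpenAxiom definitions.

    // A minimal sketch of the slicing idea in format_text below.
    // Position, Token, and Fragment are simplified stand-ins for illustration only;
    // they are not the OpenAxiom definitions.
    #include <algorithm>
    #include <cstddef>
    #include <iostream>
    #include <iterator>
    #include <string>
    #include <vector>

    struct Position {
       std::size_t line;    // index of the line the position sits on
       std::size_t column;  // character offset within that line
    };

    struct Token {
       Position start;
       Position end;        // one past the token's last character
    };

    // In this sketch a fragment is just the sequence of source lines.
    struct Fragment {
       std::vector<std::string> lines;
       const std::string& operator[](std::size_t i) const { return lines[i]; }
       const std::string& operator()(const Position& p) const { return lines[p.line]; }
    };

    // Stream out the characters a token spans, whether on one line or several.
    void format_text(const Fragment& f, const Token& t, std::ostream& os) {
       if (t.start.line == t.end.line) {
          auto& line = f(t.start);
          std::copy(line.begin() + t.start.column, line.begin() + t.end.column,
                    std::ostream_iterator<char>(os));
       }
       else {
          // Tail of the first line, any whole interior lines, head of the last line.
          auto& first_line = f(t.start);
          std::copy(first_line.begin() + t.start.column, first_line.end(),
                    std::ostream_iterator<char>(os));
          for (auto i = t.start.line + 1; i < t.end.line; ++i)
             os << f[i];
          auto& last_line = f[t.end.line];
          std::copy(last_line.begin(), last_line.begin() + t.end.column,
                    std::ostream_iterator<char>(os));
       }
    }

    int main() {
       Fragment f { { "x := foo(", "   y, z)" } };
       Token call { { 0, 5 }, { 1, 8 } };   // spans "foo(" through "z)"
       format_text(f, call, std::cout);     // prints: foo(   y, z)
       std::cout << '\n';
    }

The three-part shape of the multi-line branch (tail of the first line, interior lines whole, head of the last line) is the same shape that appears in the hunk below.
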
diff --git a/src/syntax/Parser.cxx b/src/syntax/Parser.cxx
index 8245b70b..ada3963b 100644
--- a/src/syntax/Parser.cxx
+++ b/src/syntax/Parser.cxx
@@ -107,13 +107,40 @@ namespace {
using InputFile = FileAs<std::ifstream>;
using OutputFile = FileAs<std::ofstream>;
- // Helper function for streaming out details of tokens.
- std::ostream& operator<<(std::ostream& os, const Token& t) {
+ void format_text(const Fragment& f, const Token& t, std::ostream& os) {
+ if (t.start.line == t.end.line) {
+ auto& line = f(t.start);
+ std::copy(line.begin() + t.start.column, line.begin() + t.end.column,
+ std::ostream_iterator<char>(os));
+ }
+ else {
+ auto& first_line = f(t.start);
+ std::copy(first_line.begin() + t.start.column, first_line.end(),
+ std::ostream_iterator<char>(os));
+ for (auto i = t.start.line + 1; i < t.end.line; ++i)
+ os << f[i];
+ auto& last_line = f[t.end.line];
+ std::copy(last_line.begin(), last_line.begin() + t.end.column,
+ std::ostream_iterator<char>(os));
+ }
+ }
+
+ void format_token(const Fragment& f, const Token& t, std::ostream& os) {
os << t.category << '{'
<< t.start << '-' << t.end
- << ", " << t.value
- << '}';
- return os;
+ << ", ";
+ switch (t.category) {
+ case TokenCategory::Integer:
+ case TokenCategory::FloatingPoint:
+ case TokenCategory::String:
+ case TokenCategory::Identifier:
+ format_text(f, t, os);
+ break;
+ default:
+ os << t.value;
+ break;
+ }
+ os << '}';
}
// FIXME: This is just a stub to get a native parsing entry point
@@ -129,7 +156,8 @@ namespace {
try {
TokenSequence ts { f, Language::Boot };
for (auto& t : ts) {
- out.stream << '\t' << t;
+ out.stream << '\t';
+ format_token(f, t, out.stream);
switch (t.category) {
case TokenCategory::Junk:
case TokenCategory::Unclassified: