module nimpc.lexer;
|
|
|
|
import streams;
|
|
import std.stdio;
|
|
import std.regex;
|
|
import std.array;
|
|
|
|
/**
 * The categories of token that the lexer can produce.
 */
enum TokenType {
    /// A reserved language keyword.
    KEYWORD,
    /// The name of a data type.
    DATA_TYPE,
    /// Marks the end of a sentence — presumably the statement terminator; confirm against the parser.
    SENTENCE_END,
    /// An integer literal.
    LITERAL_INTEGER,
    /// A floating-point literal.
    LITERAL_FLOAT,
    /// A boolean literal.
    LITERAL_BOOLEAN,
    /// A string literal.
    LITERAL_STRING
}
|
|
|
|
/**
 * A single token extracted from the source text.
 */
struct Token {
    /// The category of this token.
    TokenType type;
    /// The raw text of the token.
    string content;
    /// Source line on which the token appears.
    uint line;
    /// Source column at which the token appears.
    uint column;
}
|
|
|
|
/**
 * Thrown when tokenization fails; carries the position in the source text
 * at which the failure occurred.
 */
class LexerException : Exception {
    /// Line in the source text where lexing failed.
    const uint sourceLine;
    /// Column in the source text where lexing failed.
    const uint sourceColumn;

    /**
     * Constructs a lexer exception.
     * Params:
     *   msg = Description of the failure.
     *   sourceLine = Line in the source text where lexing failed.
     *   sourceColumn = Column in the source text where lexing failed.
     *   file = File of the throw site (defaults to the caller's file).
     *   line = Line of the throw site (defaults to the caller's line).
     */
    this(string msg, uint sourceLine, uint sourceColumn,
            string file = __FILE__, size_t line = __LINE__) {
        // Forward the throw-site location to Exception so stack traces and
        // default toString() output point at where the error was raised.
        super(msg, file, line);
        this.sourceLine = sourceLine;
        this.sourceColumn = sourceColumn;
    }
}
|
|
|
|
/**
 * Parses a list of tokens from an input stream of lines of code.
 * Params:
 *   inputStream = The lines of input to parse.
 * Returns: A list of tokens.
 * Throws: LexerException if a character cannot be read from the stream.
 */
Token[] tokenize(S)(S inputStream) if (isInputStream!(S, char)) {
    Appender!(Token[]) tokenApp;
    uint line = 0;
    uint col = 0;

    // TODO(WIP): there is no normal termination condition yet — the loop only
    // ends when readChar throws on a failed read (which, in the current
    // implementation, includes end-of-stream). Token recognition is not
    // implemented; characters are only consumed and echoed.
    while (true) {
        char c = readChar(inputStream, line, col);
        // Advance the tracked source position so that any LexerException
        // reports where the failure happened instead of always (0, 0).
        if (c == '\n') {
            line++;
            col = 0;
        } else {
            col++;
        }
        writeln(c); // debug echo of each consumed character
    }

    return tokenApp[];
}
|
|
|
|
/**
 * Reads a single character from the given stream.
 * Params:
 *   stream = The stream to read from.
 *   line = Source line used when reporting a read failure.
 *   col = Source column used when reporting a read failure.
 * Returns: The character that was read.
 * Throws: LexerException if no character could be read.
 */
private char readChar(S)(S stream, uint line, uint col) if (isInputStream!(S, char)) {
    auto readResult = readOne(stream);
    if (!readResult.hasError) return readResult.element;
    throw new LexerException("Failed to read one more char from stream.", line, col);
}
|