Basic arithmetic and jump labels

commit b84557b3e1
2019-11-17 21:02:35 +01:00
34 changed files with 1350 additions and 0 deletions

27
src/token/operandtype.cpp Normal file

@@ -0,0 +1,27 @@
#include <map>
#include <token/operandtype.hpp>
namespace Token
{
OperandType GetOperandType(std::string const & op)
{
static std::map<std::string, OperandType, std::less<>> const operations =
{
{ "addi", OperandType::AddInteger },
{ "subi", OperandType::SubtractInteger },
{ "divi", OperandType::DivideInteger },
{ "muli", OperandType::MultiplyInteger },
{ "shri", OperandType::ShiftIntegerRight },
{ "shli", OperandType::ShiftIntegerLeft },
{ "jmp", OperandType::Jump }
};
auto const result = operations.find(op);
if (result != operations.end())
{
return result->second;
}
return OperandType::Unknown;
}
}
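
The matching header is not part of this excerpt; the following is a minimal sketch of what token/operandtype.hpp presumably declares, reconstructed from the lookup table above (the enumerator order and the include-guard style are assumptions):

// Hypothetical reconstruction of token/operandtype.hpp -- not the committed header.
#pragma once
#include <string>

namespace Token
{
    enum class OperandType
    {
        Unknown,
        AddInteger,
        SubtractInteger,
        MultiplyInteger,
        DivideInteger,
        ShiftIntegerLeft,
        ShiftIntegerRight,
        Jump
    };

    // Maps a mnemonic such as "addi" or "jmp" to its OperandType;
    // anything unrecognised yields OperandType::Unknown.
    OperandType GetOperandType(std::string const & op);
}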

24
src/token/registertype.cpp Normal file

@@ -0,0 +1,24 @@
#include <token/registertype.hpp>
#include <map>
namespace Token
{
RegisterType GetRegisterType(std::string const & reg)
{
static std::map<std::string, RegisterType, std::less<>> const registers =
{
{ "A", RegisterType::A },
{ "B", RegisterType::B },
{ "C", RegisterType::C },
{ "D", RegisterType::D }
};
auto const result = registers.find(reg);
if (result != registers.end())
{
return result->second;
}
return RegisterType::Unknown;
}
}
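
Together the two helpers give the tokenizer one lookup per lexeme class. A short, hypothetical usage sketch (the driver and its output strings are illustrative only, not part of the commit):

// Hypothetical usage of the lookup helpers; not part of the commit.
#include <cstdio>
#include <token/operandtype.hpp>
#include <token/registertype.hpp>

int main()
{
    bool const knownOp  = Token::GetOperandType("addi") != Token::OperandType::Unknown;
    bool const knownReg = Token::GetRegisterType("A")   != Token::RegisterType::Unknown;
    std::printf("addi known: %d, A known: %d\n", knownOp, knownReg);
    return 0;
}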

165
src/token/token.cpp Normal file

@@ -0,0 +1,165 @@
#include <cstdio>
#include <token/token.hpp>
namespace Token
{
Token::Token(int const _lineNumber, int const _lineColumn)
: lineNumber(_lineNumber),
lineColumn(_lineColumn),
type(TokenType::Unknown),
isValid(false)
{
}
Token::Token(int const _lineNumber, int const _lineColumn, OperandType _operatorType, bool validness)
: lineNumber(_lineNumber),
lineColumn(_lineColumn),
type(TokenType::Operand),
isValid(validness),
data(_operatorType)
{
}
Token::Token(int const _lineNumber, int const _lineColumn, RegisterType _registerType, bool validness)
: lineNumber(_lineNumber),
lineColumn(_lineColumn),
type(TokenType::Register),
isValid(validness),
data(_registerType)
{
}
Token::Token(int const _lineNumber, int const _lineColumn, int value, bool validness)
: lineNumber(_lineNumber),
lineColumn(_lineColumn),
type(TokenType::ImmediateInteger),
isValid(validness),
data(value)
{
}
Token::Token(int const _lineNumber, int const _lineColumn, std::string const & value, bool validness)
: lineNumber(_lineNumber),
lineColumn(_lineColumn),
type(TokenType::Label),
isValid(validness),
data(value)
{
}
Token::Token(Token const & other)
: lineNumber(other.lineNumber),
lineColumn(other.lineColumn),
type(other.type),
isValid(other.isValid),
data(other.data)
{
}
void Token::DebugPrint() const
{
std::putc(' ', stdout);
switch(type)
{
case TokenType::ImmediateInteger:
if (isValid)
{
std::printf("$int=%i", std::get<int>(data));
}
else
{
std::printf("BAD_IMM_INT");
}
break;
case TokenType::Operand:
if (isValid)
{
switch(std::get<OperandType>(data))
{
case OperandType::AddInteger:
std::printf("addi");
break;
case OperandType::MultiplyInteger:
std::printf("muli");
break;
case OperandType::SubtractInteger:
std::printf("subi");
break;
case OperandType::DivideInteger:
std::printf("divi");
break;
case OperandType::ShiftIntegerLeft:
std::printf("shli");
break;
case OperandType::ShiftIntegerRight:
std::printf("shri");
break;
case OperandType::Jump:
std::printf("jmp");
break;
default:
std::printf("unknown_op");
break;
}
}
else
{
std::printf("BAD_OP");
}
break;
case TokenType::Register:
if (isValid)
{
switch(std::get<RegisterType>(data))
{
case RegisterType::A:
std::printf("%%A");
break;
case RegisterType::B:
std::printf("%%B");
break;
case RegisterType::C:
std::printf("%%C");
break;
case RegisterType::D:
std::printf("%%D");
break;
default:
std::printf("%%unknown_reg");
break;
}
}
else
{
std::printf("BAD_REG");
}
break;
case TokenType::StatementEnd:
std::printf("EOS");
break;
case TokenType::Label:
std::printf("label=%s", std::get<std::string>(data).c_str());
break;
case TokenType::Unknown:
default:
std::printf("UNKNOWN_TOKEN");
break;
}
}
}
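
token.hpp itself is not shown in this excerpt, but the std::get<int>, std::get<OperandType>, std::get<RegisterType> and std::get<std::string> calls above imply that the payload is a std::variant. A rough sketch of the presumed declaration follows; the exact variant alternatives (including std::monostate) and the struct layout are guesses based only on the .cpp:

// Hypothetical reconstruction of the core of token/token.hpp -- not the committed header.
#pragma once
#include <string>
#include <variant>
#include <token/operandtype.hpp>
#include <token/registertype.hpp>

namespace Token
{
    enum class TokenType
    {
        Unknown,
        Operand,
        Register,
        ImmediateInteger,
        Label,
        StatementEnd
    };

    struct Token
    {
        Token(int lineNumber, int lineColumn);
        Token(int lineNumber, int lineColumn, OperandType operatorType, bool validness);
        Token(int lineNumber, int lineColumn, RegisterType registerType, bool validness);
        Token(int lineNumber, int lineColumn, int value, bool validness);
        Token(int lineNumber, int lineColumn, std::string const & value, bool validness);
        Token(Token const & other);

        void DebugPrint() const;

        int lineNumber;
        int lineColumn;
        TokenType type;
        bool isValid;
        std::variant<std::monostate, OperandType, RegisterType, int, std::string> data;
    };
}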

109
src/token/tokenizer.cpp Normal file

@@ -0,0 +1,109 @@
#include <stdexcept> // std::invalid_argument / std::out_of_range thrown by std::stoi
#include <string>
#include <token/tokenizer.hpp>
namespace Token
{
bool IsWhiteSpace(char const c)
{
return c == '\n' || c == ' ' || c == '\t' || c == '\r';
}
Token ExtractToken(std::string const & string, int const lineNumber, int const lineColumn)
{
if (string.empty())
{
return Token(lineNumber, lineColumn);
}
char const prefix = string[0];
// '$' prefix: immediate integer literal, e.g. "$42"
if (prefix == '$')
{
int value = 0;
try
{
value = std::stoi(string.substr(1));
}
catch(std::exception const &) // std::invalid_argument or std::out_of_range from std::stoi
{
return Token(lineNumber, lineColumn, 0, false);
}
return Token(lineNumber, lineColumn, value, true);
}
// '%' prefix: register operand, e.g. "%A"
if (prefix == '%')
{
RegisterType const rtype = GetRegisterType(string.substr(1));
return Token(lineNumber, lineColumn, rtype, rtype != RegisterType::Unknown);
}
// ';' on its own terminates a statement
if (prefix == ';')
{
Token token(lineNumber, lineColumn);
token.type = TokenType::StatementEnd;
token.isValid = true;
return token;
}
char const postfix = string.back();
// A trailing ':' marks a label definition, e.g. "loop:"
if (postfix == ':')
{
return Token(lineNumber, lineColumn, string.substr(0, string.size() - 1), true);
}
OperandType const opType = GetOperandType(string);
if (opType != OperandType::Unknown)
{
return Token(lineNumber, lineColumn, opType, true);
}
// Anything else is treated as a label reference (e.g. a jump target)
return Token(lineNumber, lineColumn, string, true);
}
void Tokenizer::Tokenize(std::string const & line, int const lineNumber, std::vector<Token> & tokens)
{
enum class TokenizerState
{
LookForNextToken,
LookForTokenEnd,
};
TokenizerState state = TokenizerState::LookForNextToken;
unsigned tokenStart = 0;
for(unsigned i = 0u; i < line.size(); ++i)
{
switch(state)
{
case TokenizerState::LookForNextToken:
if (!IsWhiteSpace(line[i]))
{
if (line[i] == '#')
{
// Ignore comments
return;
}
tokenStart = i;
state = TokenizerState::LookForTokenEnd;
}
break;
case TokenizerState::LookForTokenEnd:
if (IsWhiteSpace(line[i]) || line[i] == ';')
{
tokens.push_back(ExtractToken(line.substr(tokenStart, i - tokenStart), lineNumber, tokenStart));
if (line[i] == ';')
{
// The ';' itself becomes a StatementEnd token at its own column
tokens.push_back(ExtractToken(line.substr(i, 1), lineNumber, i));
}
state = TokenizerState::LookForNextToken;
}
break;
}
}
if (state == TokenizerState::LookForTokenEnd)
{
// Flush a token that runs to the end of the line
tokens.push_back(ExtractToken(line.substr(tokenStart), lineNumber, tokenStart));
}
}
}
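
A hypothetical end-to-end example of the syntax the tokenizer accepts so far: labels end with ':', registers start with '%', immediates with '$', ';' ends a statement and '#' starts a comment. The default-constructed Tokenizer and the sample program text are assumptions, since the class definition itself is not part of this excerpt:

// Hypothetical driver; not part of the commit.
#include <cstdio>
#include <string>
#include <vector>
#include <token/token.hpp>
#include <token/tokenizer.hpp>

int main()
{
    std::vector<std::string> const lines =
    {
        "loop:            # jump target",
        "    addi %A $1;  # A += 1",
        "    jmp loop;"
    };

    Token::Tokenizer tokenizer; // assumes Tokenizer is default-constructible
    std::vector<Token::Token> tokens;
    for (unsigned i = 0u; i < lines.size(); ++i)
    {
        tokenizer.Tokenize(lines[i], static_cast<int>(i) + 1, tokens);
    }

    for (auto const & token : tokens)
    {
        // Expected output: " label=loop addi %A $int=1 EOS jmp label=loop EOS"
        token.DebugPrint();
    }
    std::putc('\n', stdout);
    return 0;
}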