refactor: Use trailing return types for void functions; Use auto to simplify type declarations in the touched files. (#61)

Co-authored-by: Lin Zhihao <59785146+LinZhihao-723@users.noreply.github.com>
SharafMohamed and LinZhihao-723 authored Jan 10, 2025
1 parent 51d11cc commit 44c5578
Showing 9 changed files with 126 additions and 133 deletions.
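
The refactor applies the same two mechanical changes throughout: void f(...) signatures become auto f(...) -> void, and locals whose type is obvious from the initializer switch to auto. A minimal, self-contained sketch of the pattern follows; the function and variable names are illustrative only, not taken from the repository.

#include <cstdint>
#include <vector>

// Before the refactor: leading return type and fully spelled-out local types.
// void append_values(std::vector<uint32_t> const& src, std::vector<uint32_t>& dst) {
//     for (uint32_t value : src) {
//         dst.push_back(value);
//     }
// }

// After the refactor: trailing return type on the void function and auto for
// the loop variable, whose type is deduced from the container's element type.
auto append_values(std::vector<uint32_t> const& src, std::vector<uint32_t>& dst) -> void {
    for (auto value : src) {
        dst.push_back(value);
    }
}
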
4 changes: 2 additions & 2 deletions src/log_surgeon/Buffer.hpp
@@ -72,8 +72,8 @@ class Buffer {
// Currently needed for compression
[[nodiscard]] auto get_mutable_active_buffer() -> Item* { return m_active_storage; }

- void
- copy(Item const* storage_to_copy_first, Item const* storage_to_copy_last, uint32_t offset) {
+ auto copy(Item const* storage_to_copy_first, Item const* storage_to_copy_last, uint32_t offset)
+         -> void {
std::copy(storage_to_copy_first, storage_to_copy_last, m_active_storage + offset);
}

192 changes: 92 additions & 100 deletions src/log_surgeon/Lalr1Parser.tpp

Large diffs are not rendered by default.

38 changes: 19 additions & 19 deletions src/log_surgeon/Lexer.tpp
@@ -18,7 +18,7 @@
*/
namespace log_surgeon {
template <typename TypedNfaState, typename TypedDfaState>
- void Lexer<TypedNfaState, TypedDfaState>::flip_states(uint32_t old_storage_size) {
+ auto Lexer<TypedNfaState, TypedDfaState>::flip_states(uint32_t old_storage_size) -> void {
if (m_match_pos >= old_storage_size / 2) {
m_match_pos -= old_storage_size / 2;
} else {
@@ -41,7 +41,7 @@ void Lexer<TypedNfaState, TypedDfaState>::flip_states(uint32_t old_storage_size)
template <typename TypedNfaState, typename TypedDfaState>
auto Lexer<TypedNfaState, TypedDfaState>::scan(ParserInputBuffer& input_buffer, Token& token)
-> ErrorCode {
- TypedDfaState const* state = m_dfa->get_root();
+ auto const* state = m_dfa->get_root();
if (m_asked_for_more_data) {
state = m_prev_state;
m_asked_for_more_data = false;
@@ -65,9 +65,10 @@ auto Lexer<TypedNfaState, TypedDfaState>::scan(ParserInputBuffer& input_buffer,
m_type_ids = nullptr;
}
while (true) {
- uint32_t prev_byte_buf_pos = input_buffer.storage().pos();
- unsigned char next_char{utf8::cCharErr};
- if (ErrorCode err = input_buffer.get_next_character(next_char); ErrorCode::Success != err) {
+ auto prev_byte_buf_pos = input_buffer.storage().pos();
+ auto next_char{utf8::cCharErr};
+ if (auto const err = input_buffer.get_next_character(next_char); ErrorCode::Success != err)
+ {
m_asked_for_more_data = true;
m_prev_state = state;
return err;
@@ -80,7 +81,7 @@ auto Lexer<TypedNfaState, TypedDfaState>::scan(ParserInputBuffer& input_buffer,
m_match_pos = prev_byte_buf_pos;
m_match_line = m_line;
}
- TypedDfaState* next = state->next(next_char);
+ auto* next = state->next(next_char);
if (next_char == '\n') {
m_line++;
if (m_has_delimiters && !m_match) {
@@ -172,7 +173,7 @@ auto Lexer<TypedNfaState, TypedDfaState>::scan_with_wildcard(
char wildcard,
Token& token
) -> ErrorCode {
- TypedDfaState const* state = m_dfa->get_root();
+ auto const* state = m_dfa->get_root();
if (m_asked_for_more_data) {
state = m_prev_state;
m_asked_for_more_data = false;
@@ -196,7 +197,7 @@ auto Lexer<TypedNfaState, TypedDfaState>::scan_with_wildcard(
m_type_ids = nullptr;
}
while (true) {
- uint32_t prev_byte_buf_pos = input_buffer.storage().pos();
+ auto prev_byte_buf_pos = input_buffer.storage().pos();
unsigned char next_char{utf8::cCharErr};
if (ErrorCode err = input_buffer.get_next_character(next_char); ErrorCode::Success != err) {
m_asked_for_more_data = true;
@@ -239,7 +240,7 @@ auto Lexer<TypedNfaState, TypedDfaState>::scan_with_wildcard(
// BFS (keep track of m_type_ids)
if (wildcard == '?') {
for (uint32_t byte = 0; byte < cSizeOfByte; byte++) {
- TypedDfaState* next_state = state->next(byte);
+ auto* next_state = state->next(byte);
if (next_state->is_accepting() == false) {
token
= Token{m_last_match_pos,
@@ -345,10 +346,10 @@ template <typename TypedNfaState, typename TypedDfaState>
void Lexer<TypedNfaState, TypedDfaState>::add_delimiters(std::vector<uint32_t> const& delimiters) {
assert(!delimiters.empty());
m_has_delimiters = true;
- for (bool& i : m_is_delimiter) {
+ for (auto& i : m_is_delimiter) {
i = false;
}
- for (uint32_t delimiter : delimiters) {
+ for (auto delimiter : delimiters) {
m_is_delimiter[delimiter] = true;
}
m_is_delimiter[utf8::cCharStartOfFile] = true;
@@ -378,7 +379,7 @@ void Lexer<TypedNfaState, TypedDfaState>::generate() {
finite_automata::Nfa<TypedNfaState> nfa{std::move(m_rules)};
// TODO: DFA ignores tags. E.g., treats "capture:user=(?<user_id>\d+)" as "capture:user=\d+"
m_dfa = nfa_to_dfa(nfa);
- TypedDfaState const* state = m_dfa->get_root();
+ auto const* state = m_dfa->get_root();
for (uint32_t i = 0; i < cSizeOfByte; i++) {
if (state->next(i) != nullptr) {
m_is_first_char[i] = true;
@@ -429,23 +430,22 @@ template <typename TypedNfaState, TypedDfaState>
auto Lexer<TypedNfaState, TypedDfaState>::nfa_to_dfa(finite_automata::Nfa<TypedNfaState>& nfa
) -> std::unique_ptr<finite_automata::Dfa<TypedDfaState>> {
typedef std::set<TypedNfaState const*> StateSet;
- std::unique_ptr<finite_automata::Dfa<TypedDfaState>> dfa
-         = std::make_unique<finite_automata::Dfa<TypedDfaState>>();
+ auto dfa = std::make_unique<finite_automata::Dfa<TypedDfaState>>();
std::map<StateSet, TypedDfaState*> dfa_states;
std::stack<StateSet> unmarked_sets;
auto create_dfa_state
= [&dfa, &dfa_states, &unmarked_sets](StateSet const& set) -> TypedDfaState* {
- TypedDfaState* state = dfa->new_state(set);
+ auto* state = dfa->new_state(set);
dfa_states[set] = state;
unmarked_sets.push(set);
return state;
};
- StateSet start_set = epsilon_closure(nfa.get_root());
+ auto start_set = epsilon_closure(nfa.get_root());
create_dfa_state(start_set);
while (!unmarked_sets.empty()) {
- StateSet set = unmarked_sets.top();
+ auto set = unmarked_sets.top();
unmarked_sets.pop();
- TypedDfaState* dfa_state = dfa_states.at(set);
+ auto* dfa_state = dfa_states.at(set);
std::map<uint32_t, StateSet> ascii_transitions_map;
for (TypedNfaState const* s0 : set) {
for (uint32_t i = 0; i < cSizeOfByte; i++) {
@@ -467,7 +467,7 @@ auto Lexer<TypedNfaState, TypedDfaState>::nfa_to_dfa(finite_automata::Nfa<TypedN
return state;
};
for (typename std::map<uint32_t, StateSet>::value_type const& kv : ascii_transitions_map) {
- TypedDfaState* dest_state = next_dfa_state(kv.second);
+ auto* dest_state = next_dfa_state(kv.second);
dfa_state->add_byte_transition(kv.first, dest_state);
}
}
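
The Lexer.tpp hunks above lean on several distinct auto deduction forms (auto, auto*, auto&, auto const*). A standalone sketch of how those forms deduce, using int in place of the DFA state types; illustrative only, not repository code.

int main() {
    int value = 42;

    auto copy = value;             // deduces int: an independent copy
    auto* ptr = &value;            // deduces int*: must bind to a pointer
    auto const* read_only = ptr;   // deduces int const*: the pointee becomes read-only
    auto& ref = value;             // deduces int&: a reference to value

    // Mirrors the diff: auto const* state = m_dfa->get_root(); is still a
    // pointer-to-const, just without spelling out TypedDfaState.
    ref = copy + *ptr + *read_only;  // 42 + 42 + 42, so value becomes 126
    return (*ptr == 126) ? 0 : 1;    // returns 0
}
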
4 changes: 2 additions & 2 deletions src/log_surgeon/LogEvent.cpp
@@ -42,7 +42,7 @@ auto LogEventView::reset() -> void {
start = 1;
}
for (uint32_t i = start; i < m_log_output_buffer->pos(); i++) {
- Token& token = m_log_output_buffer->get_mutable_token(i);
+ auto& token = m_log_output_buffer->get_mutable_token(i);
raw_log += token.to_string_view();
}
return raw_log;
@@ -51,7 +51,7 @@ auto LogEventView::reset() -> void {
auto LogEventView::get_logtype() const -> std::string {
std::string logtype;
for (uint32_t i = 1; i < m_log_output_buffer->pos(); i++) {
- Token& token = m_log_output_buffer->get_mutable_token(i);
+ auto& token = m_log_output_buffer->get_mutable_token(i);
if (token.m_type_ids_ptr->at(0) == (uint32_t)SymbolId::TokenUncaughtString) {
logtype += token.to_string_view();
} else {
2 changes: 1 addition & 1 deletion src/log_surgeon/LogParser.cpp
@@ -43,7 +43,7 @@ auto LogParser::add_delimiters(unique_ptr<ParserAST> const& delimiters) -> void
}
}

- void LogParser::add_rules(std::unique_ptr<SchemaAST> schema_ast) {
+ auto LogParser::add_rules(std::unique_ptr<SchemaAST> schema_ast) -> void {
for (auto const& delimiters : schema_ast->m_delimiters) {
add_delimiters(delimiters);
}
7 changes: 4 additions & 3 deletions src/log_surgeon/Parser.tpp
@@ -30,10 +30,10 @@ Parser<TypedNfaState, TypedDfaState>::Parser() {
}

template <typename TypedNfaState, typename TypedDfaState>
- void Parser<TypedNfaState, TypedDfaState>::add_rule(
+ auto Parser<TypedNfaState, TypedDfaState>::add_rule(
std::string const& name,
std::unique_ptr<finite_automata::RegexAST<TypedNfaState>> rule
- ) {
+ ) -> void {
if (m_lexer.m_symbol_id.find(name) == m_lexer.m_symbol_id.end()) {
m_lexer.m_symbol_id[name] = m_lexer.m_symbol_id.size();
m_lexer.m_id_symbol[m_lexer.m_symbol_id[name]] = name;
@@ -42,7 +42,8 @@ void Parser<TypedNfaState, TypedDfaState>::add_rule(
}

template <typename TypedNfaState, typename TypedDfaState>
- void Parser<TypedNfaState, TypedDfaState>::add_token(std::string const& name, char rule_char) {
+ auto Parser<TypedNfaState, TypedDfaState>::add_token(std::string const& name, char rule_char)
+         -> void {
add_rule(name, std::make_unique<finite_automata::RegexASTLiteral<TypedNfaState>>(rule_char));
}
} // namespace log_surgeon
6 changes: 3 additions & 3 deletions src/log_surgeon/ParserInputBuffer.cpp
@@ -9,7 +9,7 @@ using std::string;
using std::to_string;

namespace log_surgeon {
- void ParserInputBuffer::reset() {
+ auto ParserInputBuffer::reset() -> void {
m_log_fully_consumed = false;
m_finished_reading_input = false;
m_pos_last_read_char = 0;
@@ -107,12 +107,12 @@ auto ParserInputBuffer::get_next_character(unsigned char& next_char) -> ErrorCod
// the user to wrap their input buffer. It tricks the LogParser and
// ParserInputBuffer into thinking it never reaches the wrap, while still
// respecting the actual size of the buffer the user passed in.
- void ParserInputBuffer::set_storage(
+ auto ParserInputBuffer::set_storage(
char* storage,
uint32_t size,
uint32_t pos,
bool finished_reading_input
- ) {
+ ) -> void {
reset();
m_storage.set_active_buffer(storage, size * 2, pos);
m_finished_reading_input = finished_reading_input;
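
A minimal usage sketch for the set_storage() overload touched above, matching the signature shown in the hunk. The helper name, the vector-backed buffer, and the include path are assumptions, not taken from the repository.

#include <cstdint>
#include <vector>

#include <log_surgeon/ParserInputBuffer.hpp>  // assumed header path

// Illustrative helper: hands a caller-owned buffer to the parser input buffer.
auto install_user_storage(
        log_surgeon::ParserInputBuffer& input_buffer,
        std::vector<char>& storage,
        uint32_t pos,
        bool finished_reading_input
) -> void {
    // Per the comment in the hunk above, set_storage() reports size * 2 to the
    // wrap-detection logic so the wrap is never reached, while the parser still
    // respects the real size of the caller's buffer.
    input_buffer.set_storage(
            storage.data(),
            static_cast<uint32_t>(storage.size()),
            pos,
            finished_reading_input
    );
}
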
2 changes: 1 addition & 1 deletion src/log_surgeon/Schema.cpp
@@ -11,7 +11,7 @@ Schema::Schema(std::string const& schema_file_path)
: m_schema_ast{SchemaParser::try_schema_file(schema_file_path)} {}

auto Schema::add_variable(std::string_view const var_schema, int const priority) const -> void {
- std::unique_ptr<SchemaAST> const schema_ast = SchemaParser::try_schema_string(var_schema);
+ auto const schema_ast = SchemaParser::try_schema_string(var_schema);
m_schema_ast->add_schema_var(std::move(schema_ast->m_schema_vars[0]), priority);
}
} // namespace log_surgeon
4 changes: 2 additions & 2 deletions src/log_surgeon/SchemaParser.cpp
@@ -416,7 +416,7 @@ static auto new_delimiter_string_rule(NonTerminal* m) -> unique_ptr<ParserAST> {
return make_unique<DelimiterStringAST>(character);
}

- void SchemaParser::add_lexical_rules() {
+ auto SchemaParser::add_lexical_rules() -> void {
if (m_special_regex_characters.empty()) {
m_special_regex_characters.emplace('(', "Lparen");
m_special_regex_characters.emplace(')', "Rparen");
@@ -481,7 +481,7 @@ void SchemaParser::add_lexical_rules() {
add_token_group("CommentCharacters", std::move(comment_characters));
}

- void SchemaParser::add_productions() {
+ auto SchemaParser::add_productions() -> void {
// add_production("Schema", {}, new_schema_rule);
add_production("Schema", {"Comment"}, new_schema_rule);
add_production("Schema", {"SchemaVar"}, new_schema_rule_with_var);
