Commit
fixing bugs with duplicate message analyzer (#84)
Showing 3 changed files with 46 additions and 16 deletions.
@@ -1,21 +1,24 @@
-from alogamous import analyzer
+from alogamous import analyzer, log_line_parser


 class FlagDuplicateLogMessages(analyzer.Analyzer):
-    def __init__(self):
+    def __init__(self, line_parser):
+        self.parser = line_parser
         self.logMessages = set()
         self.duplicateMessages = set()

     def read_log_line(self, line):
-        message = line.split("-")[-1].strip()
-        if message in self.logMessages:
-            self.duplicateMessages.add(message)
-        else:
-            self.logMessages.add(message)
+        parsed_line = self.parser.parse(line)
+        if parsed_line["type"] == log_line_parser.LineType.LOG_LINE:
+            message = parsed_line["message"]
+            if message in self.logMessages:
+                self.duplicateMessages.add(message)
+            else:
+                self.logMessages.add(message)

     def report(self, out_stream):
         if len(self.duplicateMessages) > 0:
-            out_stream.write("Duplicate Log Messages:\n")
-            out_stream.write("\n".join(self.duplicateMessages))
+            out_stream.write("Duplicate Log Messages:\n- ")
+            out_stream.write("\n- ".join(self.duplicateMessages))
         else:
             out_stream.write("No duplicate log messages")
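The change makes the analyzer delegate field extraction to a line parser instead of splitting on "-" itself, so banner lines and dash-heavy timestamps are no longer mis-read as duplicate messages. The actual alogamous.log_line_parser module is not part of this diff; the sketch below is only an illustration of the interface the new code relies on (a LineType enum with a LOG_LINE member and a parse() method returning a dict with "type" and "message" keys), inferred from the calls above. Names and behavior of anything beyond those calls are assumptions.

# Illustrative sketch only -- not the actual alogamous.log_line_parser implementation.
# It mirrors the interface assumed by the diff above: LineType.LOG_LINE and a
# parse() method that returns a dict with "type" and "message" keys.
import enum


class LineType(enum.Enum):
    LOG_LINE = "log_line"
    HEADER_LINE = "header_line"  # assumed name for non-log lines such as the ==== banner


class LogLineParser:
    def __init__(self, fields, separator, divider):
        self.fields = fields        # e.g. ["datetime", "source", "level", "message"]
        self.separator = separator  # e.g. " - "
        self.divider = divider      # the ==== banner line used around startup headers

    def parse(self, line):
        parts = line.split(self.separator, len(self.fields) - 1)
        if line == self.divider or len(parts) < len(self.fields):
            # Not a regular log line (banner, startup info, etc.), so no "message" key.
            return {"type": LineType.HEADER_LINE}
        parsed = dict(zip(self.fields, parts))
        parsed["type"] = LineType.LOG_LINE
        return parsed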
@@ -1,27 +1,54 @@
 import io

-from alogamous import flag_duplicate_log_messages
+from alogamous import flag_duplicate_log_messages, log_line_parser


 def test_flag_duplicate_log_messages():
-    flagger = flag_duplicate_log_messages.FlagDuplicateLogMessages()
+    line_parser = log_line_parser.LogLineParser(
+        ["datetime", "source", "level", "message"], " - ", "===================================================="
+    )
+    flagger = flag_duplicate_log_messages.FlagDuplicateLogMessages(line_parser)
     in_stream = io.StringIO("""Date - root - INFO - log message 1
 Date - root - WARNING - log message 2
 Date - root - WARNING - log message 1""")
     out_stream = io.StringIO()
     for line in in_stream:
-        flagger.read_log_line(line)
+        flagger.read_log_line(line.rstrip())
     flagger.report(out_stream)
-    assert out_stream.getvalue() == """Duplicate Log Messages:\nlog message 1"""
+    assert out_stream.getvalue() == """Duplicate Log Messages:\n- log message 1"""


 def test_flag_duplicate_log_messages_no_duplicates():
-    flagger = flag_duplicate_log_messages.FlagDuplicateLogMessages()
+    line_parser = log_line_parser.LogLineParser(
+        ["datetime", "source", "level", "message"], " - ", "===================================================="
+    )
+    flagger = flag_duplicate_log_messages.FlagDuplicateLogMessages(line_parser)
     in_stream = io.StringIO("""Date - root - INFO - log message 1
 Date - root - WARNING - log message 2
 Date - root - WARNING - log message 3""")
     out_stream = io.StringIO()
     for line in in_stream:
-        flagger.read_log_line(line)
+        flagger.read_log_line(line.rstrip())
     flagger.report(out_stream)
     assert out_stream.getvalue() == """No duplicate log messages"""
+
+
+def test_flag_duplicate_messages_with_header_and_dashes():
+    line_parser = log_line_parser.LogLineParser(
+        ["datetime", "source", "level", "message"], " - ", "===================================================="
+    )
+    flagger = flag_duplicate_log_messages.FlagDuplicateLogMessages(line_parser)
+    in_stream = io.StringIO("""====================================================
+STARTING Tracking service
+Start time: 2024-06-20 09:00:00.001550+00:00
+Version: 2729a
+Command line: ['.venv/bin/python3', '-m', 'app.tracking_service', '--market', 'US', '--version', '2729a']
+====================================================
+2024-06-20 11:00:17,983 - root - INFO - Adding subscription for pid None
+2024-06-20 11:00:18,115 - root - INFO - Initialized Influx DB Client to host
+2024-06-20 11:00:18,185 - root - INFO - Kafka reading from start of day 2024-06-20 05:00:00+00:00 on topic internal""")
+    out_stream = io.StringIO()
+    for line in in_stream:
+        flagger.read_log_line(line.rstrip())
+    flagger.report(out_stream)
+    assert out_stream.getvalue() == """No duplicate log messages"""
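For reference, a hypothetical end-to-end use of the fixed analyzer outside the tests might look like the sketch below. The parser configuration is copied from the tests above; the log file name is an assumption and is not part of this commit.

# Hypothetical usage sketch -- not part of this commit.
# Parser configuration mirrors the tests above; the log file name is made up.
import sys

from alogamous import flag_duplicate_log_messages, log_line_parser

parser = log_line_parser.LogLineParser(
    ["datetime", "source", "level", "message"], " - ", "===================================================="
)
flagger = flag_duplicate_log_messages.FlagDuplicateLogMessages(parser)

with open("tracking_service.log") as log_file:  # assumed file name
    for line in log_file:
        flagger.read_log_line(line.rstrip())

# Writes either the "Duplicate Log Messages:" list or "No duplicate log messages".
flagger.report(sys.stdout)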