Commit 6336f22

Merge pull request #2393 from blacklanternsecurity/logging-fix-2

Logging Improvements

2 parents 43822a8 + fb7ed05
8 files changed: +71, -58 lines

8 files changed

+71
-58
lines changed

bbot/core/config/logger.py

Lines changed: 5 additions & 13 deletions

@@ -60,7 +60,6 @@ def __init__(self, core):
         self._loggers = None
         self._log_handlers = None
         self._log_level = None
-        self.root_logger = logging.getLogger()
         self.core_logger = logging.getLogger("bbot")
         self.core = core

@@ -83,14 +82,6 @@ def cleanup_logging(self):
         with suppress(Exception):
             self.queue_handler.close()

-        # Clean root logger
-        root_logger = logging.getLogger()
-        for handler in list(root_logger.handlers):
-            with suppress(Exception):
-                root_logger.removeHandler(handler)
-            with suppress(Exception):
-                handler.close()
-
         # Clean all other loggers
         for logger in logging.Logger.manager.loggerDict.values():
             if hasattr(logger, "handlers"):  # Logger, not PlaceHolder
@@ -111,8 +102,7 @@ def setup_queue_handler(self, logging_queue=None, log_level=logging.DEBUG):
             self.queue = logging_queue
         self.queue_handler = logging.handlers.QueueHandler(logging_queue)

-        self.root_logger.addHandler(self.queue_handler)
-
+        self.core_logger.addHandler(self.queue_handler)
         self.core_logger.setLevel(log_level)
         # disable asyncio logging for child processes
         if not SHARED_INTERPRETER_STATE.is_main_process:
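For context: the hunk above detaches BBOT's QueueHandler from the root logger and attaches it to the "bbot" logger instead, so only records in the "bbot" namespace travel through the queue and handlers that other libraries place on the root logger are left alone. A minimal stdlib sketch of that wiring (generic names, not BBOT's actual setup):

import logging
import logging.handlers
import queue

log_queue = queue.Queue()

# Attach the QueueHandler to a scoped logger rather than the root logger,
# so records from unrelated libraries never enter the queue.
core_logger = logging.getLogger("bbot")
core_logger.setLevel(logging.DEBUG)
core_logger.addHandler(logging.handlers.QueueHandler(log_queue))

# Elsewhere, a QueueListener drains the queue and fans records out to the
# real handlers (files, stderr, etc.).
listener = logging.handlers.QueueListener(log_queue, logging.StreamHandler())
listener.start()
core_logger.info("routed through the queue")
listener.stop()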
@@ -216,10 +206,12 @@ def log_handlers(self):
                 error_and_exit(f"Failure creating or error writing to BBOT logs directory ({log_dir})")

             # Main log file (compressed)
-            main_handler = GzipRotatingFileHandler(f"{log_dir}/bbot.log", when="d", interval=1, backupCount=14)
+            main_handler = GzipRotatingFileHandler(f"{log_dir}/bbot.log", maxBytes=1024 * 1024 * 100, backupCount=100)

             # Separate log file for debugging (compressed)
-            debug_handler = GzipRotatingFileHandler(f"{log_dir}/bbot.debug.log", when="d", interval=1, backupCount=14)
+            debug_handler = GzipRotatingFileHandler(
+                f"{log_dir}/bbot.debug.log", maxBytes=1024 * 1024 * 100, backupCount=100
+            )

             # Log to stderr
             stderr_handler = logging.StreamHandler(sys.stderr)
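The handler construction above also changes the rotation policy: instead of rotating daily and keeping 14 backups (when="d", interval=1, backupCount=14), the handlers now rotate by size at roughly 100 MB and keep up to 100 compressed backups. A hedged sketch of the same policy using only the stdlib RotatingFileHandler (filename and formatter are illustrative):

import logging
import logging.handlers

# Roll the file over once it reaches ~100 MB; keep at most 100 backups.
handler = logging.handlers.RotatingFileHandler(
    "bbot.log",
    maxBytes=1024 * 1024 * 100,
    backupCount=100,
)
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)s] %(message)s"))

log = logging.getLogger("rotation_demo")
log.setLevel(logging.INFO)
log.addHandler(handler)
log.info("written to bbot.log; rotation triggers at the size threshold")

Size-based rotation bounds how much disk a single log file can consume regardless of how chatty a scan is, which a fixed daily schedule does not.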

bbot/logger.py

Lines changed: 40 additions & 4 deletions

@@ -1,3 +1,4 @@
+import os
 import sys
 import logging.handlers

@@ -53,9 +54,44 @@ def log_to_stderr(msg, level="INFO", logname=True):
     print(msg, file=sys.stderr)


-# Create a compressed file handler for logs
-class GzipRotatingFileHandler(logging.handlers.TimedRotatingFileHandler):
-    def _open(self):
+class GzipRotatingFileHandler(logging.handlers.RotatingFileHandler):
+    """
+    A rotating file handler that compresses rotated files with gzip.
+    Checks file size only periodically to improve performance.
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._msg_count = 0
+        self._check_interval = 1000  # Check size every 1000 messages
+
+    def rotation_filename(self, default_name):
+        """
+        Modify the rotated filename to include .gz extension
+        """
+        return default_name + ".gz"
+
+    def rotate(self, source, dest):
+        """
+        Compress the source file and move it to the destination.
+        """
         import gzip

-        return gzip.open(f"{self.baseFilename}.gz", mode="at", encoding=self.encoding)
+        with open(source, "rb") as f_in:
+            with gzip.open(dest, "wb") as f_out:
+                f_out.writelines(f_in)
+        os.remove(source)
+
+    def emit(self, record):
+        """
+        Emit a record, checking for rollover only periodically using modulo.
+        """
+        self._msg_count += 1
+
+        # Only check for rollover periodically to save compute
+        if self._msg_count % self._check_interval == 0:
+            if self.shouldRollover(record):
+                self.doRollover()
+
+        # Continue with normal emit process
+        super().emit(record)
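A hedged usage sketch of the new handler (assuming it is importable from bbot.logger as added above; the tiny maxBytes is only there to force rotations quickly):

import logging

from bbot.logger import GzipRotatingFileHandler

log = logging.getLogger("gzip_demo")
log.setLevel(logging.DEBUG)
log.addHandler(GzipRotatingFileHandler("demo.log", maxBytes=10_000, backupCount=3))

for i in range(5000):
    log.debug("message %d", i)

# rotation_filename() appends ".gz", so rotated backups land as demo.log.1.gz,
# demo.log.2.gz, ...; rotate() gzips each rotated file and removes the
# uncompressed source, while the live demo.log stays plain text.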

bbot/scanner/scanner.py

Lines changed: 6 additions & 2 deletions

@@ -1229,9 +1229,13 @@ def log_level(self):
     def _log_handlers(self):
         if self.__log_handlers is None:
             self.helpers.mkdir(self.home)
-            main_handler = GzipRotatingFileHandler(str(self.home / "scan.log"), when="d", interval=1, backupCount=14)
+            main_handler = GzipRotatingFileHandler(
+                str(self.home / "scan.log"), maxBytes=1024 * 1024 * 100, backupCount=100
+            )
             main_handler.addFilter(lambda x: x.levelno != logging.TRACE and x.levelno >= logging.VERBOSE)
-            debug_handler = GzipRotatingFileHandler(str(self.home / "debug.log"), when="d", interval=1, backupCount=14)
+            debug_handler = GzipRotatingFileHandler(
+                str(self.home / "debug.log"), maxBytes=1024 * 1024 * 100, backupCount=100
+            )
             debug_handler.addFilter(lambda x: x.levelno >= logging.DEBUG)
             self.__log_handlers = [main_handler, debug_handler]
         return self.__log_handlers
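The addFilter(lambda ...) calls above decide record-by-record which handler writes what (logging.TRACE and logging.VERBOSE are custom levels BBOT registers; the stdlib has no such constants). A small sketch of the same pattern using only standard levels:

import logging

main_handler = logging.StreamHandler()
main_handler.addFilter(lambda record: record.levelno >= logging.INFO)  # terse, scan.log-style

debug_handler = logging.StreamHandler()
debug_handler.addFilter(lambda record: record.levelno >= logging.DEBUG)  # verbose, debug.log-style

log = logging.getLogger("filter_demo")
log.setLevel(logging.DEBUG)
log.addHandler(main_handler)
log.addHandler(debug_handler)

log.debug("only the debug handler emits this")
log.info("both handlers emit this")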

bbot/test/bbot_fixtures.py

Lines changed: 0 additions & 19 deletions

@@ -50,25 +50,6 @@ def tempapkfile():
     return apk_file


-def read_gzipped_file(file_path):
-    """
-    Read and decompress a gzipped file, tolerating missing end markers.
-
-    Args:
-        file_path: Path to the gzipped file
-
-    Returns:
-        The decompressed content as a string
-    """
-    with open(file_path, "rb") as f:
-        data = f.read()
-    decompressor = zlib.decompressobj(
-        16 + zlib.MAX_WBITS
-    )  # This is needed because the file doesn't have an end marker
-    content = decompressor.decompress(data).decode("utf-8", errors="ignore")
-    return content
-
-
 @pytest.fixture
 def clean_default_config(monkeypatch):
     clean_config = OmegaConf.merge(

bbot/test/test_step_1/test_cli.py

Lines changed: 6 additions & 6 deletions

@@ -130,9 +130,9 @@ async def test_cli_scan(monkeypatch):
     assert ip_success and dns_success, "IP_ADDRESS and/or DNS_NAME are not present in output.txt"

     # Check for gzipped scan log file
-    scan_log_gz = scan_home / "scan.log.gz"
-    assert scan_log_gz.is_file(), "scan.log.gz not found"
-    assert "[INFO]" in read_gzipped_file(scan_log_gz)
+    scan_log = scan_home / "scan.log"
+    assert scan_log.is_file(), "scan.log not found"
+    assert "[INFO]" in open(scan_log).read()


 @pytest.mark.asyncio
@@ -202,9 +202,9 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config):
     assert "[SCAN]" in open(scan_dir / "output.txt").read()

     # Check for gzipped scan log file
-    scan_log_gz = scan_dir / "scan.log.gz"
-    assert scan_log_gz.is_file(), "scan.log.gz not found"
-    assert "[INFO]" in read_gzipped_file(scan_log_gz)
+    scan_log = scan_dir / "scan.log"
+    assert scan_log.is_file(), "scan.log not found"
+    assert "[INFO]" in open(scan_log).read()
     shutil.rmtree(output_dir)

     # list module options

bbot/test/test_step_1/test_python_api.py

Lines changed: 10 additions & 10 deletions

@@ -17,28 +17,28 @@ async def test_python_api():
     scan_home = scan2.helpers.scans_dir / "python_api_test"
     out_file = scan_home / "output.json"
     assert list(scan2.helpers.read_file(out_file))
-    scan_log = scan_home / "scan.log.gz"
-    debug_log = scan_home / "debug.log.gz"
+    scan_log = scan_home / "scan.log"
+    debug_log = scan_home / "debug.log"
     assert scan_log.is_file()
-    assert "python_api_test" in read_gzipped_file(scan_log)
+    assert "python_api_test" in open(scan_log).read()
     assert debug_log.is_file()
-    assert "python_api_test" in read_gzipped_file(debug_log)
+    assert "python_api_test" in open(debug_log).read()

     scan3 = Scanner("127.0.0.1", output_modules=["json"], scan_name="scan_logging_test")
     await scan3.async_start_without_generator()

-    assert "scan_logging_test" not in read_gzipped_file(scan_log)
-    assert "scan_logging_test" not in read_gzipped_file(debug_log)
+    assert "scan_logging_test" not in open(scan_log).read()
+    assert "scan_logging_test" not in open(debug_log).read()

     scan_home = scan3.helpers.scans_dir / "scan_logging_test"
     out_file = scan_home / "output.json"
     assert list(scan3.helpers.read_file(out_file))
-    scan_log = scan_home / "scan.log.gz"
-    debug_log = scan_home / "debug.log.gz"
+    scan_log = scan_home / "scan.log"
+    debug_log = scan_home / "debug.log"
     assert scan_log.is_file()
     assert debug_log.is_file()
-    assert "scan_logging_test" in read_gzipped_file(scan_log)
-    assert "scan_logging_test" in read_gzipped_file(debug_log)
+    assert "scan_logging_test" in open(scan_log).read()
+    assert "scan_logging_test" in open(debug_log).read()

     # make sure config loads properly
     bbot_home = "/tmp/.bbot_python_api_test"

bbot/test/test_step_2/module_tests/test_module_excavate.py

Lines changed: 1 addition & 1 deletion

@@ -1034,7 +1034,7 @@ async def setup_after_prep(self, module_test):
         module_test.set_expect_requests({"uri": "/"}, {"response_data": self.bad_url_data})

     def check(self, module_test, events):
-        debug_log_content = read_gzipped_file(module_test.scan.home / "debug.log.gz")
+        debug_log_content = open(module_test.scan.home / "debug.log").read()
         # make sure our logging is working
         assert "Setting scan status to STARTING" in debug_log_content
         # make sure we don't have any URL validation errors

bbot/test/test_step_2/module_tests/test_module_nuclei.py

Lines changed: 3 additions & 3 deletions

@@ -101,7 +101,7 @@ async def setup_before_prep(self, module_test):

     def check(self, module_test, events):
         assert any(e.type == "TECHNOLOGY" and "apache" in e.data["technology"].lower() for e in events)
-        assert "Using Interactsh Server" not in read_gzipped_file(module_test.scan.home / "debug.log.gz")
+        assert "Using Interactsh Server" not in open(module_test.scan.home / "debug.log").read()


 class TestNucleiBudget(TestNucleiManual):
@@ -140,7 +140,7 @@ async def setup_before_prep(self, module_test):
         module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)

     def check(self, module_test, events):
-        assert "-retries 0" in read_gzipped_file(module_test.scan.home / "debug.log.gz")
+        assert "-retries 0" in open(module_test.scan.home / "debug.log").read()


 class TestNucleiRetriesCustom(TestNucleiRetries):
@@ -150,7 +150,7 @@ class TestNucleiRetriesCustom(TestNucleiRetries):
     }

     def check(self, module_test, events):
-        assert "-retries 1" in read_gzipped_file(module_test.scan.home / "debug.log.gz")
+        assert "-retries 1" in open(module_test.scan.home / "debug.log").read()


 class TestNucleiCustomHeaders(TestNucleiManual):
