diff --git a/aikido_firewall/__init__.py b/aikido_firewall/__init__.py index 8eccc6e18..d9248ff62 100644 --- a/aikido_firewall/__init__.py +++ b/aikido_firewall/__init__.py @@ -4,6 +4,8 @@ from dotenv import load_dotenv +# Constants +PKG_VERSION = "0.0.1" # Import logger from aikido_firewall.helpers.logging import logger diff --git a/aikido_firewall/background_process/aikido_background_process.py b/aikido_firewall/background_process/aikido_background_process.py index 1e760af9e..a9a2a553b 100644 --- a/aikido_firewall/background_process/aikido_background_process.py +++ b/aikido_firewall/background_process/aikido_background_process.py @@ -6,11 +6,17 @@ import os import time import signal +import sched from threading import Thread from queue import Queue from aikido_firewall.helpers.logging import logger +from aikido_firewall.background_process.reporter import Reporter +from aikido_firewall.helpers.should_block import should_block +from aikido_firewall.helpers.token import get_token_from_env +from aikido_firewall.background_process.api.http_api import ReportingApiHTTP -REPORT_SEC_INTERVAL = 600 # 10 minutes + +EMPTY_QUEUE_INTERVAL = 5 # 5 seconds class AikidoBackgroundProcess: @@ -31,6 +37,7 @@ def __init__(self, address, key): pid = os.getpid() os.kill(pid, signal.SIGTERM) # Kill this subprocess self.queue = Queue() + self.reporter = None # Start reporting thread : Thread(target=self.reporting_thread).start() @@ -52,21 +59,40 @@ def __init__(self, address, key): conn.close() pid = os.getpid() os.kill(pid, signal.SIGTERM) # Kill this subprocess + elif data[0] == "READ_PROPERTY": # meant to get config props + if hasattr(self.reporter, data[1]): + conn.send(self.reporter.__dict__[data[1]]) def reporting_thread(self): """Reporting thread""" logger.debug("Started reporting thread") - while True: - self.send_to_reporter() - time.sleep(REPORT_SEC_INTERVAL) + event_scheduler = sched.scheduler( + time.time, time.sleep + ) # Create an event scheduler + 
self.send_to_reporter(event_scheduler) + + api = ReportingApiHTTP("http://app.local.aikido.io/") + # We need to pass along the scheduler so that the heartbeat also gets sent + self.reporter = Reporter( + block=should_block(), + api=api, + token=get_token_from_env(), + serverless=False, + event_scheduler=event_scheduler, + ) + + event_scheduler.run() - def send_to_reporter(self): + def send_to_reporter(self, event_scheduler): """ Reports the found data to an Aikido server """ - items_to_report = [] + # Add back to event scheduler in EMPTY_QUEUE_INTERVAL secs : + event_scheduler.enter( + EMPTY_QUEUE_INTERVAL, 1, self.send_to_reporter, (event_scheduler,) + ) + logger.debug("Checking queue") while not self.queue.empty(): - items_to_report.append(self.queue.get()) - logger.debug("Reporting to aikido server") - logger.critical("Items to report : %s", items_to_report) - # Currently not making API calls + attack = self.queue.get() + logger.debug("Reporting attack : %s", attack) + self.reporter.on_detected_attack(attack[0], attack[1]) diff --git a/aikido_firewall/background_process/api/__init__.py b/aikido_firewall/background_process/api/__init__.py new file mode 100644 index 000000000..3f8324b18 --- /dev/null +++ b/aikido_firewall/background_process/api/__init__.py @@ -0,0 +1,28 @@ +""" +init.py file for api/ folder. 
Includes abstract class ReportingApi +""" + +import json +from aikido_firewall.helpers.logging import logger + + +class ReportingApi: + """This is the super class for the reporting API's""" + + def to_api_response(self, res): + """Converts results into an Api response obj""" + status = res.status_code + if status == 429: + return {"success": False, "error": "rate_limited"} + elif status == 401: + return {"success": False, "error": "invalid_token"} + elif status == 200: + try: + return json.loads(res.text) + except Exception as e: + logger.debug(e) + logger.debug(res.text) + return {"success": False, "error": "unknown_error"} + + def report(self, token, event, timeout_in_sec): + """Report event to aikido server""" diff --git a/aikido_firewall/background_process/api/http_api.py b/aikido_firewall/background_process/api/http_api.py new file mode 100644 index 000000000..3a1a74fb2 --- /dev/null +++ b/aikido_firewall/background_process/api/http_api.py @@ -0,0 +1,34 @@ +""" +Exports the HTTP API class +""" + +import requests +from aikido_firewall.background_process.api import ReportingApi +from aikido_firewall.helpers.logging import logger + + +class ReportingApiHTTP(ReportingApi): + """HTTP Reporting API""" + + def __init__(self, reporting_url): + self.reporting_url = reporting_url + + def report(self, token, event, timeout_in_sec): + try: + res = requests.post( + self.reporting_url + "api/runtime/events", + json=event, + timeout=timeout_in_sec, + headers=get_headers(token), + ) + except requests.exceptions.ConnectionError: + return {"success": False, "error": "timeout"} + except Exception as e: + logger.error(e) + return {"success": False, "error": "unknown"} + return self.to_api_response(res) + + +def get_headers(token): + """Returns headers""" + return {"Content-Type": "application/json", "Authorization": str(token)} diff --git a/aikido_firewall/background_process/api/http_api_test.py b/aikido_firewall/background_process/api/http_api_test.py new file mode 100644 index 
000000000..b7eeba980 --- /dev/null +++ b/aikido_firewall/background_process/api/http_api_test.py @@ -0,0 +1,72 @@ +import pytest +import requests +from unittest.mock import patch +from aikido_firewall.background_process.api.http_api import ( + ReportingApiHTTP, +) # Replace with the actual module name + +# Sample event data for testing +sample_event = {"event_type": "test_event", "data": {"key": "value"}} + + +def test_report_data_401_code(monkeypatch): + # Create an instance of ReportingApiHTTP + api = ReportingApiHTTP("http://mocked-url.com/") + + # Mock the requests.post method to return a successful response + class MockResponse: + def __init__(self, json_data, status_code): + self.json_data = json_data + self.status_code = status_code + + def json(self): + return self.json_data + + def mock_post(url, json, timeout, headers): + return MockResponse({"success": False}, 401) + + monkeypatch.setattr(requests, "post", mock_post) + + # Call the report method + response = api.report("mocked_token", sample_event, 5) + + # Assert the response + assert response == {"success": False, "error": "invalid_token"} + + +def test_report_connection_error(monkeypatch): + # Create an instance of ReportingApiHTTP + api = ReportingApiHTTP("http://mocked-url.com/") + + # Mock the requests.post method to raise a ConnectionError + monkeypatch.setattr( + requests, + "post", + lambda *args, **kwargs: (_ for _ in ()).throw( + requests.exceptions.ConnectionError + ), + ) + + # Call the report method + response = api.report("mocked_token", sample_event, 5) + + # Assert the response + assert response == {"success": False, "error": "timeout"} + + +def test_report_other_exception(monkeypatch): + # Create an instance of ReportingApiHTTP + api = ReportingApiHTTP("http://mocked-url.com/") + + # Mock the requests.post method to raise a generic exception + def mock_post(url, json, timeout, headers): + raise Exception("Some error occurred") + + monkeypatch.setattr(requests, "post", mock_post) + + # 
Call the report method + response = api.report("mocked_token", sample_event, 5) + + # Assert that the response reports an unknown error + assert response["error"] == "unknown" + assert not response["success"] diff --git a/aikido_firewall/background_process/api/init_test.py b/aikido_firewall/background_process/api/init_test.py new file mode 100644 index 000000000..18b5f39ea --- /dev/null +++ b/aikido_firewall/background_process/api/init_test.py @@ -0,0 +1,44 @@ +import pytest +from aikido_firewall.background_process.api import ReportingApi + +# Test ReportingApi Class : +from requests.models import Response + + +@pytest.fixture +def reporting_api(): + return ReportingApi() + + +def test_to_api_response_rate_limited(reporting_api): + res = Response() + res.status_code = 429 + assert reporting_api.to_api_response(res) == { + "success": False, + "error": "rate_limited", + } + + +def test_to_api_response_invalid_token(reporting_api): + res = Response() + res.status_code = 401 + assert reporting_api.to_api_response(res) == { + "success": False, + "error": "invalid_token", + } + + +def test_to_api_response_unknown_error(reporting_api): + res = Response() + res.status_code = 500 # Simulating an unknown error status code + assert reporting_api.to_api_response(res) == { + "success": False, + "error": "unknown_error", + } + + +def test_to_api_response_valid_json(reporting_api): + res = Response() + res.status_code = 200 + res._content = b'{"key": "value"}' # Simulating valid JSON response + assert reporting_api.to_api_response(res) == {"key": "value"} diff --git a/aikido_firewall/background_process/comms.py b/aikido_firewall/background_process/comms.py index ec951c6df..2c08d4be4 100644 --- a/aikido_firewall/background_process/comms.py +++ b/aikido_firewall/background_process/comms.py @@ -81,3 +81,15 @@ def target(address, key, data_array): t.start() # This joins the thread for 3 seconds, afterwards the thread is forced to close (daemon=True) 
t.join(timeout=3) + + def poll_config(self, prop): + """ + This will poll the config from the Background Process + """ + conn = con.Client(self.address, authkey=self.key) + conn.send(("READ_PROPERTY", prop)) + prop_value = conn.recv() + conn.send(("CLOSE", {})) + conn.close() + logger.debug("Received property %s as %s", prop, prop_value) + return prop_value diff --git a/aikido_firewall/background_process/heartbeats.py b/aikido_firewall/background_process/heartbeats.py new file mode 100644 index 000000000..310022c18 --- /dev/null +++ b/aikido_firewall/background_process/heartbeats.py @@ -0,0 +1,37 @@ +""" +The code to send out a heartbeat is in here +""" + +from aikido_firewall.helpers.logging import logger + + +def send_heartbeats_every_x_secs(reporter, interval_in_secs, event_scheduler): + """ + Start sending out heartbeats every x seconds + """ + if reporter.serverless: + logger.debug("Running in serverless environment, not starting heartbeats") + return + if not reporter.token: + logger.debug("No token provided, not starting heartbeats") + return + + logger.debug("Starting heartbeats") + + event_scheduler.enter( + 0, 1, send_heartbeat_wrapper, (reporter, interval_in_secs, event_scheduler) + ) + + +def send_heartbeat_wrapper(rep, interval_in_secs, event_scheduler): + """ + Wrapper function for send_heartbeat so we get an interval + """ + event_scheduler.enter( + interval_in_secs, + 1, + send_heartbeat_wrapper, + (rep, interval_in_secs, event_scheduler), + ) + logger.debug("Heartbeat...") + rep.send_heartbeat() diff --git a/aikido_firewall/background_process/heartbeats_test.py b/aikido_firewall/background_process/heartbeats_test.py new file mode 100644 index 000000000..34eca8f4c --- /dev/null +++ b/aikido_firewall/background_process/heartbeats_test.py @@ -0,0 +1,62 @@ +import pytest +from unittest.mock import Mock, patch +from aikido_firewall.background_process.heartbeats import ( + send_heartbeats_every_x_secs, + send_heartbeat_wrapper, +) + + +def 
test_send_heartbeats_serverless(): + reporter = Mock() + reporter.serverless = True + reporter.token = "mocked_token" + event_scheduler = Mock() + + with patch("aikido_firewall.helpers.logging.logger.debug") as mock_debug: + send_heartbeats_every_x_secs(reporter, 5, event_scheduler) + + mock_debug.assert_called_once_with( + "Running in serverless environment, not starting heartbeats" + ) + event_scheduler.enter.assert_not_called() + + +def test_send_heartbeats_no_token(): + reporter = Mock() + reporter.serverless = False + reporter.token = None + event_scheduler = Mock() + + with patch("aikido_firewall.helpers.logging.logger.debug") as mock_debug: + send_heartbeats_every_x_secs(reporter, 5, event_scheduler) + + mock_debug.assert_called_once_with("No token provided, not starting heartbeats") + event_scheduler.enter.assert_not_called() + + +def test_send_heartbeats_success(): + reporter = Mock() + reporter.serverless = False + reporter.token = "mocked_token" + event_scheduler = Mock() + + with patch("aikido_firewall.helpers.logging.logger.debug") as mock_debug: + send_heartbeats_every_x_secs(reporter, 5, event_scheduler) + + mock_debug.assert_called_with("Starting heartbeats") + event_scheduler.enter.assert_called_once_with( + 0, 1, send_heartbeat_wrapper, (reporter, 5, event_scheduler) + ) + + +def test_send_heartbeat_wrapper(): + reporter = Mock() + reporter.send_heartbeat = Mock() + event_scheduler = Mock() + + send_heartbeat_wrapper(reporter, 5, event_scheduler) + + reporter.send_heartbeat.assert_called_once() + event_scheduler.enter.assert_called_once_with( + 5, 1, send_heartbeat_wrapper, (reporter, 5, event_scheduler) + ) diff --git a/aikido_firewall/background_process/reporter.py b/aikido_firewall/background_process/reporter.py new file mode 100644 index 000000000..5f2abed51 --- /dev/null +++ b/aikido_firewall/background_process/reporter.py @@ -0,0 +1,154 @@ +""" This file simply exports the Reporter class""" + +import time +import socket +import platform 
+import json +from copy import deepcopy +from aikido_firewall.helpers.logging import logger +from aikido_firewall.helpers.limit_length_metadata import limit_length_metadata +from aikido_firewall.helpers.token import Token +from aikido_firewall.helpers.get_machine_ip import get_ip +from aikido_firewall.helpers.get_ua_from_context import get_ua_from_context +from aikido_firewall.helpers.get_current_unixtime_ms import get_unixtime_ms +from aikido_firewall import PKG_VERSION +from aikido_firewall.background_process.heartbeats import send_heartbeats_every_x_secs + + +class Reporter: + """Reporter class""" + + timeout_in_sec = 5 # Timeout of API calls to Aikido Server + heartbeat_secs = 600 # Heartbeat every 10 minutes + + def __init__(self, block, api, token, serverless, event_scheduler): + self.block = block + self.api = api + self.token = token # Should be instance of the Token class! + + if isinstance(serverless, str) and len(serverless) == 0: + raise ValueError("Serverless cannot be an empty string") + self.serverless = serverless + + self.on_start() + send_heartbeats_every_x_secs(self, self.heartbeat_secs, event_scheduler) + + def on_detected_attack(self, attack, context): + """ + This will send something to the API when an attack is detected + """ + if not self.token: + return + # Modify attack so we can send it out : + try: + attack["user"] = None + attack["payload"] = json.dumps(attack["payload"])[:4096] + attack["metadata"] = limit_length_metadata(attack["metadata"], 4096) + attack["blocked"] = self.block + + payload = { + "type": "detected_attack", + "time": get_unixtime_ms(), + "agent": self.get_reporter_info(), + "attack": attack, + "request": { + "method": context.method, + "url": context.url, + "ipAddress": context.remote_address, + "userAgent": get_ua_from_context(context), + "body": context.body, + "headers": context.headers, + "source": context.source, + "route": context.route, + }, + } + logger.debug(json.dumps(payload)) + result = self.api.report( + 
self.token, + payload, + self.timeout_in_sec, + ) + logger.debug("Result : %s", result) + except Exception as e: + logger.debug(e) + logger.info("Failed to report attack") + + def send_heartbeat(self): + """ + This will send a heartbeat to the server + """ + if not self.token: + return + logger.debug("Aikido Reporter : Sending out heartbeat") + res = self.api.report( + self.token, + { + "type": "heartbeat", + "time": get_unixtime_ms(), + "agent": self.get_reporter_info(), + "stats": { + "sinks": {}, + "startedAt": 0, + "endedAt": 0, + "requests": { + "total": 0, + "aborted": 0, + "attacksDetected": { + "total": 0, + "blocked": 0, + }, + }, + }, + "hostnames": [], + "routes": [], + "users": [], + }, + self.timeout_in_sec, + ) + self.update_service_config(res) + + def on_start(self): + """ + This will send out an Event signalling the start to the server + """ + if not self.token: + return + res = self.api.report( + self.token, + { + "type": "started", + "time": get_unixtime_ms(), + "agent": self.get_reporter_info(), + }, + self.timeout_in_sec, + ) + self.update_service_config(res) + + def get_reporter_info(self): + """ + This returns info about the reporter + """ + return { + "dryMode": not self.block, + "hostname": socket.gethostname(), + "version": PKG_VERSION, + "library": "firewall_python", + "ipAddress": get_ip(), + "packages": [], + "serverless": bool(self.serverless), + "stack": [], + "os": {"name": platform.system(), "version": platform.release()}, + "preventedPrototypePollution": False, # Get this out of the API maybe? 
+ "nodeEnv": "", + } + + def update_service_config(self, res): + """ + Update configuration based on the server's response + """ + if res["success"] is False: + logger.debug(res) + return + if "block" in res.keys() and res["block"] != self.block: + logger.debug("Updating blocking, setting blocking to : %s", res["block"]) + self.block = bool(res["block"]) diff --git a/aikido_firewall/context/__init__.py b/aikido_firewall/context/__init__.py index c0d9dde2c..ce6862fbe 100644 --- a/aikido_firewall/context/__init__.py +++ b/aikido_firewall/context/__init__.py @@ -5,7 +5,8 @@ import threading from urllib.parse import parse_qs from http.cookies import SimpleCookie - +from aikido_firewall.helpers.build_route_from_url import build_route_from_url +from aikido_firewall.helpers.get_subdomains_from_url import get_subdomains_from_url SUPPORTED_SOURCES = ["django", "flask", "django-gunicorn"] UINPUT_SOURCES = ["body", "cookies", "query", "headers"] @@ -51,7 +52,11 @@ class Context: for vulnerability detection """ - def __init__(self, req, source): + def __init__(self, context_obj=None, req=None, source=None): + if context_obj: + self.__dict__.update(context_obj) + return + if not source in SUPPORTED_SOURCES: raise ValueError(f"Source {source} not supported") self.source = source @@ -63,6 +68,8 @@ def __init__(self, req, source): self.set_django_attrs(req) elif source == "django-gunicorn": self.set_django_gunicorn_attrs(req) + self.route = build_route_from_url(self.url) + self.subdomains = get_subdomains_from_url(self.url) def set_django_gunicorn_attrs(self, req): """Set properties that are specific to django-gunicorn""" @@ -93,14 +100,20 @@ def __reduce__(self): return ( self.__class__, ( - self.method, - self.remote_address, - self.url, - self.body, - self.headers, - self.query, - self.cookies, - self.source, + { + "method": self.method, + "remote_address": self.remote_address, + "url": self.url, + "body": self.body, + "headers": self.headers, + "query": self.query, + 
"cookies": self.cookies, + "source": self.source, + "route": self.route, + "subdomains": self.subdomains, + }, + None, + None, ), ) diff --git a/aikido_firewall/context/init_test.py b/aikido_firewall/context/init_test.py index f73c06e33..829e3e237 100644 --- a/aikido_firewall/context/init_test.py +++ b/aikido_firewall/context/init_test.py @@ -1,4 +1,5 @@ import pytest +import pickle from aikido_firewall.context import Context, get_current_context @@ -10,7 +11,7 @@ def test_get_current_context_no_context(): def test_set_as_current_context(mocker): # Test set_as_current_context() method sample_request = mocker.MagicMock() - context = Context(sample_request, "flask") + context = Context(req=sample_request, source="flask") context.set_as_current_context() assert get_current_context() == context @@ -18,7 +19,7 @@ def test_set_as_current_context(mocker): def test_get_current_context_with_context(mocker): # Test get_current_context() when a context is set sample_request = mocker.MagicMock() - context = Context(sample_request, "flask") + context = Context(req=sample_request, source="flask") context.set_as_current_context() assert get_current_context() == context @@ -33,7 +34,7 @@ def test_context_init_flask(mocker): req.args.to_dict.return_value = {"key": "value"} req.cookies.to_dict.return_value = {"cookie": "value"} - context = Context(req, "flask") + context = Context(req=req, source="flask") assert context.source == "flask" assert context.method == "GET" assert context.remote_address == "127.0.0.1" @@ -54,7 +55,7 @@ def test_context_init_django(mocker): req.GET = {"key": "value"} req.COOKIES = {"cookie": "value"} - context = Context(req, "django") + context = Context(req=req, source="django") assert context.source == "django" assert context.method == "POST" assert context.remote_address == "127.0.0.1" @@ -63,3 +64,26 @@ def test_context_init_django(mocker): assert context.headers == {"Content-Type": "application/json"} assert context.query == {"key": "value"} assert 
context.cookies == {"cookie": "value"} + + +def test_context_is_picklable(mocker): + req = mocker.MagicMock() + req.method = "POST" + req.META.get.return_value = "127.0.0.1" + req.build_absolute_uri.return_value = "http://example.com" + req.POST = {"key": "value"} + req.headers = {"Content-Type": "application/json"} + req.GET = {"key": "value"} + req.COOKIES = {"cookie": "value"} + context = Context(req=req, source="django") + + pickled_obj = pickle.dumps(context) + unpickled_obj = pickle.loads(pickled_obj) + assert unpickled_obj.source == "django" + assert unpickled_obj.method == "POST" + assert unpickled_obj.remote_address == "127.0.0.1" + assert unpickled_obj.url == "http://example.com" + assert unpickled_obj.body == {"key": "value"} + assert unpickled_obj.headers == {"Content-Type": "application/json"} + assert unpickled_obj.query == {"key": "value"} + assert unpickled_obj.cookies == {"cookie": "value"} diff --git a/aikido_firewall/helpers/get_current_unixtime_ms.py b/aikido_firewall/helpers/get_current_unixtime_ms.py new file mode 100644 index 000000000..a2cb48c11 --- /dev/null +++ b/aikido_firewall/helpers/get_current_unixtime_ms.py @@ -0,0 +1,10 @@ +""" +Helper function file, see function docstring +""" + +import time + + +def get_unixtime_ms(): + """Get the current unix time but in ms""" + return int(time.time() * 1000) diff --git a/aikido_firewall/helpers/get_current_unixtime_ms_test.py b/aikido_firewall/helpers/get_current_unixtime_ms_test.py new file mode 100644 index 000000000..6aa08678c --- /dev/null +++ b/aikido_firewall/helpers/get_current_unixtime_ms_test.py @@ -0,0 +1,23 @@ +import pytest +from aikido_firewall.helpers.get_current_unixtime_ms import get_unixtime_ms +import time + + +def test_get_unixtime_ms(monkeypatch): + # Mock time.time to return a specific timestamp + monkeypatch.setattr( + time, "time", lambda: 1633072800.123 + ) # Example timestamp in seconds + + # Calculate the expected result in milliseconds + expected_result = 
int(1633072800.123 * 1000) + + assert get_unixtime_ms() == expected_result + + +def test_get_unixtime_ms_zero(monkeypatch): + # Mock time.time to return zero + monkeypatch.setattr(time, "time", lambda: 0.0) + + # The expected result should be 0 milliseconds + assert get_unixtime_ms() == 0 diff --git a/aikido_firewall/helpers/get_machine_ip.py b/aikido_firewall/helpers/get_machine_ip.py new file mode 100644 index 000000000..d36eef729 --- /dev/null +++ b/aikido_firewall/helpers/get_machine_ip.py @@ -0,0 +1,13 @@ +""" +Helper function file, see function docstring +""" + +import socket + + +def get_ip(): + """Tries to fetch the IP and returns x.x.x.x on failure""" + try: + return socket.gethostbyname(socket.gethostname()) + except Exception: + return "x.x.x.x" diff --git a/aikido_firewall/helpers/get_machine_ip_test.py b/aikido_firewall/helpers/get_machine_ip_test.py new file mode 100644 index 000000000..2fe2b99ec --- /dev/null +++ b/aikido_firewall/helpers/get_machine_ip_test.py @@ -0,0 +1,25 @@ +import pytest +import socket +from aikido_firewall.helpers.get_machine_ip import get_ip + + +def test_get_ip_success(monkeypatch): + # Mock the socket.gethostname to return a specific hostname + monkeypatch.setattr(socket, "gethostname", lambda: "mocked_hostname") + # Mock the socket.gethostbyname to return a specific IP address + monkeypatch.setattr(socket, "gethostbyname", lambda hostname: "192.168.1.1") + + assert get_ip() == "192.168.1.1" + + +def test_get_ip_failure(monkeypatch): + # Mock the socket.gethostname to return a specific hostname + monkeypatch.setattr(socket, "gethostname", lambda: "mocked_hostname") + # Mock the socket.gethostbyname to raise an exception + monkeypatch.setattr( + socket, + "gethostbyname", + lambda hostname: (_ for _ in ()).throw(Exception("Mocked exception")), + ) + + assert get_ip() == "x.x.x.x" diff --git a/aikido_firewall/helpers/get_subdomains_from_url.py b/aikido_firewall/helpers/get_subdomains_from_url.py new file mode 100644 index 
000000000..c43e02765 --- /dev/null +++ b/aikido_firewall/helpers/get_subdomains_from_url.py @@ -0,0 +1,18 @@ +""" +Helper function file, see function docstring +""" + +from urllib.parse import urlparse + + +def get_subdomains_from_url(url): + """ + Returns a list with subdomains from url + """ + if not isinstance(url, str): + return [] + host = urlparse(url).hostname + if not host: + return [] + parts = host.split(".") + return parts[:-2] diff --git a/aikido_firewall/helpers/get_subdomains_from_url_test.py b/aikido_firewall/helpers/get_subdomains_from_url_test.py new file mode 100644 index 000000000..06d7bc681 --- /dev/null +++ b/aikido_firewall/helpers/get_subdomains_from_url_test.py @@ -0,0 +1,29 @@ +import pytest +from aikido_firewall.helpers.get_subdomains_from_url import get_subdomains_from_url + + +def test_get_subdomains_from_url(): + # Test cases with expected results + test_cases = [ + # Test with a standard URL + ("http://tobi.ferrets.example.com", ["tobi", "ferrets"]), + # Test with a URL that has no subdomains + ("http://example.com", []), + # Test with a URL that has multiple subdomains + ("http://a.b.c.example.com", ["a", "b", "c"]), + # Test with a URL that has a port + ("http://tobi.ferrets.example.com:8080", ["tobi", "ferrets"]), + # Test with a URL that has only the main domain + ("http://localhost", []), + # Test with an invalid URL + ("http://.com", []), + # Test with an empty string + ("", []), + # Test with a URL with subdomains and a path + ("http://tobi.ferrets.example.com/path/to/resource", ["tobi", "ferrets"]), + ({}, []), + (None, []), + ] + + for url, expected in test_cases: + assert get_subdomains_from_url(url) == expected diff --git a/aikido_firewall/helpers/get_ua_from_context.py b/aikido_firewall/helpers/get_ua_from_context.py new file mode 100644 index 000000000..5c182f172 --- /dev/null +++ b/aikido_firewall/helpers/get_ua_from_context.py @@ -0,0 +1,6 @@ +def get_ua_from_context(context): + """Tries to retrieve the user agent from 
context""" + for k, v in context.headers.items(): + if k.lower() == "user-agent": + return v + return None diff --git a/aikido_firewall/helpers/get_ua_from_context_test.py b/aikido_firewall/helpers/get_ua_from_context_test.py new file mode 100644 index 000000000..9a99d4239 --- /dev/null +++ b/aikido_firewall/helpers/get_ua_from_context_test.py @@ -0,0 +1,54 @@ +import pytest +from aikido_firewall.helpers.get_ua_from_context import get_ua_from_context + + +class Context: + def __init__(self, headers): + self.headers = headers + + +def test_user_agent_present(): + context = Context( + { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3" + } + ) + assert ( + get_ua_from_context(context) + == "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3" + ) + + +def test_user_agent_present_case_insensitive(): + context = Context( + { + "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3" + } + ) + assert ( + get_ua_from_context(context) + == "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3" + ) + + +def test_user_agent_not_present(): + context = Context({"Accept": "text/html", "Content-Type": "application/json"}) + assert get_ua_from_context(context) is None + + +def test_user_agent_empty_value(): + context = Context({"User-Agent": ""}) + assert get_ua_from_context(context) == "" + + +def test_user_agent_with_other_headers(): + context = Context( + { + "Accept": "text/html", + "User-Agent": "Mozilla/5.0 (Linux; Android 10; Pixel 3 XL) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Mobile Safari/537.36", + } + ) + assert ( + get_ua_from_context(context) + == "Mozilla/5.0 (Linux; Android 10; Pixel 3 XL) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Mobile 
Safari/537.36" + ) diff --git a/aikido_firewall/helpers/limit_length_metadata.py b/aikido_firewall/helpers/limit_length_metadata.py new file mode 100644 index 000000000..4df689544 --- /dev/null +++ b/aikido_firewall/helpers/limit_length_metadata.py @@ -0,0 +1,15 @@ +""" +Helper function file, see function docstring +""" + + +def limit_length_metadata(metadata, max_length): + """ + Limits the length of the metadata obj so it can be sent out + """ + for key in metadata: + print(len(metadata[key])) + if len(metadata[key]) > max_length: + metadata[key] = metadata[key][:max_length] + + return metadata diff --git a/aikido_firewall/helpers/limit_length_metadata_test.py b/aikido_firewall/helpers/limit_length_metadata_test.py new file mode 100644 index 000000000..524c71b68 --- /dev/null +++ b/aikido_firewall/helpers/limit_length_metadata_test.py @@ -0,0 +1,21 @@ +import pytest +from aikido_firewall.helpers.limit_length_metadata import limit_length_metadata + + +def test_limit_length_metadata(): + # Test case 1: Check if values are truncated correctly + metadata = {"key1": "value1", "key2": "value2longvalue", "key3": "value3"} + max_length = 6 + expected_result = {"key1": "value1", "key2": "value2", "key3": "value3"} + assert limit_length_metadata(metadata, max_length) == expected_result + + # Test case 2: Check if values are not truncated if within max length + metadata = {"key1": "value1", "key2": "value2", "key3": "value3"} + max_length = 10 + expected_result = {"key1": "value1", "key2": "value2", "key3": "value3"} + assert limit_length_metadata(metadata, max_length) == expected_result + + metadata = {} + max_length = 5 + expected_result = {} + assert limit_length_metadata(metadata, max_length) == expected_result diff --git a/aikido_firewall/helpers/should_block.py b/aikido_firewall/helpers/should_block.py new file mode 100644 index 000000000..e044283f1 --- /dev/null +++ b/aikido_firewall/helpers/should_block.py @@ -0,0 +1,16 @@ +""" +Helper function file, see function 
docstring +""" + +import os + + +def should_block(): + """ + Checks the environment variable "AIKIDO_BLOCKING" + """ + # Set log level + aikido_blocking_env = os.getenv("AIKIDO_BLOCKING") + if aikido_blocking_env is not None: + return aikido_blocking_env.lower() in ["true", "1"] + return False diff --git a/aikido_firewall/helpers/token.py b/aikido_firewall/helpers/token.py new file mode 100644 index 000000000..ab39c1c14 --- /dev/null +++ b/aikido_firewall/helpers/token.py @@ -0,0 +1,29 @@ +""" +Helper module for token +""" + +import os + + +class Token: + """Class that encapsulates the token""" + + def __init__(self, token): + if not isinstance(token, str): + raise ValueError("Token should be an instance of string") + if len(token) == 0: + raise ValueError("Token cannot be an empty string") + self.token = token + + def __str__(self): + return self.token + + +def get_token_from_env(): + """ + Fetches the token from the env variable "AIKIDO_TOKEN" + """ + aikido_token_env = os.getenv("AIKIDO_TOKEN") + if aikido_token_env is not None: + return Token(aikido_token_env) + return None diff --git a/aikido_firewall/helpers/token_test.py b/aikido_firewall/helpers/token_test.py new file mode 100644 index 000000000..f710f8f26 --- /dev/null +++ b/aikido_firewall/helpers/token_test.py @@ -0,0 +1,25 @@ +import pytest +from aikido_firewall.helpers.token import Token + + +# Test Token Class : +def test_token_valid_string(): + token_str = "my_token" + token = Token(token_str) + assert str(token) == token_str + + +def test_token_empty_string(): + with pytest.raises(ValueError): + Token("") + + +def test_token_invalid_type(): + with pytest.raises(ValueError): + Token(123) + + +def test_token_instance(): + token_str = "my_token" + token = Token(token_str) + assert isinstance(token, Token) diff --git a/aikido_firewall/middleware/django.py b/aikido_firewall/middleware/django.py index 276387b36..2fcb2ce7c 100644 --- a/aikido_firewall/middleware/django.py +++ 
b/aikido_firewall/middleware/django.py @@ -17,7 +17,7 @@ def __init__(self, get_response): def __call__(self, request, *args, **kwargs): logger.debug("Aikido middleware for `django` was called : __call__") - context = Context(request, "django") + context = Context(req=request, source="django") context.set_as_current_context() return self.get_response(request) diff --git a/aikido_firewall/sinks/mysqlclient.py b/aikido_firewall/sinks/mysqlclient.py index 801df40fe..a8d999c30 100644 --- a/aikido_firewall/sinks/mysqlclient.py +++ b/aikido_firewall/sinks/mysqlclient.py @@ -7,11 +7,12 @@ from importlib.metadata import version import importhook from aikido_firewall.context import get_current_context -from aikido_firewall.vulnerabilities.sql_injection.check_context_for_sql_injection import ( - check_context_for_sql_injection, +from aikido_firewall.vulnerabilities.sql_injection.context_contains_sql_injection import ( + context_contains_sql_injection, ) from aikido_firewall.vulnerabilities.sql_injection.dialects import MySQL from aikido_firewall.helpers.logging import logger +from aikido_firewall.background_process import get_comms @importhook.on_import("MySQLdb.connections") @@ -30,13 +31,17 @@ def aikido_new_query(_self, sql): logger.debug("Wrapper - `mysqlclient` version : %s", version("mysqlclient")) context = get_current_context() - result = check_context_for_sql_injection( + contains_injection = context_contains_sql_injection( sql.decode("utf-8"), "MySQLdb.connections.query", context, MySQL() ) - logger.debug("sql_injection results : %s", json.dumps(result)) - if result: - raise Exception("SQL Injection [aikido_firewall]") + logger.debug("sql_injection results : %s", json.dumps(contains_injection)) + if contains_injection: + get_comms().send_data_to_bg_process("ATTACK", (contains_injection, context)) + should_block = get_comms().poll_config("block") + if should_block: + raise Exception("SQL Injection [aikido_firewall]") + return prev_query_function(_self, sql) # 
pylint: disable=no-member diff --git a/aikido_firewall/sinks/pymysql.py b/aikido_firewall/sinks/pymysql.py index ef41e28f6..88f5e1353 100644 --- a/aikido_firewall/sinks/pymysql.py +++ b/aikido_firewall/sinks/pymysql.py @@ -8,8 +8,8 @@ from importlib.metadata import version import importhook from aikido_firewall.context import get_current_context -from aikido_firewall.vulnerabilities.sql_injection.check_context_for_sql_injection import ( - check_context_for_sql_injection, +from aikido_firewall.vulnerabilities.sql_injection.context_contains_sql_injection import ( + context_contains_sql_injection, ) from aikido_firewall.vulnerabilities.sql_injection.dialects import MySQL from aikido_firewall.background_process import get_comms @@ -33,14 +33,17 @@ def aikido_new_query(_self, sql, unbuffered=False): logger.debug("Wrapper - `pymysql` version : %s", version("pymysql")) context = get_current_context() - result = check_context_for_sql_injection( + contains_injection = context_contains_sql_injection( sql, "pymysql.connections.query", context, MySQL() ) - logger.info("sql_injection results : %s", json.dumps(result)) - if result: - get_comms().send_data_to_bg_process("ATTACK", result) - raise Exception("SQL Injection [aikido_firewall]") + logger.info("sql_injection results : %s", json.dumps(contains_injection)) + if contains_injection: + get_comms().send_data_to_bg_process("ATTACK", (contains_injection, context)) + should_block = get_comms().poll_config("block") + if should_block: + raise Exception("SQL Injection [aikido_firewall]") + return prev_query_function(_self, sql, unbuffered=unbuffered) # pylint: disable=no-member diff --git a/aikido_firewall/sources/flask.py b/aikido_firewall/sources/flask.py index d5d92cced..bc0de0eb0 100644 --- a/aikido_firewall/sources/flask.py +++ b/aikido_firewall/sources/flask.py @@ -21,7 +21,7 @@ def __init__(self): def dispatch(self, request, call_next): """Dispatch function""" logger.debug("Aikido middleware for `flask` was called") - context =
Context(request, "flask") + context = Context(req=request, source="flask") context.set_as_current_context() response = call_next(request) diff --git a/aikido_firewall/vulnerabilities/sql_injection/check_context_for_sql_injection.py b/aikido_firewall/vulnerabilities/sql_injection/context_contains_sql_injection.py similarity index 94% rename from aikido_firewall/vulnerabilities/sql_injection/check_context_for_sql_injection.py rename to aikido_firewall/vulnerabilities/sql_injection/context_contains_sql_injection.py index 77c205ae2..41d761156 100644 --- a/aikido_firewall/vulnerabilities/sql_injection/check_context_for_sql_injection.py +++ b/aikido_firewall/vulnerabilities/sql_injection/context_contains_sql_injection.py @@ -11,7 +11,7 @@ from aikido_firewall.context import UINPUT_SOURCES as SOURCES -def check_context_for_sql_injection(sql, operation, context, dialect): +def context_contains_sql_injection(sql, operation, context, dialect): """ This will check the context of the request for SQL Injections """ diff --git a/poetry.lock b/poetry.lock index 652a43162..992b7b5c5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -66,6 +66,116 @@ files = [ {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, ] +[[package]] +name = "certifi" +version = "2024.7.4" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + [[package]] name = "click" version = "8.1.7" @@ -207,6 +317,17 @@ files = [ flask = "*" werkzeug = "*" +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + [[package]] name = "importhook" version = "1.0.9" @@ -596,6 +717,27 @@ files = [ {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, ] +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + [[package]] name = "tomlkit" version = "0.13.0" @@ -607,6 +749,23 @@ files = [ {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"}, ] +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "werkzeug" version = "3.0.3" @@ -627,4 +786,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "834b3f397a99c7e910e20f7f9904408c3a27141d136decaf4fb7233253984306" +content-hash = "b56496afffc10819de821835a4369beebda982e19e04f5519c669e36c81f5968" diff --git a/pyproject.toml b/pyproject.toml index 7a7caa139..307bbfceb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ pytest-mock = "^3.14.0" werkzeug = "^3.0.3" flask-http-middleware = "^0.4.2" regex = "^2024.5.15" +requests = "^2.32.3" [tool.poetry.group.dev.dependencies] black = "^24.4.2" diff 
--git a/sample-apps/flask-mysql/docker-compose.yml b/sample-apps/flask-mysql/docker-compose.yml index 1f2478edd..86f34326c 100644 --- a/sample-apps/flask-mysql/docker-compose.yml +++ b/sample-apps/flask-mysql/docker-compose.yml @@ -25,7 +25,7 @@ services: context: ./../../ dockerfile: ./sample-apps/flask-mysql/Dockerfile container_name: flask_mysql_backend - command: sh -c "flask --app app run --debug --host=0.0.0.0" + command: sh -c "flask --app app run --debug --host=0.0.0.0 --no-reload" restart: always volumes: - .:/app @@ -33,6 +33,8 @@ services: - "8080:5000" depends_on: - db + extra_hosts: + - "app.local.aikido.io:host-gateway" volumes: - db_data: \ No newline at end of file + db_data: