Skip to content

Fix Python formatting in tests #2866

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Jun 17, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 39 additions & 12 deletions tests/AutoscalingTests/common.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,18 @@
import unittest
import random
import time
import subprocess
import signal
import concurrent.futures
import csv
import os
import random
import signal
import subprocess
import time
import unittest

from csv2md.table import Table
from selenium import webdriver
from selenium.webdriver.firefox.options import Options as FirefoxOptions
from selenium.webdriver.edge.options import Options as EdgeOptions
from selenium.webdriver.chrome.options import Options as ChromeOptions
from selenium.webdriver.edge.options import Options as EdgeOptions
from selenium.webdriver.firefox.options import Options as FirefoxOptions
from selenium.webdriver.remote.client_config import ClientConfig
from csv2md.table import Table

BROWSER = {
"chrome": ChromeOptions(),
Expand All @@ -27,16 +28,32 @@
timeout=3600,
)

FIELD_NAMES = ["Iteration", "New request sessions", "Sessions created time", "Sessions failed to create", "New pods scaled up", "Total running sessions", "Total running pods", "Max sessions per pod", "Gaps", "Sessions closed"]
FIELD_NAMES = [
"Iteration",
"New request sessions",
"Sessions created time",
"Sessions failed to create",
"New pods scaled up",
"Total running sessions",
"Total running pods",
"Max sessions per pod",
"Gaps",
"Sessions closed",
]


def get_pod_count():
    """Count selenium-node pods in the Running state across all namespaces."""
    proc = subprocess.run(
        ["kubectl", "get", "pods", "-A", "--no-headers"],
        capture_output=True,
        text=True,
    )
    running = 0
    for line in proc.stdout.splitlines():
        # One pod per line in `--no-headers` output; match name prefix and phase.
        if "selenium-node-" in line and "Running" in line:
            running += 1
    return running


def create_session(browser_name):
    """Open a remote WebDriver session on Linux for the given BROWSER key."""
    browser_options = BROWSER[browser_name]
    browser_options.set_capability("platformName", "Linux")
    driver = webdriver.Remote(
        command_executor=CLIENT_CONFIG.remote_server_addr,
        options=browser_options,
        client_config=CLIENT_CONFIG,
    )
    return driver


def wait_for_count_matches(sessions, timeout=10, interval=5):
elapsed = 0
Expand All @@ -48,20 +65,26 @@ def wait_for_count_matches(sessions, timeout=10, interval=5):
time.sleep(interval)
elapsed += interval
if pod_count != len(sessions):
print(f"WARN: Mismatch between pod count and session count after {timeout} seconds. Gaps: {pod_count - len(sessions)}")
print(
f"WARN: Mismatch between pod count and session count after {timeout} seconds. Gaps: {pod_count - len(sessions)}"
)
else:
print(f"PASS: Pod count matches session count after {elapsed} seconds.")


def close_all_sessions(sessions):
    """Quit every WebDriver session in order, then empty the list in place.

    Returns the same (now empty) list object so callers can rebind it.
    """
    # Iterate over a snapshot so clearing the list cannot disturb the walk.
    for driver in list(sessions):
        driver.quit()
    del sessions[:]
    return sessions


def create_sessions_in_parallel(new_request_sessions):
failed_jobs = 0
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = [executor.submit(create_session, random.choice(list(BROWSER.keys()))) for _ in range(new_request_sessions)]
futures = [
executor.submit(create_session, random.choice(list(BROWSER.keys()))) for _ in range(new_request_sessions)
]
sessions = []
for future in concurrent.futures.as_completed(futures):
try:
Expand All @@ -72,6 +95,7 @@ def create_sessions_in_parallel(new_request_sessions):
print(f"Total failed jobs: {failed_jobs}")
return sessions


def randomly_quit_sessions(sessions, sublist_size):
if sessions:
sessions_to_quit = random.sample(sessions, min(sublist_size, len(sessions)))
Expand All @@ -82,15 +106,18 @@ def randomly_quit_sessions(sessions, sublist_size):
return len(sessions_to_quit)
return 0


def get_result_file_name():
    """Return the base path (no extension) used for autoscaling result files."""
    # Plain literal: the original was an f-string with no placeholders (ruff F541).
    return "tests/autoscaling_results"


def export_results_to_csv(output_file, field_names, results):
    """Write `results` (list of dicts keyed by `field_names`) to `output_file` as CSV.

    Overwrites any existing file. `newline=""` is required by the csv module
    so the writer controls row terminators itself; without it, platforms that
    translate "\n" (e.g. Windows) produce a blank line after every row.
    """
    with open(output_file, mode="w", newline="") as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=field_names)
        writer.writeheader()
        writer.writerows(results)


def export_results_csv_to_md(csv_file, md_file):
with open(csv_file) as f:
table = Table.parse_csv(f)
Expand Down
38 changes: 23 additions & 15 deletions tests/AutoscalingTests/test_scale_chaos.py
Original file line number Diff line number Diff line change
@@ -1,23 +1,28 @@
import unittest
import csv
import random
import time
import signal
import csv
import time
import unittest

from csv2md.table import Table

from .common import *

SESSIONS = []
RESULTS = []
TEST_NODE_MAX_SESSIONS = int(os.getenv("TEST_NODE_MAX_SESSIONS", 1))
TEST_AUTOSCALING_ITERATIONS = int(os.getenv("TEST_AUTOSCALING_ITERATIONS", 20))


def signal_handler(signum, frame):
    """Handle SIGTERM/SIGINT: quit every tracked WebDriver session.

    Registered below for both signals so grid sessions are not leaked
    when the test run is interrupted.
    """
    print("Signal received, quitting all sessions...")
    close_all_sessions(SESSIONS)


signal.signal(signal.SIGTERM, signal_handler)
signal.signal(signal.SIGINT, signal_handler)


class SeleniumAutoscalingTests(unittest.TestCase):
def test_run_tests(self):
try:
Expand All @@ -38,18 +43,20 @@ def test_run_tests(self):
print(f"INFO: Total sessions: {total_sessions}")
print(f"INFO: Total pods: {total_pods}")
closed_session = randomly_quit_sessions(SESSIONS, random.randint(3, 12))
RESULTS.append({
FIELD_NAMES[0]: iteration + 1,
FIELD_NAMES[1]: new_request_sessions,
FIELD_NAMES[2]: f"{elapsed_time:.2f} s",
FIELD_NAMES[3]: failed_sessions,
FIELD_NAMES[4]: new_scaled_pods,
FIELD_NAMES[5]: total_sessions,
FIELD_NAMES[6]: total_pods,
FIELD_NAMES[7]: TEST_NODE_MAX_SESSIONS,
FIELD_NAMES[8]: (total_pods * TEST_NODE_MAX_SESSIONS) - total_sessions,
FIELD_NAMES[9]: closed_session,
})
RESULTS.append(
{
FIELD_NAMES[0]: iteration + 1,
FIELD_NAMES[1]: new_request_sessions,
FIELD_NAMES[2]: f"{elapsed_time:.2f} s",
FIELD_NAMES[3]: failed_sessions,
FIELD_NAMES[4]: new_scaled_pods,
FIELD_NAMES[5]: total_sessions,
FIELD_NAMES[6]: total_pods,
FIELD_NAMES[7]: TEST_NODE_MAX_SESSIONS,
FIELD_NAMES[8]: (total_pods * TEST_NODE_MAX_SESSIONS) - total_sessions,
FIELD_NAMES[9]: closed_session,
}
)
time.sleep(15)
finally:
print(f"FINISH: Closing {len(SESSIONS)} sessions.")
Expand All @@ -58,5 +65,6 @@ def test_run_tests(self):
export_results_to_csv(f"{output_file}.csv", FIELD_NAMES, RESULTS)
export_results_csv_to_md(f"{output_file}.csv", f"{output_file}.md")


if __name__ == "__main__":
unittest.main()
38 changes: 23 additions & 15 deletions tests/AutoscalingTests/test_scale_up.py
Original file line number Diff line number Diff line change
@@ -1,23 +1,28 @@
import unittest
import csv
import random
import time
import signal
import csv
import time
import unittest

from csv2md.table import Table

from .common import *

SESSIONS = []
RESULTS = []
TEST_NODE_MAX_SESSIONS = int(os.getenv("TEST_NODE_MAX_SESSIONS", 1))
TEST_AUTOSCALING_ITERATIONS = int(os.getenv("TEST_AUTOSCALING_ITERATIONS", 20))


def signal_handler(signum, frame):
    """Handle SIGTERM/SIGINT: quit every tracked WebDriver session.

    Registered below for both signals so grid sessions are not leaked
    when the test run is interrupted.
    """
    print("Signal received, quitting all sessions...")
    close_all_sessions(SESSIONS)


signal.signal(signal.SIGTERM, signal_handler)
signal.signal(signal.SIGINT, signal_handler)


class SeleniumAutoscalingTests(unittest.TestCase):
def test_run_tests(self):
try:
Expand All @@ -41,18 +46,20 @@ def test_run_tests(self):
closed_session = randomly_quit_sessions(SESSIONS, 20)
else:
closed_session = 0
RESULTS.append({
FIELD_NAMES[0]: iteration + 1,
FIELD_NAMES[1]: new_request_sessions,
FIELD_NAMES[2]: f"{elapsed_time:.2f} s",
FIELD_NAMES[3]: failed_sessions,
FIELD_NAMES[4]: new_scaled_pods,
FIELD_NAMES[5]: total_sessions,
FIELD_NAMES[6]: total_pods,
FIELD_NAMES[7]: TEST_NODE_MAX_SESSIONS,
FIELD_NAMES[8]: (total_pods * TEST_NODE_MAX_SESSIONS) - total_sessions,
FIELD_NAMES[9]: closed_session,
})
RESULTS.append(
{
FIELD_NAMES[0]: iteration + 1,
FIELD_NAMES[1]: new_request_sessions,
FIELD_NAMES[2]: f"{elapsed_time:.2f} s",
FIELD_NAMES[3]: failed_sessions,
FIELD_NAMES[4]: new_scaled_pods,
FIELD_NAMES[5]: total_sessions,
FIELD_NAMES[6]: total_pods,
FIELD_NAMES[7]: TEST_NODE_MAX_SESSIONS,
FIELD_NAMES[8]: (total_pods * TEST_NODE_MAX_SESSIONS) - total_sessions,
FIELD_NAMES[9]: closed_session,
}
)
time.sleep(15)
finally:
print(f"FINISH: Closing {len(SESSIONS)} sessions.")
Expand All @@ -61,5 +68,6 @@ def test_run_tests(self):
export_results_to_csv(f"{output_file}.csv", FIELD_NAMES, RESULTS)
export_results_csv_to_md(f"{output_file}.csv", f"{output_file}.md")


if __name__ == "__main__":
unittest.main()
Loading
Loading