PBM-1525 test SSL_CERT_FILE env var with PBM #256
GitHub Actions / JUnit Test Report
failed
Apr 13, 2025 in 0s
59 tests run, 53 passed, 4 skipped, 2 failed.
Annotations
Check failure on line 71 in pbm-functional/pytest/test_PBM-1439.py
github-actions / JUnit Test Report
test_PBM-1439.test_logical_PBM_T294
AssertionError: Errors found in PBM backup logs
2025-04-13T05:38:54Z E [rs1/rs101:27017] [backup/2025-04-13T05:38:40Z] failed to upload oplog: write data: upload to S3: upload multipart failed, upload id: YWJjNGFmNDMtNDg0Zi00YzgzLWFhNzUtYTBhNmY5ZjVjMGRkLjdjZGZjY2MwLTIyYWItNGFiNS1iNmY2LTUwNzg5ZjU4YTNjZXgxNzQ0NTIyNzMyNzU5MTg4NDYz, cause: exceeded total allowed configured MaxUploadParts (5). Adjust PartSize to fit in this limit.
Raw output
start_cluster = True, cluster = <cluster.Cluster object at 0x7fc0bf7fe850>
    @pytest.mark.timeout(300, func_only=True)
    def test_logical_PBM_T294(start_cluster, cluster):
        """
        Test case to check multipart upload of backup files with a custom maxUploadParts option.
        It checks that the file is divided with a proper part size, allowing the file to grow during backup.
        """
cluster.exec_pbm_cli("config --set storage.s3.maxUploadParts=5")
client = pymongo.MongoClient(cluster.connection)
collection = client["test"]["bigdata"]
doc_size_kb = 10
total_size_mb = 1000
total_docs = (total_size_mb * 1024) // doc_size_kb
batch_size = 1000
docs_before = total_docs // 3
docs_during = total_docs - docs_before
def insert_random_data(start_id, count):
Cluster.log(f"Inserting {count} documents")
for i in range(start_id, start_id + count, batch_size):
batch = []
for j in range(min(batch_size, (start_id + count) - i)):
random_data = os.urandom(doc_size_kb * 1024).hex()
batch.append({"_id": i + j, "payload": random_data})
collection.insert_many(batch)
Cluster.log(f"Inserted {count} documents")
insert_random_data(start_id=0, count=docs_before)
insert_thread = threading.Thread(target=insert_random_data, args=(docs_before, docs_during))
insert_thread.start()
Cluster.log("Starting backup")
result = cluster.exec_pbm_cli("backup --wait")
insert_thread.join()
assert result.rc == 0, f"PBM backup failed\nstderr:\n{result.stderr}"
logs=cluster.exec_pbm_cli("logs -sD -t0 -e backup")
error_lines = [line for line in logs.stdout.splitlines() if " E " in line]
if error_lines:
error_summary = "\n".join(error_lines)
> raise AssertionError(f"Errors found in PBM backup logs\n{error_summary}")
E AssertionError: Errors found in PBM backup logs
E 2025-04-13T05:38:54Z E [rs1/rs101:27017] [backup/2025-04-13T05:38:40Z] failed to upload oplog: write data: upload to S3: upload multipart failed, upload id: YWJjNGFmNDMtNDg0Zi00YzgzLWFhNzUtYTBhNmY5ZjVjMGRkLjdjZGZjY2MwLTIyYWItNGFiNS1iNmY2LTUwNzg5ZjU4YTNjZXgxNzQ0NTIyNzMyNzU5MTg4NDYz, cause: exceeded total allowed configured MaxUploadParts (5). Adjust PartSize to fit in this limit.
test_PBM-1439.py:71: AssertionError
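The test caps storage.s3.maxUploadParts at 5 and inserts roughly 1 GB of documents, two thirds of them while the backup is already running, so the oplog keeps growing after its streaming upload to S3 has started. If the part size is fixed from an early size estimate, the stream can need far more than five parts by the time it ends, which matches the "exceeded total allowed configured MaxUploadParts (5)" error above. A back-of-the-envelope sketch of that arithmetic (hypothetical numbers, not PBM's actual sizing logic):

    # Rough arithmetic only; the estimate and growth figures below are made up.
    def min_part_size(expected_bytes: int, max_parts: int) -> int:
        # Smallest part size that fits expected_bytes into max_parts parts (ceiling division).
        return -(-expected_bytes // max_parts)

    max_parts = 5
    estimate = 100 * 1024 * 1024                    # size assumed when the upload starts
    part_size = min_part_size(estimate, max_parts)  # 20 MiB
    actual = 350 * 1024 * 1024                      # the oplog kept growing during the backup
    parts_needed = -(-actual // part_size)          # ceil(350 MiB / 20 MiB) = 18
    print(parts_needed > max_parts)                 # True -> upload rejected by the part cap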
Check failure on line 105 in pbm-functional/pytest/test_PBM-1439.py
github-actions / JUnit Test Report
test_PBM-1439.test_logical_PBM_T295
AssertionError: Errors found in PBM backup logs
2025-04-13T05:39:28Z E [rs1/rs101:27017] [backup/2025-04-13T05:39:23Z] failed to upload oplog: write data: upload to S3: upload multipart failed, upload id: YWJjNGFmNDMtNDg0Zi00YzgzLWFhNzUtYTBhNmY5ZjVjMGRkLjQ2Zjc1NjZjLTRlZTQtNGE0Zi1hMDRmLTNmMTFiZDc2NTcwMHgxNzQ0NTIyNzY3ODU5MjM3NDMy, cause: exceeded total allowed configured MaxUploadParts (5). Adjust PartSize to fit in this limit.
Raw output
start_cluster = True, cluster = <cluster.Cluster object at 0x7fc0bf7fe850>
    @pytest.mark.timeout(300, func_only=True)
    def test_logical_PBM_T295(start_cluster, cluster):
        """
        Test case to check multipart upload of the oplog with a custom maxUploadParts option.
        """
cluster.exec_pbm_cli("config --set storage.s3.maxUploadParts=5")
client = pymongo.MongoClient(cluster.connection)
collection = client["test"]["bigdata"]
doc_size_kb = 10
total_size_mb = 1000
total_docs = (total_size_mb * 1024) // doc_size_kb
batch_size = 1000
def insert_random_data(count):
Cluster.log(f"Inserting {count} documents")
for i in range(0, count, batch_size):
batch = []
for j in range(min(batch_size, count - i)):
random_data = os.urandom(doc_size_kb * 1024).hex()
batch.append({"_id": i + j, "payload": random_data})
collection.insert_many(batch)
Cluster.log(f"Inserted {count} documents")
insert_thread = threading.Thread(target=insert_random_data, args=(total_docs,))
insert_thread.start()
Cluster.log("Starting backup")
result = cluster.exec_pbm_cli("backup --ns=random.* --wait")
insert_thread.join()
assert result.rc == 0, f"PBM backup failed\nstderr:\n{result.stderr}"
logs = cluster.exec_pbm_cli("logs -sD -t0 -e backup")
error_lines = [line for line in logs.stdout.splitlines() if " E " in line]
if error_lines:
error_summary = "\n".join(error_lines)
> raise AssertionError(f"Errors found in PBM backup logs\n{error_summary}")
E AssertionError: Errors found in PBM backup logs
E 2025-04-13T05:39:28Z E [rs1/rs101:27017] [backup/2025-04-13T05:39:23Z] failed to upload oplog: write data: upload to S3: upload multipart failed, upload id: YWJjNGFmNDMtNDg0Zi00YzgzLWFhNzUtYTBhNmY5ZjVjMGRkLjQ2Zjc1NjZjLTRlZTQtNGE0Zi1hMDRmLTNmMTFiZDc2NTcwMHgxNzQ0NTIyNzY3ODU5MjM3NDMy, cause: exceeded total allowed configured MaxUploadParts (5). Adjust PartSize to fit in this limit.
test_PBM-1439.py:105: AssertionError
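T295 restricts the backup to --ns=random.*, a namespace that receives no writes in this test, so the backed-up collection data is minimal; the failure again comes from the oplog upload, which records the concurrent inserts into test.bigdata and outgrows whatever size was assumed when the multipart upload began. A minimal, purely illustrative simulation of that failure mode (hypothetical names, not PBM or AWS SDK code):

    # Illustrative only: a streaming uploader that fixes its part size from an
    # initial estimate, then hits the part cap when the stream keeps growing.
    class TooManyPartsError(Exception):
        pass

    def upload_stream(chunks, estimated_size, max_parts):
        part_size = max(1, -(-estimated_size // max_parts))  # ceil(estimate / max_parts)
        buffered, parts = 0, 0
        for chunk in chunks:
            buffered += len(chunk)
            while buffered >= part_size:          # emit a full part
                parts += 1
                if parts > max_parts:
                    raise TooManyPartsError(f"exceeded MaxUploadParts ({max_parts})")
                buffered -= part_size
        if buffered:                              # flush the final, possibly short, part
            parts += 1
            if parts > max_parts:
                raise TooManyPartsError(f"exceeded MaxUploadParts ({max_parts})")
        return parts

    # An oplog estimated at 10 MiB that actually produces 100 MiB of entries:
    stream = (b"x" * (1 << 20) for _ in range(100))
    upload_stream(stream, estimated_size=10 * (1 << 20), max_parts=5)  # raises TooManyPartsError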