diff --git a/.github/workflows/ci-pull-request.yml b/.github/workflows/ci-pull-request.yml index 1e42804c..175f8c4f 100644 --- a/.github/workflows/ci-pull-request.yml +++ b/.github/workflows/ci-pull-request.yml @@ -15,11 +15,12 @@ jobs: matrix: python_version: # https://python-release-cycle.glitch.me/ - - "3.7" - "3.8" - "3.9" - "3.10" - "3.11" + - "3.12" + - "3.13" runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 diff --git a/examples/get_data_promql_advanced.py b/examples/get_data_promql_advanced.py new file mode 100755 index 00000000..0a818c6b --- /dev/null +++ b/examples/get_data_promql_advanced.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python +# +# This script shows the basics of getting data out of Sysdig Monitor by executing a PromQL query +# that returns the top 5 Kubernetes workloads consuming the highest percentage of their allocated CPU +# by comparing actual usage to defined CPU limits. The query is executed over a 5-minute time window. +# + +import sys +import time +from datetime import datetime + +from sdcclient import SdcClient + + +def print_prometheus_results_as_table(results): + if not results: + print("No data found for the query.") + return + + # Store time series data + all_timestamps = set() + label_keys = [] + time_series_by_label = {} + + for series in results: + metric = series.get("metric", {}) + label = ','.join(f'{k}={v}' for k, v in sorted(metric.items())) + label_keys.append(label) + time_series_by_label[label] = {} + + for timestamp, value in series.get("values", []): + ts = int(float(timestamp)) + all_timestamps.add(ts) + time_series_by_label[label][ts] = value + + # Prepare header + label_keys = sorted(set(label_keys)) + all_timestamps = sorted(all_timestamps) + + print(f"{'Timestamp':<25} | " + " | ".join(f"{label}" for label in label_keys)) + print("-" * (26 + len(label_keys) * 25)) + + # Print each row, filling in missing values with "N/A" + for ts in all_timestamps: + dt = datetime.fromtimestamp(ts).isoformat() + row_values 
= [] + for label in label_keys: + value = time_series_by_label.get(label, {}).get(ts, "N/A") + row_values.append(value) + print(f"{dt:<25} | " + " | ".join(f"{val:>20}" for val in row_values)) + + +# +# Parse arguments +# +if len(sys.argv) != 3: + print(('usage: %s ' % sys.argv[0])) + print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + sys.exit(1) + +sdc_token = sys.argv[1] +hostname = sys.argv[2] + +sdclient = SdcClient(sdc_token, hostname) + +# +# A PromQL query to execute. The query retrieves the top 5 workloads in a specific Kubernetes +# cluster that are using the highest percentage of their allocated CPU resources. It calculates +# this by comparing the actual CPU usage of each workload to the CPU limits set for them and +# then ranks the results to show the top 5. +# +query = ''' +topk (5, + sum by (kube_cluster_name, kube_namespace_name, kube_workload_name) ( + rate( + sysdig_container_cpu_cores_used{ + kube_cluster_name="dev-cluster" + }[10m] + ) + ) + / + sum by (kube_cluster_name, kube_namespace_name, kube_workload_name) ( + kube_pod_container_resource_limits{ + kube_cluster_name="dev-cluster", + resource="cpu" + } + ) +) +''' + +# +# Time window: +# - end is the current time +# - start is the current time minus 5 minutes +# +end = int(time.time()) +start = end - 5 * 60 # 5 minutes ago + +# +# Step: +# - resolution step, how far should timestamp of each resulting sample be apart +# +step = 60 + +# +# Load data +# +ok, response_json = sdclient.get_data_promql(query, start, end, step) + +# +# Show the result +# +if ok: + # + # Read the response. 
The JSON looks like this: + # + # { + # "data": { + # "result": [ + # { + # "metric": {}, + # "values": [ + # [ + # 1744210080, + # "0.58" + # ], + # [ + # 1744210140, + # "0.58" + # ], + # [ + # 1744210200, + # "0.58" + # ], + # [ + # 1744210260, + # "0.5799999999999998" + # ], + # [ + # 1744210320, + # "0.5799999999999998" + # ], + # [ + # 1744210380, + # "0.5799999999999998" + # ] + # ] + # } + # ], + # "resultType": "matrix" + # }, + # "status": "success" + # } + # + + + # + # Print summary (what, when) + # + results = response_json.get("data", {}).get("result", []) + print_prometheus_results_as_table(results) + +else: + print(response_json) + sys.exit(1) diff --git a/examples/get_data_promql_instant_advanced.py b/examples/get_data_promql_instant_advanced.py new file mode 100644 index 00000000..3e23a516 --- /dev/null +++ b/examples/get_data_promql_instant_advanced.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python +# +# This script shows the basics of getting data out of Sysdig Monitor by executing a PromQL query +# that returns the top 5 Kubernetes workloads consuming the highest percentage of their allocated CPU +# by comparing actual usage to defined CPU limits. The query is executed at a timestamp 5 minutes ago. 
+# + +import sys +import time +from datetime import datetime + +from sdcclient import SdcClient + + +def print_prometheus_instant_result(result): + if not result: + print("No data found for the instant query.") + return + + # Determine if any result has labels + has_labels = any(entry.get("metric") for entry in result) + + if has_labels: + print(f"{'Timestamp':<25} | {'Metric':<40} | {'Value':>10}") + print("-" * 80) + else: + print(f"{'Timestamp':<25} | {'Value':>10}") + print("-" * 40) + + for entry in result: + timestamp, value = entry.get("value", [None, None]) + dt = datetime.fromtimestamp(float(timestamp)).isoformat() if timestamp else "N/A" + metric = entry.get("metric", {}) + + if has_labels: + label_str = ', '.join(f'{k}="{v}"' for k, v in sorted(metric.items())) + print(f"{dt:<25} | {label_str:<40} | {value:>10}") + else: + print(f"{dt:<25} | {value:>10}") + + +# +# Parse arguments +# +if len(sys.argv) != 3: + print(('usage: %s ' % sys.argv[0])) + print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + sys.exit(1) + +sdc_token = sys.argv[1] +hostname = sys.argv[2] + +sdclient = SdcClient(sdc_token, hostname) + +# +# A PromQL query to execute. The query retrieves the top 5 workloads in a specific Kubernetes +# cluster that are using the highest percentage of their allocated CPU resources. It calculates +# this by comparing the actual CPU usage of each workload to the CPU limits set for them and +# then ranks the results to show the top 5. 
+#
+query = '''
+topk(5,
+    sum by (kube_cluster_name, kube_namespace_name, kube_workload_name) (
+        rate(
+            sysdig_container_cpu_cores_used{
+                kube_cluster_name="dev-cluster"
+            }[10m]
+        )
+    )
+    /
+    sum by (kube_cluster_name, kube_namespace_name, kube_workload_name) (
+        kube_pod_container_resource_limits{
+            kube_cluster_name="dev-cluster",
+            resource="cpu"
+        }
+    )
+)
+'''
+
+#
+# Time:
+# - the parameter is optional; if not set, the current time is used
+#
+time = int(time.time()) - 5 * 60  # 5 minutes ago
+
+#
+# Load data
+#
+ok, response_json = sdclient.get_data_promql_instant(query, time)
+
+#
+# Show the result
+#
+if ok:
+    #
+    # Read the response. The JSON looks like this:
+    #
+    # {
+    #     "result": [
+    #         {
+    #             "metric": {},
+    #             "value": [
+    #                 1744272414,
+    #                 "0.58"
+    #             ]
+    #         }
+    #     ],
+    #     "resultType": "vector"
+    # }
+    #
+
+
+    #
+    # Print summary (what, when)
+    #
+    results = response_json.get("data", {}).get("result", [])
+    print_prometheus_instant_result(results)
+
+else:
+    print(response_json)
+    sys.exit(1)
diff --git a/examples/get_data_promql_instant_simple.py b/examples/get_data_promql_instant_simple.py
new file mode 100755
index 00000000..bdb8e726
--- /dev/null
+++ b/examples/get_data_promql_instant_simple.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+#
+# This script shows the basics of getting data out of Sysdig Monitor by creating a
+# simple query that returns the total CPU usage of all containers in all pods in the
+# last 10 minutes. The query is executed at a timestamp 5 minutes ago. 
+# + +import sys +import time +from datetime import datetime + +from sdcclient import SdcClient + + +def print_prometheus_instant_result(result): + if not result: + print("No data found for the instant query.") + return + + # Determine if any result has labels + has_labels = any(entry.get("metric") for entry in result) + + if has_labels: + print(f"{'Timestamp':<25} | {'Metric':<40} | {'Value':>10}") + print("-" * 80) + else: + print(f"{'Timestamp':<25} | {'Value':>10}") + print("-" * 40) + + for entry in result: + timestamp, value = entry.get("value", [None, None]) + dt = datetime.fromtimestamp(float(timestamp)).isoformat() if timestamp else "N/A" + metric = entry.get("metric", {}) + + if has_labels: + label_str = ', '.join(f'{k}="{v}"' for k, v in sorted(metric.items())) + print(f"{dt:<25} | {label_str:<40} | {value:>10}") + else: + print(f"{dt:<25} | {value:>10}") + + +# +# Parse arguments +# +if len(sys.argv) != 3: + print(('usage: %s ' % sys.argv[0])) + print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + sys.exit(1) + +sdc_token = sys.argv[1] +hostname = sys.argv[2] + +sdclient = SdcClient(sdc_token, hostname) + +# +# A PromQL query to execute. In this example, we are querying for the total CPU usage +# of all containers in all pods in the last 10 minutes. +# +query = ''' +sum ( + avg_over_time(kube_pod_container_resource_requests{resource="cpu"}[10m]) +) +''' + +# +# Time: +# - the parameter is optional; if not set, the current time is used +# +time = int(time.time()) - 5 * 60 # 5 minutes ago + +# +# Load data +# +ok, response_json = sdclient.get_data_promql_instant(query, time) + +# +# Show the result +# +if ok: + # + # Read the response. 
The JSON looks like this: + # + # { + # "result": [ + # { + # "metric": {}, + # "value": [ + # 1744272414, + # "0.58" + # ] + # } + # ], + # "resultType": "vector" + # } + # + + + # + # Print summary (what, when) + # + results = response_json.get("data", {}).get("result", []) + print_prometheus_instant_result(results) + +else: + print(response_json) + sys.exit(1) diff --git a/examples/get_data_promql_simple.py b/examples/get_data_promql_simple.py new file mode 100755 index 00000000..8aa4fd8c --- /dev/null +++ b/examples/get_data_promql_simple.py @@ -0,0 +1,150 @@ +#!/usr/bin/env python +# +# This script shows the basics of getting data out of Sysdig Monitor by creating a +# simple query that returns the total CPU usage of all containers in all pods in the +# last 10 minutes. The query is executed over a 5-minute time window. +# + +import sys +import time +from datetime import datetime + +from sdcclient import SdcClient + + +def print_prometheus_results_as_table(results): + if not results: + print("No data found for the query.") + return + + # Store time series data + all_timestamps = set() + label_keys = [] + time_series_by_label = {} + + for series in results: + metric = series.get("metric", {}) + label = ','.join(f'{k}={v}' for k, v in sorted(metric.items())) + label_keys.append(label) + time_series_by_label[label] = {} + + for timestamp, value in series.get("values", []): + ts = int(float(timestamp)) + all_timestamps.add(ts) + time_series_by_label[label][ts] = value + + # Prepare header + label_keys = sorted(set(label_keys)) + all_timestamps = sorted(all_timestamps) + + print(f"{'Timestamp':<25} | " + " | ".join(f"{label}" for label in label_keys)) + print("-" * (26 + len(label_keys) * 25)) + + # Print each row, filling in missing values with "N/A" + for ts in all_timestamps: + dt = datetime.fromtimestamp(ts).isoformat() + row_values = [] + for label in label_keys: + value = time_series_by_label.get(label, {}).get(ts, "N/A") + row_values.append(value) + 
print(f"{dt:<25} | " + " | ".join(f"{val:>20}" for val in row_values)) + + +# +# Parse arguments +# +if len(sys.argv) != 3: + print(('usage: %s ' % sys.argv[0])) + print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + sys.exit(1) + +sdc_token = sys.argv[1] +hostname = sys.argv[2] + +sdclient = SdcClient(sdc_token, hostname) + +# +# A PromQL query to execute. In this example, we are querying for the total CPU usage +# of all containers in all pods in the last 10 minutes. +# +query = ''' +sum ( + avg_over_time(kube_pod_container_resource_requests{resource="cpu"}[10m]) +) +''' + +# +# Time window: +# - end is the current time +# - start is the current time minus 5 minutes +# +end = int(time.time()) +start = end - 5 * 60 # 5 minutes ago + +# +# Step: +# - resolution step, how far should timestamp of each resulting sample be apart +# +step = 60 + +# +# Load data +# +ok, response_json = sdclient.get_data_promql(query, start, end, step) + +# +# Show the result +# +if ok: + # + # Read the response. The JSON looks like this: + # + # { + # "data": { + # "result": [ + # { + # "metric": {}, + # "values": [ + # [ + # 1744210080, + # "0.58" + # ], + # [ + # 1744210140, + # "0.58" + # ], + # [ + # 1744210200, + # "0.58" + # ], + # [ + # 1744210260, + # "0.5799999999999998" + # ], + # [ + # 1744210320, + # "0.5799999999999998" + # ], + # [ + # 1744210380, + # "0.5799999999999998" + # ] + # ] + # } + # ], + # "resultType": "matrix" + # }, + # "status": "success" + # } + # + + + # + # Print summary (what, when) + # + results = response_json.get("data", {}).get("result", []) + print_prometheus_results_as_table(results) + +else: + print(response_json) + sys.exit(1) diff --git a/examples/get_data_simple.py b/examples/get_data_simple.py index 3a9fabad..e6138842 100755 --- a/examples/get_data_simple.py +++ b/examples/get_data_simple.py @@ -63,7 +63,7 @@ # if ok: # - # Read response. The JSON looks like this: + # Read the response. 
The JSON looks like this: # # { # start: timestamp, diff --git a/examples/get_label_values.py b/examples/get_label_values.py new file mode 100644 index 00000000..1b43c181 --- /dev/null +++ b/examples/get_label_values.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# +# This script demonstrates how to use the `get_label_values` function to retrieve values +# for a specific label and render the output as a table. +# + +import sys +from sdcclient import SdcClient + + +def render_label_values_as_table(label_values): + if not label_values: + print("No values found for the specified label.") + return + + # Calculate the maximum width for the value column + max_width = max(len(value) for value in label_values) + + # Create a horizontal separator + separator = "+" + "-" * (max_width + 2) + "+" + + # Create the header row + header = f"| {'Value'.ljust(max_width)} |" + + # Create the rows for each label value + rows = [f"| {value.ljust(max_width)} |" for value in label_values] + + # Combine everything into a table + print(f"{separator}\n{header}\n{separator}\n" + "\n".join(rows) + f"\n{separator}") + + +# +# Parse arguments +# +if len(sys.argv) != 3: + print(('usage: %s ' % sys.argv[0])) + print('You can find your token at https://app.sysdigcloud.com/#/settings/user') + sys.exit(1) + +sdc_token = sys.argv[1] +hostname = sys.argv[2] + +sdclient = SdcClient(sdc_token, hostname) + +# +# The label name to fetch values for +# +label_name = "job" + +# +# Optional matchers to filter the label values +# +match = None # Replace with a list of matchers if needed + +# +# Optional limit +# +limit = 10 # Set to None to disable the limit + +# +# Fetch label values +# +ok, response_json = sdclient.get_label_values(label_name, match=match, limit=limit) + +# +# Show the result +# +if ok: + # + # Read the response. 
The JSON looks like this:
+    #
+    # {
+    #     "data": [
+    #         "fluentd-default",
+    #         "harbor-registry-default",
+    #         "k8s-cadvisor-default",
+    #         "k8s-kubelet-default",
+    #         "k8s-pods",
+    #         "k8s-pvc-default",
+    #     ],
+    #     "status": "success"
+    # }
+    #
+    label_values = response_json.get("data", [])
+    render_label_values_as_table(label_values)
+else:
+    print("Error retrieving label values:", response_json)
diff --git a/examples/get_labels.py b/examples/get_labels.py
new file mode 100644
index 00000000..6d6e8f50
--- /dev/null
+++ b/examples/get_labels.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+#
+# This script demonstrates how to use the `get_labels` function to retrieve metadata
+# about label names and render the output as a table.
+#
+
+import sys
+from sdcclient import SdcClient
+
+
+def render_labels_as_table(labels):
+    if not labels:
+        print("No labels found.")
+        return
+
+    # Calculate the maximum width for the label column
+    max_width = max(len(label) for label in labels)
+
+    # Create a horizontal separator
+    separator = "+" + "-" * (max_width + 2) + "+"
+
+    # Create the header row
+    header = f"| {'Label'.ljust(max_width)} |"
+
+    # Create the rows for each label
+    rows = [f"| {label.ljust(max_width)} |" for label in labels]
+
+    # Combine everything into a table
+    print(f"{separator}\n{header}\n{separator}\n" + "\n".join(rows) + f"\n{separator}")
+
+
+#
+# Parse arguments
+#
+if len(sys.argv) != 3:
+    print(('usage: %s <sysdig-token> <hostname>' % sys.argv[0]))
+    print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
+    sys.exit(1)
+
+sdc_token = sys.argv[1]
+hostname = sys.argv[2]
+
+sdclient = SdcClient(sdc_token, hostname)
+
+#
+# Optional matchers to filter the labels
+#
+match = [
+    'up'
+]  # Replace with a list of matchers if needed
+
+#
+# Optional limit
+#
+limit = 10  # Set to None to disable the limit
+
+#
+# Fetch labels
+#
+ok, response_json = sdclient.get_labels(match=match, limit=limit)
+
+#
+# Show the result
+#
+if ok:
+    #
+    # Read the response. 
The JSON looks like this: + # + # { + # "status": "success", + # "data": [ + # "agent_id", + # "k8s_app", + # "kube_pod_uid", + # "kubernetes_io_cluster_service", + # "container_image_tag", + # "cloud_provider_tag_team", + # "cloud_provider_tag_expirationDate", + # "kube_pod_label_kubernetes_azure_com_managedby", + # ] + # } + # + labels = response_json.get("data", []) + render_labels_as_table(labels) +else: + print("Error retrieving labels:", response_json) diff --git a/examples/get_metadata.py b/examples/get_metadata.py new file mode 100644 index 00000000..daec2636 --- /dev/null +++ b/examples/get_metadata.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python +# +# This script demonstrates how to use the `get_metadata` function to retrieve metadata +# about metrics and render the output as a table. +# + +import sys +from sdcclient import SdcClient + + +def render_metadata_as_table(metadata): + if not metadata: + print("No metadata found.") + return + + # Extract all metric names and their metadata + rows = [] + for metric, details in metadata.items(): + for detail in details: + rows.append({ + "Metric": metric, + "Type": detail.get("type", ""), + "Unit": detail.get("unit", ""), + "Help": detail.get("help", "") + }) + + # Extract column names + columns = ["Metric", "Type", "Unit", "Help"] + + # Calculate the maximum width for each column + column_widths = {col: max(len(col), max(len(str(row[col])) for row in rows)) for col in columns} + + # Create a horizontal separator + separator = "+" + "+".join("-" * (column_widths[col] + 2) for col in columns) + "+" + + # Create the header row + header = "|" + "|".join(f" {col.ljust(column_widths[col])} " for col in columns) + "|" + + # Create the rows for each metadata entry + table_rows = [ + "|" + "|".join(f" {str(row[col]).ljust(column_widths[col])} " for col in columns) + "|" + for row in rows + ] + + # Combine everything into a table + print(f"{separator}\n{header}\n{separator}\n" + "\n".join(table_rows) + f"\n{separator}") + + 
+
+#
+# Parse arguments
+#
+if len(sys.argv) != 3:
+    print(('usage: %s <sysdig-token> <hostname>' % sys.argv[0]))
+    print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
+    sys.exit(1)
+
+sdc_token = sys.argv[1]
+hostname = sys.argv[2]
+
+sdclient = SdcClient(sdc_token, hostname)
+
+
+#
+# Optional metric name to filter metadata
+#
+metric_name = "up"  # Replace with a specific metric name if needed
+
+#
+# Optional limit
+#
+limit = 10  # Set to None to disable the limit
+
+#
+# Fetch metadata
+#
+ok, response_json = sdclient.get_metadata(metric_name=metric_name, limit=limit)
+
+#
+# Show the result
+#
+if ok:
+    #
+    # Read the response. The JSON looks like this:
+    #
+    # {
+    #     "data": [
+    #         "up": [
+    #             {
+    #                 "type": "gauge",
+    #                 "unit": "number",
+    #                 "help": ""
+    #             }
+    #         ]
+    #         ...
+    #     ],
+    #     "status": "success"
+    # }
+    #
+    metadata = response_json.get("data", [])
+    render_metadata_as_table(metadata)
+else:
+    print("Error retrieving metadata:", response_json)
diff --git a/examples/get_series.py b/examples/get_series.py
new file mode 100644
index 00000000..c7f6d79f
--- /dev/null
+++ b/examples/get_series.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+#
+# This script demonstrates how to use the `get_series` function to retrieve metadata
+# about time series that match a set of label matchers. 
+#
+
+import sys
+from sdcclient import SdcClient
+
+
+def render_json_as_table(results):
+    if not results or len(results) == 0:
+        print("No series found for the given matchers.")
+        return
+
+    # Extract all keys from the JSON objects
+    keys = list(results[0].keys())
+
+    # Calculate the maximum width for each column
+    column_widths = {key: max(len(key), max(len(str(row.get(key, ""))) for row in results)) for key in keys}
+
+    # Create a horizontal separator
+    separator = "+".join("-" * (column_widths[key] + 2) for key in keys)
+
+    # Create the header row
+    header = "|".join(f" {key.ljust(column_widths[key])} " for key in keys)
+
+    # Create the rows for each JSON object
+    rows = []
+    for row in results:
+        rows.append("|".join(f" {str(row.get(key, '')).ljust(column_widths[key])} " for key in keys))
+
+    # Combine everything into a table
+    print(f"+{separator}+\n|{header}|\n+{separator}+\n" + "\n".join(f"|{row}|" for row in rows) + f"\n+{separator}+")
+
+
+#
+# Parse arguments
+#
+if len(sys.argv) != 3:
+    print(('usage: %s <sysdig-token> <hostname>' % sys.argv[0]))
+    print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
+    sys.exit(1)
+
+sdc_token = sys.argv[1]
+hostname = sys.argv[2]
+
+sdclient = SdcClient(sdc_token, hostname)
+
+#
+# Matchers to filter the series. Example: `up` and `process_start_time_seconds{job="prometheus"}`
+#
+match = [
+    'up',
+    'process_start_time_seconds{job="prometheus"}'
+]
+
+#
+# Optional time range
+#
+start = None  # Replace with a timestamp if needed
+end = None  # Replace with a timestamp if needed
+
+#
+# Optional limit
+#
+limit = 5  # Set to None to disable the limit
+
+#
+# Fetch series
+#
+ok, response_json = sdclient.get_series(match, start=start, end=end, limit=limit)
+
+#
+# Show the result
+#
+if ok:
+    #
+    # Read the response. 
The JSON looks like this: + # + # { + # "data": [ + # { + # "__name__": "up", + # "_sysdig_custom_metric": "true", + # "_sysdig_datasource": "prometheus_remote_write", + # "agent_version": "thereal-test-x86_64", + # "instance": "default.test.svc.cluster.local:9544", + # "jenkins_job_build_number": "1234", + # "job": "default-test-20250428112951594", + # "remote_write": "true" + # } + # ], + # "status": "success" + # } + # + data = response_json.get("data", {}) + render_json_as_table(data) +else: + print("Error retrieving series:", response_json) diff --git a/poetry.lock b/poetry.lock index cc2b18bf..630f0916 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. [[package]] name = "alabaster" @@ -6,6 +6,8 @@ version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, @@ -17,6 +19,7 @@ version = "0.1.0" description = "Command Arguments for Humans." 
optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "args-0.1.0.tar.gz", hash = "sha256:a785b8d837625e9b61c39108532d95b85274acd679693b71ebb5156848fcf814"}, ] @@ -27,6 +30,8 @@ version = "2.15.0" description = "Internationalization utilities" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, @@ -44,6 +49,7 @@ version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, @@ -55,6 +61,7 @@ version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, @@ -154,6 +161,7 @@ version = "0.5.1" description = "Python Command Line Interface Tools" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "clint-0.5.1.tar.gz", hash = "sha256:05224c32b1075563d0b16d0015faaf9da43aa214e4a2140e51f08789e7a4c5aa"}, ] @@ -167,6 +175,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "extra == \"docs\" and sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -178,6 +188,7 @@ version = "6.5.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, @@ -232,7 +243,7 @@ files = [ ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "docutils" @@ -240,6 +251,8 @@ version = "0.19" description = "Docutils -- Python Documentation Utilities" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, @@ -251,6 +264,7 @@ version = "1.9.6.1" description = "Python test doubles" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "doublex-1.9.6.1.tar.gz", hash = "sha256:48fbc633598eb913a6eb0c4694f8c040ba57bae9653d45643a84243e0c9f3268"}, ] @@ -265,6 +279,7 @@ version = "0.7.1" description = "Expects matchers for Doublex test doubles assertions" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "doublex-expects-0.7.1.tar.gz", hash = 
"sha256:8040682d97f0a66f632c5df982f78d09aee36b2c4a1eb275b0c596d115f200aa"}, ] @@ -279,6 +294,7 @@ version = "0.9.0" description = "Expressive and extensible TDD/BDD assertion library for Python" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "expects-0.9.0.tar.gz", hash = "sha256:419902ccafe81b7e9559eeb6b7a07ef9d5c5604eddb93000f0642b3b2d594f4c"}, ] @@ -289,6 +305,7 @@ version = "3.9.2" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +groups = ["dev"] files = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, @@ -305,6 +322,7 @@ version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, @@ -316,6 +334,8 @@ version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -327,6 +347,8 @@ version = "7.1.0" description = "Read metadata from Python packages" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\" and extra == \"docs\"" files = [ {file = 
"importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, @@ -338,7 +360,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy ; platform_python_implementation != \"PyPy\"", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "jinja2" @@ -346,6 +368,8 @@ version = "3.1.4" description = "A very fast and expressive template engine." optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, @@ -363,6 +387,7 @@ version = "0.11.3" description = "The definitive testing tool for Python. Born under the banner of Behavior Driven Development." optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "mamba-0.11.3.tar.gz", hash = "sha256:4dcf69e9a53e78d4aa5ec3dee0bb2c65f02ea68a6b62c4275653d7170b8f5fe2"}, ] @@ -377,6 +402,8 @@ version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, @@ -446,6 +473,7 @@ version = "0.6.1" description = "McCabe checker, plugin for flake8" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, @@ -457,6 +485,8 @@ version = "24.0" description = "Core utilities for Python packages" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, @@ -468,6 +498,7 @@ version = "21.10.1" description = "PyYAML-based module to produce pretty and readable YAML-serialized data" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pyaml-21.10.1-py2.py3-none-any.whl", hash = "sha256:19985ed303c3a985de4cf8fd329b6d0a5a5b5c9035ea240eccc709ebacbaf4a0"}, {file = "pyaml-21.10.1.tar.gz", hash = "sha256:c6519fee13bf06e3bb3f20cacdea8eba9140385a7c2546df5dbae4887f768383"}, @@ -482,6 +513,7 @@ version = "2.7.0" description = "Python style guide checker" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, {file = 
"pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, @@ -493,6 +525,7 @@ version = "2.3.1" description = "passive checker of Python programs" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, @@ -504,6 +537,8 @@ version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, @@ -518,6 +553,7 @@ version = "2.1.0" description = "Hamcrest framework for matcher objects" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pyhamcrest-2.1.0-py3-none-any.whl", hash = "sha256:f6913d2f392e30e0375b3ecbd7aee79e5d1faa25d345c8f4ff597665dcac2587"}, {file = "pyhamcrest-2.1.0.tar.gz", hash = "sha256:c6acbec0923d0cb7e72c22af1926f3e7c97b8e8d69fc7498eabacaf7c975bd9c"}, @@ -526,7 +562,7 @@ files = [ [package.extras] dev = ["black", "doc2dash", "flake8", "pyhamcrest[docs,tests]", "pytest-mypy", "towncrier", "tox", "tox-asdf", "twine"] docs = ["alabaster (>=0.7,<1.0)", "sphinx (>=4.0,<5.0)"] -tests = ["coverage[toml]", "dataclasses", "mypy (!=0.940)", "pytest (>=5.0)", "pytest-mypy-plugins", "pytest-sugar", "pytest-xdist", "pyyaml", "types-dataclasses", "types-mock"] +tests = ["coverage[toml]", "dataclasses ; python_version < \"3.7\"", "mypy (!=0.940) ; platform_python_implementation != \"PyPy\"", "pytest (>=5.0)", "pytest-mypy-plugins 
; platform_python_implementation != \"PyPy\"", "pytest-sugar", "pytest-xdist", "pyyaml", "types-dataclasses ; python_version < \"3.7\"", "types-mock"] tests-numpy = ["numpy", "pyhamcrest[tests]"] [[package]] @@ -535,6 +571,8 @@ version = "2024.1" description = "World timezone definitions, modern and historical" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"docs\" and python_version == \"3.8\"" files = [ {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, @@ -546,6 +584,7 @@ version = "6.0.1" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, @@ -606,6 +645,7 @@ version = "2.31.0" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, @@ -627,6 +667,7 @@ version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -641,6 +682,7 @@ version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["dev"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -652,6 +694,8 @@ version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -663,6 +707,8 @@ version = "5.3.0" description = "Python documentation generator" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, @@ -690,7 +736,7 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast ; python_version < \"3.8\""] [[package]] name = "sphinx-rtd-theme" @@ -698,6 +744,8 @@ version = "0.5.1" description = "Read the Docs theme for Sphinx" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "sphinx_rtd_theme-0.5.1-py2.py3-none-any.whl", hash = "sha256:fa6bebd5ab9a73da8e102509a86f3fcc36dec04a0b52ea80e5a033b2aba00113"}, {file = "sphinx_rtd_theme-0.5.1.tar.gz", hash = "sha256:eda689eda0c7301a80cf122dad28b1861e5605cbf455558f3775e1e8200e83a5"}, @@ -715,6 +763,8 @@ version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", 
hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, @@ -730,6 +780,8 @@ version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." optional = true python-versions = ">=3.5" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, @@ -745,6 +797,8 @@ version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, @@ -760,6 +814,8 @@ version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = true python-versions = ">=3.5" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -774,6 +830,8 @@ version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
optional = true python-versions = ">=3.5" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, @@ -789,6 +847,8 @@ version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." optional = true python-versions = ">=3.5" +groups = ["main"] +markers = "extra == \"docs\"" files = [ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, @@ -804,6 +864,8 @@ version = "5.6.1" description = "TatSu takes a grammar in a variation of EBNF as input, and outputs a memoizing PEG/Packrat parser in Python." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" files = [ {file = "TatSu-5.6.1-py2.py3-none-any.whl", hash = "sha256:7cf03e15ab170fd91489b855cd8a4942b6cd1ac5e0eabcb852793a716600dbf0"}, {file = "TatSu-5.6.1.zip", hash = "sha256:6a4f07aa7bfe9dfbee8015824feaf13f0b1a89577e2ee5a4a62c18630c309d4e"}, @@ -818,6 +880,8 @@ version = "5.8.3" description = "TatSu takes a grammar in a variation of EBNF as input, and outputs a memoizing PEG/Packrat parser in Python." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version >= \"3.10\"" files = [ {file = "TatSu-5.8.3-py2.py3-none-any.whl", hash = "sha256:0a836692e67247cad9f251e083b045b13345cc715e69a7fbc16522beaa0f2163"}, {file = "TatSu-5.8.3.zip", hash = "sha256:571ecbcdf33b7828c05e5cd95a8e8ad06af111c2c83a6a245be4d8f7c43de7bb"}, @@ -832,13 +896,14 @@ version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -849,6 +914,8 @@ version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\" and extra == \"docs\"" files = [ {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, @@ -856,12 +923,12 @@ files = [ [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] 
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy ; platform_python_implementation != \"PyPy\"", "pytest-ruff (>=0.2.1)"] [extras] -docs = ["sphinx"] +docs = ["sphinx", "sphinx-rtd-theme"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.8" -content-hash = "cfe44b8465fad9dbbf17e2b2fec64c09f1457e274915e5d342c398d7adf35946" +content-hash = "39512ca5e49ff5549bf68392da28592478e048145152266581a774f25d2b0cc9" diff --git a/sdcclient/_common.py b/sdcclient/_common.py index 72897411..2f0307fa 100644 --- a/sdcclient/_common.py +++ b/sdcclient/_common.py @@ -92,6 +92,8 @@ def _checkResponse(self, res): self.lasterr = '\n'.join(error_msgs) elif 'message' in j: self.lasterr = j['message'] + elif 'error' in j: + self.lasterr = j['error'] else: self.lasterr = 'status code ' + str(errorcode) return False @@ -440,6 +442,181 @@ def get_data(self, metrics, start_ts, end_ts=0, sampling_s=0, verify=self.ssl_verify) return self._request_result(res) + def get_data_promql(self, query, start, end, step, timeout=None, limit=None): + '''**Description** + Evaluate an expression query over a specified time range. + + **Arguments** + - **query**: the PromQL query to execute. + - **start**: the inclusive start timestamp of the query range as RFC3339 or a unix timestamp. + - **end**: the inclusive end timestamp of the query range as RFC3339 or a unix timestamp. + - **step**: the query resolution step width, specified as a duration or a floating-point number of seconds. + - **timeout**: the evaluation timeout. Defaults to and is capped at 2m. + - **limit**: the maximum number of returned series. A value of 0 disables the limit. + + **Success Return Value** + A list of time series that matched the PromQL query, where each series is defined by a unique set of labels (metric) and a list + of timestamped values (values). 
Each entry represents one time series over the queried range, with values sampled at regular intervals. + + **Examples** + - `examples/get_data_promql_simple.py `_ + - `examples/get_data_promql_advanced.py `_ + ''' + params = { + "query": query, + "start": start, + "end": end, + "step": step, + } + + if timeout: + params["timeout"] = timeout + if limit: + params["limit"] = limit + + url = f"{self.url}/prometheus/api/v1/query_range" + res = self.http.get(url, headers=self.hdrs, params=params) + return self._request_result(res) + + def get_data_promql_instant(self, query, time=None, timeout=None, limit=None): + '''**Description** + Evaluate an instant query at a single point in time. + + **Arguments** + - **query**: the PromQL query to execute. + - **time**: The evaluation timestamp as RFC3339 or a unix timestamp. If omitted, the current server time is used. + - **timeout**: the evaluation timeout. Defaults to and is capped at 2m. + - **limit**: the maximum number of returned series. A value of 0 disables the limit. + + **Success Return Value** + A list of time series that matched the PromQL query, where each series is defined by a unique set of labels (metric) and a single + timestamped value (value). Each entry represents the most recent sample of a matching series at the evaluation time. + + **Examples** + - `examples/get_data_promql_instant_simple.py `_ + ''' + params = { + "query": query, + } + + if time: + params["time"] = time + if timeout: + params["timeout"] = timeout + if limit: + params["limit"] = limit + + url = f"{self.url}/prometheus/api/v1/query" + res = self.http.get(url, headers=self.hdrs, params=params) + return self._request_result(res) + + def get_series(self, match, start=None, end=None, limit=None): + '''**Description** + Retrieve metadata about time series that match a set of label matchers. + + **Arguments** + - **match**: a list of PromQL matchers (e.g., `['up', 'node_cpu_seconds_total']`). 
+ - **start**: the inclusive start timestamp of the series query as RFC3339 or a unix timestamp. + - **end**: the inclusive end timestamp of the series query as RFC3339 or a unix timestamp. + - **limit**: the maximum number of returned series. The limit is capped at 10,000. To disable the limit, set the value to 0. + + **Success Return Value** + A list of series that match the provided matchers. + + **Examples** + - `examples/get_series.py` + ''' + params = { + "match[]": match, # `match` should be a list of matchers + } + + if start: + params["start"] = start + if end: + params["end"] = end + if limit: + params["limit"] = limit + + url = f"{self.url}/prometheus/api/v1/series" + res = self.http.get(url, headers=self.hdrs, params=params) + return self._request_result(res) + + def get_labels(self, match=None, limit=None): + '''**Description** + Retrieve metadata about label names. + + **Arguments** + - **match**: a list of PromQL matchers to filter the labels. + - **limit**: the maximum number of returned labels. A value of 0 disables the limit. + + **Success Return Value** + A list of available labels. + + **Examples** + - `examples/get_labels.py` + ''' + params = {} + + if match: + params["match[]"] = match # `match` should be a list of matchers + if limit: + params["limit"] = limit + + url = f"{self.url}/prometheus/api/v1/labels" + res = self.http.get(url, headers=self.hdrs, params=params) + return self._request_result(res) + + def get_label_values(self, label_name, match=None, limit=None): + '''**Description** + Retrieve the values for a specific label. + + **Arguments** + - **label_name**: the name of the label to retrieve values for. + - **match**: a list of PromQL matchers to filter the label values. + - **limit**: the maximum number of returned values. A value of 0 disables the limit. + + **Success Return Value** + A list of values for the specified label. 
+ + **Examples** + - `examples/get_label_values.py` + ''' + params = {} + + if match: + params["match[]"] = match # `match` should be a list of matchers + if limit: + params["limit"] = limit + + url = f"{self.url}/prometheus/api/v1/label/{label_name}/values" + res = self.http.get(url, headers=self.hdrs, params=params) + return self._request_result(res) + + def get_metadata(self, metric_name=None, limit=None): + '''**Description** + Retrieve metadata about metrics. + + **Arguments** + - **metric_name**: the metric name to filter metadata for. If omitted, metadata for all metrics is retrieved. + - **limit**: the maximum number of returned metadata entries. A value of 0 disables the limit. + + **Success Return Value** + A list of metadata entries for the specified metric(s). + + **Examples** + - `examples/get_metadata.py` + ''' + params = {} + + if metric_name: + params["metric"] = metric_name + if limit: + params["limit"] = limit + + url = f"{self.url}/prometheus/api/v1/metadata" + res = self.http.get(url, headers=self.hdrs, params=params) + return self._request_result(res) + def get_sysdig_captures(self, from_sec=None, to_sec=None, scope_filter=None): '''**Description** Returns the list of sysdig captures for the user.