diff --git a/.devcontainer/withenvfile/devcontainer.json b/.devcontainer/withenvfile/devcontainer.json index c3eba252..0da10629 100644 --- a/.devcontainer/withenvfile/devcontainer.json +++ b/.devcontainer/withenvfile/devcontainer.json @@ -4,14 +4,33 @@ // Features to add to the dev container. More info: https://containers.dev/features. "features": { "ghcr.io/devcontainers/features/terraform:1": {}, - "ghcr.io/devcontainers/features/github-cli:1": {} + "ghcr.io/devcontainers/features/github-cli:1": {}, + "ghcr.io/devcontainers/features/python:1": {}, + "ghcr.io/devcontainers/features/go:1": {} }, "customizations": { "vscode": { - "settings": {}, "extensions": [ + "ms-python.vscode-pylance", + "ms-python.python", + "charliermarsh.ruff", + "Github.copilot", "HashiCorp.terraform" - ] + ], + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/usr/local/bin/python", + "python.linting.enabled": true, + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "[python]": { + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.fixAll": "always" + } + }, + "python.formatting.provider": "black" + } }, "codespaces": {} }, @@ -28,5 +47,7 @@ "runArgs": [ "--env-file", ".devcontainer/devcontainer.env" - ] + ], + "postStartCommand": "go install github.com/hashicorp/terraform-config-inspect@latest" + } \ No newline at end of file diff --git a/.gitignore b/.gitignore index 13842d55..32b15ebb 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,166 @@ terraform.tfvars # Any packaged files *.zip my_* -.env \ No newline at end of file +.env + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# 
PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + diff --git a/tests/compliance/libs/constants/files_folders.py b/tests/compliance/libs/constants/files_folders.py new file mode 100644 index 00000000..2a9c9af5 --- /dev/null +++ b/tests/compliance/libs/constants/files_folders.py @@ -0,0 +1,7 @@ +# Description: Constants for file paths of the CSV and PNG files +from pathlib import Path + +# Folders +ROOT = Path(__file__, "..", "..", "..", "..", "..").resolve() + +FOLDER_SCRIPTS = Path(ROOT, "released", "discovery_center") diff --git a/tests/compliance/libs/constants/providers.py b/tests/compliance/libs/constants/providers.py new file mode 100644 index 00000000..2c5e0602 --- /dev/null +++ b/tests/compliance/libs/constants/providers.py @@ -0,0 +1,2 @@ +PROVIDER_BTP = "btp" +PROVIDER_CLOUDFOUNDRY = "cloudfoundry" diff --git a/tests/compliance/libs/constants/variables.py b/tests/compliance/libs/constants/variables.py new file mode 100644 index 00000000..9344a512 --- /dev/null +++ b/tests/compliance/libs/constants/variables.py @@ -0,0 +1,20 @@ +BTP_PROVIDER_MANDATORY_VARIABLES = ["globalaccount", "region", 
"""Wrapper around the ``terraform-config-inspect`` CLI tool."""
import json
import subprocess
from pathlib import Path


def get_tf_definitions(folder: Path) -> dict:
    """Inspect the Terraform configuration found in *folder*.

    Runs ``terraform-config-inspect --json`` against the folder and returns
    the parsed JSON document as a dictionary (keys such as ``variables``,
    ``managed_resources`` and ``required_providers``).

    :param folder: Path to the folder containing the ``.tf`` files.
    :return: Parsed inspection result.
    :raises FileNotFoundError: If ``terraform-config-inspect`` is not installed.
    :raises subprocess.CalledProcessError: If the tool exits non-zero.
    :raises json.JSONDecodeError: If the tool emits malformed JSON.
    """
    # check=True surfaces tool failures immediately instead of letting
    # json.loads fail later on empty output; text=True replaces the manual
    # UTF-8 decode of stdout.  (The original annotated the return type as
    # ``json``, which is a module, not a type - fixed to ``dict``.)
    completed = subprocess.run(
        ["terraform-config-inspect", "--json", str(folder)],
        capture_output=True,
        text=True,
        check=True,
    )
    return json.loads(completed.stdout)
from dataclasses import dataclass

from libs.model.provider import TF_provider as ProviderDefinition
from libs.model.finding import Finding
from libs.constants.providers import PROVIDER_BTP, PROVIDER_CLOUDFOUNDRY
from libs.constants.variables import BTP_PROVIDER_MANDATORY_VARIABLES, BTP_PROVIDER_MANDATORY_RESOURCES, QAS_STEP1_BTP_PROVIDER_MANDATORY_VARIABLES, QAS_STEP2_BTP_PROVIDER_MANDATORY_VARIABLES
from libs.constants.variables import CF_PROVIDER_MANDATORY_VARIABLES, CF_PROVIDER_MANDATORY_RESOURCES, QAS_STEP1_CF_PROVIDER_MANDATORY_VARIABLES, QAS_STEP2_CF_PROVIDER_MANDATORY_VARIABLES


@dataclass
class TF_Provider(ProviderDefinition):
    """Validates a Terraform folder against the mandatory variables and
    resources of one provider, collecting any violations in ``self.findings``.
    """

    def __init__(self, folder, provider, tf_definitions):
        """Inspect *tf_definitions* (output of terraform-config-inspect) for
        the given *provider* in *folder*.

        :param folder: Path of the inspected Terraform folder.
        :param provider: Provider key, e.g. "btp" or "cloudfoundry".
        :param tf_definitions: Parsed terraform-config-inspect JSON dict.
        """
        super().__init__(folder, tf_definitions)
        self.folder = folder

        self.mandatory_variables, self.mandatory_resources = determine_variables_and_resources(
            folder=folder, provider=provider)

        # Checks only apply to providers we have mandatory asset lists for.
        # NOTE: the previous version assigned ``self = None`` for unknown
        # providers - rebinding ``self`` has no effect on the constructed
        # instance, so it was dead code; an unknown provider simply yields an
        # instance without findings, which is the behavior callers rely on.
        if provider in (PROVIDER_BTP, PROVIDER_CLOUDFOUNDRY):
            self._check_variables_mandatory(provider, tf_definitions)
            self._check_resources_mandatory(provider, tf_definitions)

    def _check_variables_mandatory(self, provider, tf_definitions):
        """Append a Finding for every mandatory variable missing from the config."""
        if not self.mandatory_variables:
            return
        for variable in self.mandatory_variables:
            if variable not in tf_definitions["variables"]:
                self.findings.append(Finding(provider=provider,
                                             folder=self.folder,
                                             asset=variable,
                                             type="variable not defined",
                                             severity="error"))

    def _check_resources_mandatory(self, provider, tf_definitions):
        """Append a Finding for every mandatory resource missing from the config."""
        if not self.mandatory_resources:
            return
        defined = tf_definitions["managed_resources"]
        for resource in self.mandatory_resources:
            # A managed resource key has the form "<type>.<name>"; accept an
            # exact key match or a match on the dotted key's components.
            if resource in defined:
                continue
            if any(resource in key.split(".") for key in defined):
                continue
            self.findings.append(Finding(provider=provider,
                                         folder=self.folder,
                                         asset=resource,
                                         type="resource not defined",
                                         severity="error"))


def determine_variables_and_resources(folder, provider):
    """Return the ``(mandatory_variables, mandatory_resources)`` lists that
    apply to *provider* in *folder*.

    QAS folders are deployed in two steps; a "step1"/"step2" component in the
    folder path selects the step-specific variable list (those steps have no
    mandatory resource list, so resources stay ``None``).

    :param folder: Folder path; scanned for "step1"/"step2" markers.
    :param provider: Provider key, e.g. "btp" or "cloudfoundry".
    :return: Tuple of (variables, resources); ``(None, None)`` for unknown providers.
    """
    step = None
    if "step1" in str(folder):
        step = "step1"
    if "step2" in str(folder):
        step = "step2"

    if provider == PROVIDER_BTP:
        if step is None:
            return BTP_PROVIDER_MANDATORY_VARIABLES, BTP_PROVIDER_MANDATORY_RESOURCES
        if step == "step1":
            return QAS_STEP1_BTP_PROVIDER_MANDATORY_VARIABLES, None
        return QAS_STEP2_BTP_PROVIDER_MANDATORY_VARIABLES, None

    if provider == PROVIDER_CLOUDFOUNDRY:
        if step is None:
            return CF_PROVIDER_MANDATORY_VARIABLES, CF_PROVIDER_MANDATORY_RESOURCES
        if step == "step1":
            return QAS_STEP1_CF_PROVIDER_MANDATORY_VARIABLES, None
        return QAS_STEP2_CF_PROVIDER_MANDATORY_VARIABLES, None

    return None, None
"""Helpers for writing files and discovering Terraform script folders."""
from pathlib import Path
from typing import List


# function to write a string into a file
def write_string_to_file(string_data, file_path):
    """
    Writes a given string into a file specified by the file_path.
    If the file does not exist, it will be created.
    If the file exists, it will be overwritten.

    :param string_data: The string data to write into the file.
    :param file_path: Path to the file where the string will be written.
    """
    with open(file_path, 'w') as file:
        file.write(string_data)


# read all folders in the tfscripts folder
def get_folders(folder_to_scan: Path) -> List[Path]:
    """
    Get all folders and subfolders of *folder_to_scan* that contain at least
    one .tf file.

    :param folder_to_scan: Root folder to scan recursively.
    Returns:
        List[Path]: A list of Path objects representing the folders that contain .tf files.
    """
    # BUG FIX: the previous check used re.match(r".*\.tf", file.name), which
    # is not anchored at the end of the string and therefore also matched
    # ".tfvars"/".tfstate" files, wrongly including folders with no .tf files.
    # Comparing the exact suffix restricts the match to real .tf files.
    return [
        folder
        for folder in folder_to_scan.glob("**/*")
        if folder.is_dir()
        and any(entry.suffix == ".tf" for entry in folder.iterdir())
    ]
"""Compliance scan entry point: inspect Terraform folders and report findings."""
import sys
from pathlib import Path

from libs.constants.files_folders import FOLDER_SCRIPTS
from libs.inspect import get_tf_definitions
from libs.inspection.provider import TF_Provider
from libs.io.files import get_folders

# Folder to start scanning: first CLI argument if provided, otherwise the
# default defined in FOLDER_SCRIPTS.
# BUG FIX: sys.argv entries are plain strings, but get_folders() expects a
# pathlib.Path (it calls .glob on the argument) - wrap the argument in Path().
if len(sys.argv) > 1:
    folder_to_scan = Path(sys.argv[1])
else:
    folder_to_scan = FOLDER_SCRIPTS

found_findings = False

# Inspect every folder that contains .tf files.
for folder in get_folders(folder_to_scan):
    all_findings = []
    defs = get_tf_definitions(folder)

    # Validate the folder once per required provider.
    for provider in defs["required_providers"]:
        result = TF_Provider(folder=folder, provider=provider,
                             tf_definitions=defs)
        if len(result.findings) > 0:
            all_findings.extend(result.findings)

    if len(all_findings) > 0:
        # Build a readable report block for this folder.
        separator = "# " + "-" * 120 + "\n"
        message_text = separator
        message_text += f"# Findings in {folder}\n"
        message_text += separator

        for finding in all_findings:
            # Only error-severity findings are reported and fail the scan.
            if finding.severity == "error":
                message_text += f"# - {finding.type} ({finding.provider} provider) '{finding.asset}'\n"
                found_findings = True

        print(message_text)
        # Optionally persist the report next to the scripts:
        # filename = Path(folder, "TF_compliance_TODO.txt")
        # write_string_to_file(string_data=message_text, file_path=filename)

# Non-zero exit code signals findings to the CI pipeline.
sys.exit(1 if found_findings else 0)