From 9249a6fe8d86fa7f1b9d69d30126771ec65a179a Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Wed, 23 Apr 2025 09:43:49 +0200 Subject: [PATCH 01/13] Add branch parameter to clone methods --- changelog/+398b0883.added.md | 1 + infrahub_sdk/client.py | 13 +++++++++---- infrahub_sdk/generator.py | 4 +--- infrahub_sdk/recorder.py | 3 +++ tests/unit/sdk/test_client.py | 28 +++++++++++++++++++++++++++- 5 files changed, 41 insertions(+), 8 deletions(-) create mode 100644 changelog/+398b0883.added.md diff --git a/changelog/+398b0883.added.md b/changelog/+398b0883.added.md new file mode 100644 index 00000000..f9554fab --- /dev/null +++ b/changelog/+398b0883.added.md @@ -0,0 +1 @@ +Added a "branch" parameter to the client.clone() method to allow properly cloning a client that targets another branch. diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index fffa8164..8d9de6d2 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -271,6 +271,11 @@ def _build_ip_prefix_allocation_query( input_data={"data": input_data}, ) + def _clone_config(self, branch: str | None = None) -> Config: + config = copy.deepcopy(self.config) + config.default_branch = branch or config.default_branch + return config + class InfrahubClient(BaseClient): """GraphQL Client to interact with Infrahub.""" @@ -847,9 +852,9 @@ async def process_non_batch() -> tuple[list[InfrahubNode], list[InfrahubNode]]: self.store.set(node=node) return nodes - def clone(self) -> InfrahubClient: + def clone(self, branch: str | None = None) -> InfrahubClient: """Return a cloned version of the client using the same configuration""" - return InfrahubClient(config=self.config) + return InfrahubClient(config=self._clone_config(branch=branch)) async def execute_graphql( self, @@ -1591,9 +1596,9 @@ def delete(self, kind: str | type[SchemaTypeSync], id: str, branch: str | None = node = InfrahubNodeSync(client=self, schema=schema, branch=branch, data={"id": id}) node.delete() - def clone(self) -> InfrahubClientSync: + def clone(self, branch: str | None = None) -> InfrahubClientSync: """Return a cloned version of the client using the same configuration""" - return InfrahubClientSync(config=self.config) + return InfrahubClientSync(config=self._clone_config(branch=branch)) def execute_graphql( self, diff --git a/infrahub_sdk/generator.py b/infrahub_sdk/generator.py index 854b3cb4..98fa689f 100644 --- a/infrahub_sdk/generator.py +++ b/infrahub_sdk/generator.py @@ -38,9 +38,7 @@ def __init__( self.params = params or {} self.root_directory = root_directory or os.getcwd() self.generator_instance = generator_instance - self._init_client = client.clone() - self._init_client.config.default_branch = self._init_client.default_branch = self.branch_name - self._init_client.store._default_branch = self.branch_name + self._init_client = client.clone(branch=self.branch_name) self._client: InfrahubClient | None = None self._nodes: list[InfrahubNode] = [] self._related_nodes: list[InfrahubNode] = [] diff --git a/infrahub_sdk/recorder.py b/infrahub_sdk/recorder.py index bf2a715a..40c45dd3 100644 --- a/infrahub_sdk/recorder.py +++ b/infrahub_sdk/recorder.py @@ -31,6 +31,9 @@ def record(response: httpx.Response) -> None: def default(cls) -> NoRecorder: return cls() + def __eq__(self, other: object) -> bool: + return isinstance(other, NoRecorder) + class JSONRecorder(BaseSettings): model_config = SettingsConfigDict(env_prefix="INFRAHUB_JSON_RECORDER_") diff --git a/tests/unit/sdk/test_client.py b/tests/unit/sdk/test_client.py index 4f7f3ac3..31c38294 
100644 --- a/tests/unit/sdk/test_client.py +++ b/tests/unit/sdk/test_client.py @@ -6,6 +6,7 @@ from infrahub_sdk import InfrahubClient, InfrahubClientSync from infrahub_sdk.exceptions import NodeNotFoundError from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync +from tests.unit.sdk.conftest import BothClients pytestmark = pytest.mark.httpx_mock(can_send_already_matched_responses=True) @@ -761,12 +762,37 @@ async def test_query_echo(httpx_mock: HTTPXMock, echo_clients, client_type): @pytest.mark.parametrize("client_type", client_types) -async def test_clone(clients, client_type): +async def test_clone(clients: BothClients, client_type: str) -> None: + """Validate that the configuration of a cloned client is a replica of the original client""" if client_type == "standard": clone = clients.standard.clone() assert clone.config == clients.standard.config assert isinstance(clone, InfrahubClient) + assert clients.standard.default_branch == clone.default_branch else: clone = clients.sync.clone() assert clone.config == clients.sync.config assert isinstance(clone, InfrahubClientSync) + assert clients.sync.default_branch == clone.default_branch + + +@pytest.mark.parametrize("client_type", client_types) +async def test_clone_define_branch(clients: BothClients, client_type: str) -> None: + """Validate that the clone branch parameter sets the correct branch of the cloned client""" + clone_branch = "my_other_branch" + if client_type == "standard": + original_branch = clients.standard.default_branch + clone = clients.standard.clone(branch=clone_branch) + assert clients.standard.store._default_branch == original_branch + assert isinstance(clone, InfrahubClient) + assert clients.standard.default_branch != clone.default_branch + else: + original_branch = clients.standard.default_branch + clone = clients.sync.clone(branch="my_other_branch") + assert clients.sync.store._default_branch == original_branch + assert isinstance(clone, InfrahubClientSync) + assert clients.sync.default_branch != clone.default_branch + + assert clone.default_branch == clone_branch + assert original_branch != clone_branch + assert clone.store._default_branch == clone_branch From 0a0b54040bb3ec3be66f3160755d05efb24c47b1 Mon Sep 17 00:00:00 2001 From: Brett Lykins Date: Mon, 28 Apr 2025 10:48:14 -0400 Subject: [PATCH 02/13] 1.11.1 release (#378) * 1.11.1 release prep --- CHANGELOG.md | 6 ++++++ .../+hfid_support_cardinality_many_relationships.changed.md | 1 - pyproject.toml | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) delete mode 100644 changelog/+hfid_support_cardinality_many_relationships.changed.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 1ce592d0..fe05e3ff 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,12 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang +## [1.11.1](https://github.com/opsmill/infrahub-sdk-python/tree/v1.11.1) - 2025-04-28 + +### Changed + +- Set the HFID on related nodes for cardinality many relationships, and add HFID support to the RelationshipManager `add`, `extend` and `remove` methods. 
+ ## [1.11.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.11.0) - 2025-04-17 ### Deprecated diff --git a/changelog/+hfid_support_cardinality_many_relationships.changed.md b/changelog/+hfid_support_cardinality_many_relationships.changed.md deleted file mode 100644 index 373120d2..00000000 --- a/changelog/+hfid_support_cardinality_many_relationships.changed.md +++ /dev/null @@ -1 +0,0 @@ -Set the HFID on related nodes for cardinality many relationships and add hfid support to the RelationshipManager add, extend and remove methods diff --git a/pyproject.toml b/pyproject.toml index c4b929e7..c3ccc170 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "infrahub-sdk" -version = "1.11.0" +version = "1.11.1" description = "Python Client to interact with Infrahub" authors = ["OpsMill "] readme = "README.md" From cfbb2176639630c9b45c9843fefbc13eef73269f Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Thu, 24 Apr 2025 16:22:35 +0200 Subject: [PATCH 03/13] Add ability to use convert_query_response with Python Transforms Fixes #281 --- changelog/281.added.md | 1 + infrahub_sdk/client.py | 9 +- infrahub_sdk/config.py | 17 ++ infrahub_sdk/ctl/cli_commands.py | 8 +- infrahub_sdk/ctl/generator.py | 4 +- infrahub_sdk/generator.py | 76 ++------ infrahub_sdk/operation.py | 80 +++++++++ infrahub_sdk/protocols.py | 12 ++ infrahub_sdk/schema/repository.py | 4 + infrahub_sdk/transforms.py | 42 ++--- .../repos/ctl_integration/.infrahub.yml | 32 ++++ .../generators/tag_generator.py | 15 ++ .../generators/tag_generator_convert.py | 16 ++ .../ctl_integration/queries/animal_person.gql | 27 +++ .../transforms/animal_person.py | 16 ++ .../ctl_integration/transforms/converted.py | 18 ++ tests/integration/test_infrahubctl.py | 163 ++++++++++++++++++ 17 files changed, 439 insertions(+), 101 deletions(-) create mode 100644 changelog/281.added.md create mode 100644 infrahub_sdk/operation.py create mode 100644 tests/fixtures/repos/ctl_integration/.infrahub.yml create mode 100644 tests/fixtures/repos/ctl_integration/generators/tag_generator.py create mode 100644 tests/fixtures/repos/ctl_integration/generators/tag_generator_convert.py create mode 100644 tests/fixtures/repos/ctl_integration/queries/animal_person.gql create mode 100644 tests/fixtures/repos/ctl_integration/transforms/animal_person.py create mode 100644 tests/fixtures/repos/ctl_integration/transforms/converted.py create mode 100644 tests/integration/test_infrahubctl.py diff --git a/changelog/281.added.md b/changelog/281.added.md new file mode 100644 index 00000000..00338f66 --- /dev/null +++ b/changelog/281.added.md @@ -0,0 +1 @@ +Added ability to convert the query response to InfrahubNode objects when using Python Transforms in the same way you can with Generators. 
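[Editor's note — illustrative sketch, not part of the patch.] For context on the changelog entry above: the fixture transforms/converted.py added later in this patch shows the intended usage of the new option. The sketch below mirrors that fixture. A Python Transform registered in .infrahub.yml under python_transforms with convert_query_response: true can read the converted InfrahubNode objects from self.store instead of walking only the raw GraphQL payload. The class name, the TestingPerson kind and the animal_person query are taken from the test fixtures and are illustrative only.

```python
from typing import Any

from infrahub_sdk.transforms import InfrahubTransform


class AnimalPersonReport(InfrahubTransform):
    query = "animal_person"  # name of the GraphQL query registered in .infrahub.yml

    async def transform(self, data: dict) -> dict[str, Any]:
        # The raw GraphQL payload is still passed in as before ...
        name: str = data["TestingPerson"]["edges"][0]["node"]["name"]["value"]

        # ... and with convert_query_response: true the same results are also
        # converted to InfrahubNode objects and placed in self.store.
        person = self.store.get(key=name, kind="TestingPerson")

        return {"person": person.name.value, "herd_size": len(person.animals.peers)}
```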
diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index 8d9de6d2..4f6aa8a2 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -271,11 +271,6 @@ def _build_ip_prefix_allocation_query( input_data={"data": input_data}, ) - def _clone_config(self, branch: str | None = None) -> Config: - config = copy.deepcopy(self.config) - config.default_branch = branch or config.default_branch - return config - class InfrahubClient(BaseClient): """GraphQL Client to interact with Infrahub.""" @@ -854,7 +849,7 @@ async def process_non_batch() -> tuple[list[InfrahubNode], list[InfrahubNode]]: def clone(self, branch: str | None = None) -> InfrahubClient: """Return a cloned version of the client using the same configuration""" - return InfrahubClient(config=self._clone_config(branch=branch)) + return InfrahubClient(config=self.config.clone(branch=branch)) async def execute_graphql( self, @@ -1598,7 +1593,7 @@ def delete(self, kind: str | type[SchemaTypeSync], id: str, branch: str | None = def clone(self, branch: str | None = None) -> InfrahubClientSync: """Return a cloned version of the client using the same configuration""" - return InfrahubClientSync(config=self._clone_config(branch=branch)) + return InfrahubClientSync(config=self.config.clone(branch=branch)) def execute_graphql( self, diff --git a/infrahub_sdk/config.py b/infrahub_sdk/config.py index 51c790dc..b0a2402a 100644 --- a/infrahub_sdk/config.py +++ b/infrahub_sdk/config.py @@ -1,5 +1,6 @@ from __future__ import annotations +from copy import deepcopy from typing import Any from pydantic import Field, field_validator, model_validator @@ -158,3 +159,19 @@ def set_custom_recorder(cls, values: dict[str, Any]) -> dict[str, Any]: elif values.get("recorder") == RecorderType.JSON and "custom_recorder" not in values: values["custom_recorder"] = JSONRecorder() return values + + def clone(self, branch: str | None = None) -> Config: + config: dict[str, Any] = { + "default_branch": branch or self.default_branch, + "recorder": self.recorder, + "custom_recorder": self.custom_recorder, + "requester": self.requester, + "sync_requester": self.sync_requester, + "log": self.log, + } + covered_keys = list(config.keys()) + for field in Config.model_fields.keys(): + if field not in covered_keys: + config[field] = deepcopy(getattr(self, field)) + + return Config(**config) diff --git a/infrahub_sdk/ctl/cli_commands.py b/infrahub_sdk/ctl/cli_commands.py index 13910621..605743fa 100644 --- a/infrahub_sdk/ctl/cli_commands.py +++ b/infrahub_sdk/ctl/cli_commands.py @@ -41,6 +41,7 @@ ) from ..ctl.validate import app as validate_app from ..exceptions import GraphQLError, ModuleImportError +from ..node import InfrahubNode from ..protocols_generator.generator import CodeGenerator from ..schema import MainSchemaTypesAll, SchemaRoot from ..template import Jinja2Template @@ -330,7 +331,12 @@ def transform( console.print(f"[red]{exc.message}") raise typer.Exit(1) from exc - transform = transform_class(client=client, branch=branch) + transform = transform_class( + client=client, + branch=branch, + infrahub_node=InfrahubNode, + convert_query_response=transform_config.convert_query_response, + ) # Get data query_str = repository_config.get_query(name=transform.query).load_query() data = asyncio.run( diff --git a/infrahub_sdk/ctl/generator.py b/infrahub_sdk/ctl/generator.py index 22501568..49019196 100644 --- a/infrahub_sdk/ctl/generator.py +++ b/infrahub_sdk/ctl/generator.py @@ -62,7 +62,7 @@ async def run( generator = generator_class( 
query=generator_config.query, client=client, - branch=branch, + branch=branch or "", params=variables_dict, convert_query_response=generator_config.convert_query_response, infrahub_node=InfrahubNode, @@ -91,7 +91,7 @@ async def run( generator = generator_class( query=generator_config.query, client=client, - branch=branch, + branch=branch or "", params=params, convert_query_response=generator_config.convert_query_response, infrahub_node=InfrahubNode, diff --git a/infrahub_sdk/generator.py b/infrahub_sdk/generator.py index 98fa689f..3c9d26d7 100644 --- a/infrahub_sdk/generator.py +++ b/infrahub_sdk/generator.py @@ -1,22 +1,19 @@ from __future__ import annotations import logging -import os from abc import abstractmethod from typing import TYPE_CHECKING -from infrahub_sdk.repository import GitRepoManager - from .exceptions import UninitializedError +from .operation import InfrahubOperation if TYPE_CHECKING: from .client import InfrahubClient from .context import RequestContext from .node import InfrahubNode - from .store import NodeStore -class InfrahubGenerator: +class InfrahubGenerator(InfrahubOperation): """Infrahub Generator class""" def __init__( @@ -24,7 +21,7 @@ def __init__( query: str, client: InfrahubClient, infrahub_node: type[InfrahubNode], - branch: str | None = None, + branch: str = "", root_directory: str = "", generator_instance: str = "", params: dict | None = None, @@ -33,35 +30,21 @@ def __init__( request_context: RequestContext | None = None, ) -> None: self.query = query - self.branch = branch - self.git: GitRepoManager | None = None + + super().__init__( + client=client, + infrahub_node=infrahub_node, + convert_query_response=convert_query_response, + branch=branch, + root_directory=root_directory, + ) + self.params = params or {} - self.root_directory = root_directory or os.getcwd() self.generator_instance = generator_instance - self._init_client = client.clone(branch=self.branch_name) self._client: InfrahubClient | None = None - self._nodes: list[InfrahubNode] = [] - self._related_nodes: list[InfrahubNode] = [] - self.infrahub_node = infrahub_node - self.convert_query_response = convert_query_response self.logger = logger if logger else logging.getLogger("infrahub.tasks") self.request_context = request_context - @property - def store(self) -> NodeStore: - """The store will be populated with nodes based on the query during the collection of data if activated""" - return self._init_client.store - - @property - def nodes(self) -> list[InfrahubNode]: - """Returns nodes collected and parsed during the data collection process if this feature is enables""" - return self._nodes - - @property - def related_nodes(self) -> list[InfrahubNode]: - """Returns nodes collected and parsed during the data collection process if this feature is enables""" - return self._related_nodes - @property def subscribers(self) -> list[str] | None: if self.generator_instance: @@ -78,20 +61,6 @@ def client(self) -> InfrahubClient: def client(self, value: InfrahubClient) -> None: self._client = value - @property - def branch_name(self) -> str: - """Return the name of the current git branch.""" - - if self.branch: - return self.branch - - if not self.git: - self.git = GitRepoManager(self.root_directory) - - self.branch = str(self.git.active_branch) - - return self.branch - async def collect_data(self) -> dict: """Query the result of the GraphQL Query defined in self.query and return the result""" @@ -117,27 +86,6 @@ async def run(self, identifier: str, data: dict | None = None) -> None: ) as self.client: 
await self.generate(data=unpacked) - async def process_nodes(self, data: dict) -> None: - if not self.convert_query_response: - return - - await self._init_client.schema.all(branch=self.branch_name) - - for kind in data: - if kind in self._init_client.schema.cache[self.branch_name].nodes.keys(): - for result in data[kind].get("edges", []): - node = await self.infrahub_node.from_graphql( - client=self._init_client, branch=self.branch_name, data=result - ) - self._nodes.append(node) - await node._process_relationships( - node_data=result, branch=self.branch_name, related_nodes=self._related_nodes - ) - - for node in self._nodes + self._related_nodes: - if node.id: - self._init_client.store.set(node=node) - @abstractmethod async def generate(self, data: dict) -> None: """Code to run the generator diff --git a/infrahub_sdk/operation.py b/infrahub_sdk/operation.py new file mode 100644 index 00000000..f52db43d --- /dev/null +++ b/infrahub_sdk/operation.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +import os +from typing import TYPE_CHECKING + +from .repository import GitRepoManager + +if TYPE_CHECKING: + from . import InfrahubClient + from .node import InfrahubNode + from .store import NodeStore + + +class InfrahubOperation: + def __init__( + self, + client: InfrahubClient, + infrahub_node: type[InfrahubNode], + convert_query_response: bool, + branch: str, + root_directory: str, + ): + self.branch = branch + self.convert_query_response = convert_query_response + self.root_directory = root_directory or os.getcwd() + self.infrahub_node = infrahub_node + self._nodes: list[InfrahubNode] = [] + self._related_nodes: list[InfrahubNode] = [] + self._init_client = client.clone(branch=self.branch_name) + self.git: GitRepoManager | None = None + + @property + def branch_name(self) -> str: + """Return the name of the current git branch.""" + + if self.branch: + return self.branch + + if not hasattr(self, "git") or not self.git: + self.git = GitRepoManager(self.root_directory) + + self.branch = str(self.git.active_branch) + + return self.branch + + @property + def store(self) -> NodeStore: + """The store will be populated with nodes based on the query during the collection of data if activated""" + return self._init_client.store + + @property + def nodes(self) -> list[InfrahubNode]: + """Returns nodes collected and parsed during the data collection process if this feature is enabled""" + return self._nodes + + @property + def related_nodes(self) -> list[InfrahubNode]: + """Returns nodes collected and parsed during the data collection process if this feature is enabled""" + return self._related_nodes + + async def process_nodes(self, data: dict) -> None: + if not self.convert_query_response: + return + + await self._init_client.schema.all(branch=self.branch_name) + + for kind in data: + if kind in self._init_client.schema.cache[self.branch_name].nodes.keys(): + for result in data[kind].get("edges", []): + node = await self.infrahub_node.from_graphql( + client=self._init_client, branch=self.branch_name, data=result + ) + self._nodes.append(node) + await node._process_relationships( + node_data=result, branch=self.branch_name, related_nodes=self._related_nodes + ) + + for node in self._nodes + self._related_nodes: + if node.id: + self._init_client.store.set(node=node) diff --git a/infrahub_sdk/protocols.py b/infrahub_sdk/protocols.py index 2ec1d0f3..7a69b5f8 100644 --- a/infrahub_sdk/protocols.py +++ b/infrahub_sdk/protocols.py @@ -154,6 +154,10 @@ class CoreMenu(CoreNode): children: 
RelationshipManager +class CoreObjectComponentTemplate(CoreNode): + template_name: String + + class CoreObjectTemplate(CoreNode): template_name: String @@ -205,6 +209,7 @@ class CoreWebhook(CoreNode): name: String event_type: Enum branch_scope: Dropdown + node_kind: StringOptional description: StringOptional url: URL validate_certificates: BooleanOptional @@ -479,6 +484,7 @@ class CoreTransformJinja2(CoreTransformation): class CoreTransformPython(CoreTransformation): file_path: String class_name: String + convert_query_response: BooleanOptional class CoreUserValidator(CoreValidator): @@ -625,6 +631,10 @@ class CoreMenuSync(CoreNodeSync): children: RelationshipManagerSync +class CoreObjectComponentTemplateSync(CoreNodeSync): + template_name: String + + class CoreObjectTemplateSync(CoreNodeSync): template_name: String @@ -676,6 +686,7 @@ class CoreWebhookSync(CoreNodeSync): name: String event_type: Enum branch_scope: Dropdown + node_kind: StringOptional description: StringOptional url: URL validate_certificates: BooleanOptional @@ -950,6 +961,7 @@ class CoreTransformJinja2Sync(CoreTransformationSync): class CoreTransformPythonSync(CoreTransformationSync): file_path: String class_name: String + convert_query_response: BooleanOptional class CoreUserValidatorSync(CoreValidatorSync): diff --git a/infrahub_sdk/schema/repository.py b/infrahub_sdk/schema/repository.py index 1628fd6d..b5c58d2f 100644 --- a/infrahub_sdk/schema/repository.py +++ b/infrahub_sdk/schema/repository.py @@ -117,6 +117,10 @@ class InfrahubPythonTransformConfig(InfrahubRepositoryConfigElement): name: str = Field(..., description="The name of the Transform") file_path: Path = Field(..., description="The file within the repository with the transform code.") class_name: str = Field(default="Transform", description="The name of the transform class to run.") + convert_query_response: bool = Field( + default=False, + description="Decide if the transform should convert the result of the GraphQL query to SDK InfrahubNode objects.", + ) def load_class(self, import_root: str | None = None, relative_path: str | None = None) -> type[InfrahubTransform]: module = import_module(module_path=self.file_path, import_root=import_root, relative_path=relative_path) diff --git a/infrahub_sdk/transforms.py b/infrahub_sdk/transforms.py index 79ad74f5..29ed1136 100644 --- a/infrahub_sdk/transforms.py +++ b/infrahub_sdk/transforms.py @@ -5,34 +5,38 @@ from abc import abstractmethod from typing import TYPE_CHECKING, Any -from infrahub_sdk.repository import GitRepoManager - -from .exceptions import UninitializedError +from .operation import InfrahubOperation if TYPE_CHECKING: from . 
import InfrahubClient + from .node import InfrahubNode INFRAHUB_TRANSFORM_VARIABLE_TO_IMPORT = "INFRAHUB_TRANSFORMS" -class InfrahubTransform: +class InfrahubTransform(InfrahubOperation): name: str | None = None query: str timeout: int = 10 def __init__( self, + client: InfrahubClient, + infrahub_node: type[InfrahubNode], + convert_query_response: bool = False, branch: str = "", root_directory: str = "", server_url: str = "", - client: InfrahubClient | None = None, ): - self.git: GitRepoManager + super().__init__( + client=client, + infrahub_node=infrahub_node, + convert_query_response=convert_query_response, + branch=branch, + root_directory=root_directory, + ) - self.branch = branch self.server_url = server_url or os.environ.get("INFRAHUB_URL", "http://127.0.0.1:8000") - self.root_directory = root_directory or os.getcwd() - self._client = client if not self.name: @@ -43,24 +47,7 @@ def __init__( @property def client(self) -> InfrahubClient: - if self._client: - return self._client - - raise UninitializedError("The client has not been initialized") - - @property - def branch_name(self) -> str: - """Return the name of the current git branch.""" - - if self.branch: - return self.branch - - if not hasattr(self, "git") or not self.git: - self.git = GitRepoManager(self.root_directory) - - self.branch = str(self.git.active_branch) - - return self.branch + return self._init_client @abstractmethod def transform(self, data: dict) -> Any: @@ -86,6 +73,7 @@ async def run(self, data: dict | None = None) -> Any: data = await self.collect_data() unpacked = data.get("data") or data + await self.process_nodes(data=unpacked) if asyncio.iscoroutinefunction(self.transform): return await self.transform(data=unpacked) diff --git a/tests/fixtures/repos/ctl_integration/.infrahub.yml b/tests/fixtures/repos/ctl_integration/.infrahub.yml new file mode 100644 index 00000000..605cdff4 --- /dev/null +++ b/tests/fixtures/repos/ctl_integration/.infrahub.yml @@ -0,0 +1,32 @@ +# yaml-language-server: $schema=https://schema.infrahub.app/python-sdk/repository-config/develop.json +--- +python_transforms: + - name: animal_person + class_name: AnimalPerson + file_path: "transforms/animal_person.py" + convert_query_response: false + - name: animal_person_converted + class_name: ConvertedAnimalPerson + file_path: "transforms/converted.py" + convert_query_response: true + +generator_definitions: + - name: animal_tags + file_path: "generators/tag_generator.py" + targets: pet_owners + query: animal_person + convert_query_response: false + parameters: + name: "name__value" + - name: animal_tags_convert + file_path: "generators/tag_generator_convert.py" + targets: pet_owners + query: animal_person + convert_query_response: true + parameters: + name: "name__value" + + +queries: + - name: animal_person + file_path: queries/animal_person.gql diff --git a/tests/fixtures/repos/ctl_integration/generators/tag_generator.py b/tests/fixtures/repos/ctl_integration/generators/tag_generator.py new file mode 100644 index 00000000..50641402 --- /dev/null +++ b/tests/fixtures/repos/ctl_integration/generators/tag_generator.py @@ -0,0 +1,15 @@ +from infrahub_sdk.generator import InfrahubGenerator + + +class Generator(InfrahubGenerator): + async def generate(self, data: dict) -> None: + response_person = data["TestingPerson"]["edges"][0]["node"] + name: str = response_person["name"]["value"] + + for animal in data["TestingPerson"]["edges"][0]["node"]["animals"]["edges"]: + payload = { + "name": f"raw-{name.lower().replace(' ', 
'-')}-{animal['node']['name']['value'].lower()}", + "description": "Without converting query response", + } + obj = await self.client.create(kind="BuiltinTag", data=payload) + await obj.save(allow_upsert=True) diff --git a/tests/fixtures/repos/ctl_integration/generators/tag_generator_convert.py b/tests/fixtures/repos/ctl_integration/generators/tag_generator_convert.py new file mode 100644 index 00000000..a0ac6d4b --- /dev/null +++ b/tests/fixtures/repos/ctl_integration/generators/tag_generator_convert.py @@ -0,0 +1,16 @@ +from infrahub_sdk.generator import InfrahubGenerator + + +class Generator(InfrahubGenerator): + async def generate(self, data: dict) -> None: + response_person = data["TestingPerson"]["edges"][0]["node"] + name: str = response_person["name"]["value"] + person = self.store.get(key=name, kind="TestingPerson") + + for animal in person.animals.peers: + payload = { + "name": f"converted-{name.lower().replace(' ', '-')}-{animal.peer.name.value.lower()}", + "description": "Using convert_query_response", + } + obj = await self.client.create(kind="BuiltinTag", data=payload) + await obj.save(allow_upsert=True) diff --git a/tests/fixtures/repos/ctl_integration/queries/animal_person.gql b/tests/fixtures/repos/ctl_integration/queries/animal_person.gql new file mode 100644 index 00000000..c8e4ab86 --- /dev/null +++ b/tests/fixtures/repos/ctl_integration/queries/animal_person.gql @@ -0,0 +1,27 @@ +query TestPersonQuery($name: String!) { + TestingPerson(name__value: $name) { + edges { + node { + __typename + id + name { + value + } + height { + value + } + animals { + edges { + node { + __typename + id + name { + value + } + } + } + } + } + } + } +} diff --git a/tests/fixtures/repos/ctl_integration/transforms/animal_person.py b/tests/fixtures/repos/ctl_integration/transforms/animal_person.py new file mode 100644 index 00000000..667dbaaa --- /dev/null +++ b/tests/fixtures/repos/ctl_integration/transforms/animal_person.py @@ -0,0 +1,16 @@ +from typing import Any + +from infrahub_sdk.transforms import InfrahubTransform + + +class AnimalPerson(InfrahubTransform): + query = "animal_person" + + async def transform(self, data) -> dict[str, Any]: + response_person = data["TestingPerson"]["edges"][0]["node"] + name: str = response_person["name"]["value"] + animal_names = sorted( + animal["node"]["name"]["value"] for animal in data["TestingPerson"]["edges"][0]["node"]["animals"]["edges"] + ) + + return {"person": name, "pets": animal_names} diff --git a/tests/fixtures/repos/ctl_integration/transforms/converted.py b/tests/fixtures/repos/ctl_integration/transforms/converted.py new file mode 100644 index 00000000..fd4eadb9 --- /dev/null +++ b/tests/fixtures/repos/ctl_integration/transforms/converted.py @@ -0,0 +1,18 @@ +from operator import itemgetter +from typing import Any + +from infrahub_sdk.transforms import InfrahubTransform + + +class ConvertedAnimalPerson(InfrahubTransform): + query = "animal_person" + + async def transform(self, data) -> dict[str, Any]: + response_person = data["TestingPerson"]["edges"][0]["node"] + name: str = response_person["name"]["value"] + person = self.store.get(key=name, kind="TestingPerson") + + animals = [{"type": animal.peer.typename, "name": animal.peer.name.value} for animal in person.animals.peers] + animals.sort(key=itemgetter("name")) + + return {"person": person.name.value, "herd_size": len(animals), "animals": animals} diff --git a/tests/integration/test_infrahubctl.py b/tests/integration/test_infrahubctl.py new file mode 100644 index 00000000..9d58d5ce --- 
/dev/null +++ b/tests/integration/test_infrahubctl.py @@ -0,0 +1,163 @@ +from __future__ import annotations + +import json +import os +import shutil +import tempfile +from collections.abc import Generator +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest +from typer.testing import CliRunner + +from infrahub_sdk.ctl import config +from infrahub_sdk.ctl.cli_commands import app, generator +from infrahub_sdk.ctl.parameters import load_configuration +from infrahub_sdk.repository import GitRepoManager +from infrahub_sdk.testing.docker import TestInfrahubDockerClient +from infrahub_sdk.testing.schemas.animal import SchemaAnimal +from tests.helpers.utils import change_directory, strip_color + +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient + +FIXTURE_BASE_DIR = Path(Path(os.path.abspath(__file__)).parent / ".." / "fixtures") + + +runner = CliRunner() + + +class TestInfrahubCtl(TestInfrahubDockerClient, SchemaAnimal): + @pytest.fixture(scope="class") + async def base_dataset( + self, + client: InfrahubClient, + load_schema, + person_liam, + person_ethan, + person_sophia, + cat_luna, + cat_bella, + dog_daisy, + dog_rocky, + ctl_client_config, + ): + await client.branch.create(branch_name="branch01") + + @pytest.fixture(scope="class") + def repository(self) -> Generator[str]: + temp_dir = tempfile.mkdtemp() + + try: + fixture_path = Path(FIXTURE_BASE_DIR / "repos" / "ctl_integration") + shutil.copytree(fixture_path, temp_dir, dirs_exist_ok=True) + # Initialize fixture as git repository. This is necessary to run some infrahubctl commands. + GitRepoManager(temp_dir) + + yield temp_dir + + finally: + shutil.rmtree(temp_dir) + + @pytest.fixture(scope="class") + def ctl_client_config(self, client: InfrahubClient) -> Generator: + load_configuration(value="infrahubctl.toml") + assert config.SETTINGS._settings + config.SETTINGS._settings.server_address = client.config.address + original_username = os.environ.get("INFRAHUB_USERNAME") + original_password = os.environ.get("INFRAHUB_PASSWORD") + if client.config.username and client.config.password: + os.environ["INFRAHUB_USERNAME"] = client.config.username + os.environ["INFRAHUB_PASSWORD"] = client.config.password + yield + if original_username: + os.environ["INFRAHUB_USERNAME"] = original_username + if original_password: + os.environ["INFRAHUB_PASSWORD"] = original_password + + def test_infrahubctl_transform_cmd_animal_person(self, repository: str, base_dataset: None) -> None: + """Test infrahubctl transform without converting nodes.""" + + with change_directory(repository): + ethans_output = runner.invoke(app, ["transform", "animal_person", "name=Ethan Carter"]) + structured_ethan_output = json.loads(strip_color(ethans_output.stdout)) + + liams_output = runner.invoke(app, ["transform", "animal_person", "name=Liam Walker"]) + structured_liam_output = json.loads(strip_color(liams_output.stdout)) + + assert structured_ethan_output == {"person": "Ethan Carter", "pets": ["Bella", "Daisy", "Luna"]} + assert structured_liam_output == {"person": "Liam Walker", "pets": []} + + def test_infrahubctl_transform_cmd_convert_animal_person(self, repository: str, base_dataset: None) -> None: + """Test infrahubctl transform when converting nodes.""" + + with change_directory(repository): + ethans_output = runner.invoke(app, ["transform", "animal_person_converted", "name=Ethan Carter"]) + structured_ethan_output = json.loads(strip_color(ethans_output.stdout)) + + liams_output = runner.invoke(app, ["transform", "animal_person_converted", 
"name=Liam Walker"]) + structured_liam_output = json.loads(strip_color(liams_output.stdout)) + + assert structured_ethan_output == { + "animals": [ + {"name": "Bella", "type": "TestingCat"}, + {"name": "Daisy", "type": "TestingDog"}, + {"name": "Luna", "type": "TestingCat"}, + ], + "herd_size": 3, + "person": "Ethan Carter", + } + assert structured_liam_output == { + "animals": [], + "herd_size": 0, + "person": "Liam Walker", + } + + async def test_infrahubctl_generator_cmd_animal_tags( + self, repository: str, base_dataset: None, client: InfrahubClient + ) -> None: + """Test infrahubctl generator without converting nodes.""" + + expected_generated_tags = ["raw-ethan-carter-bella", "raw-ethan-carter-daisy", "raw-ethan-carter-luna"] + initial_tags = await client.all(kind="BuiltinTag") + + with change_directory(repository): + await generator( + generator_name="animal_tags", variables=["name=Ethan Carter"], list_available=False, path="." + ) + + final_tags = await client.all(kind="BuiltinTag") + + initial_tag_names = [tag.name.value for tag in initial_tags] + final_tag_names = [tag.name.value for tag in final_tags] + + for tag in expected_generated_tags: + assert tag not in initial_tag_names + assert tag in final_tag_names + + async def test_infrahubctl_generator_cmd_animal_tags_convert_query( + self, repository: str, base_dataset: None, client: InfrahubClient + ) -> None: + """Test infrahubctl generator with conversion of nodes.""" + + expected_generated_tags = [ + "converted-ethan-carter-bella", + "converted-ethan-carter-daisy", + "converted-ethan-carter-luna", + ] + initial_tags = await client.all(kind="BuiltinTag") + + with change_directory(repository): + await generator( + generator_name="animal_tags_convert", variables=["name=Ethan Carter"], list_available=False, path="." + ) + + final_tags = await client.all(kind="BuiltinTag") + + initial_tag_names = [tag.name.value for tag in initial_tags] + final_tag_names = [tag.name.value for tag in final_tags] + + for tag in expected_generated_tags: + assert tag not in initial_tag_names + assert tag in final_tag_names From 5aa8f8407576fb098f89666a0259d61d24b4ece5 Mon Sep 17 00:00:00 2001 From: wvandeun Date: Tue, 29 Apr 2025 15:21:37 +0200 Subject: [PATCH 04/13] add repository dispatch workflow triggering updates in other repos --- .github/workflows/release.yml | 7 +++ .github/workflows/repository-dispatch.yml | 53 +++++++++++++++++++++++ 2 files changed, 60 insertions(+) create mode 100644 .github/workflows/repository-dispatch.yml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index bb2e6b23..008757b7 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -87,3 +87,10 @@ jobs: secrets: inherit with: version: ${{ github.ref_name }} + + repository-dispatch: + needs: check_release + uses: ./.github/workflows/repository-dispatch.yml + secrets: inherit + with: + version: ${{ needs.check_release.outputs.version }} diff --git a/.github/workflows/repository-dispatch.yml b/.github/workflows/repository-dispatch.yml new file mode 100644 index 00000000..0dc9a213 --- /dev/null +++ b/.github/workflows/repository-dispatch.yml @@ -0,0 +1,53 @@ +--- +# yamllint disable rule:truthy rule:line-length +name: Trigger Infrahub SDK update in other repositories + +on: + workflow_dispatch: + inputs: + runs-on: + description: "The OS to run the job on" + required: false + default: "ubuntu-22.04" + type: string + version: + type: string + required: false + description: The string to extract semver from. 
+ default: '' + workflow_call: + inputs: + runs-on: + description: "The OS to run the job on" + required: false + default: "ubuntu-22.04" + type: string + version: + type: string + required: false + description: The string to extract semver from. + default: '' + +jobs: + repository-dispatch: + runs-on: ubuntu-22.04 + strategy: + matrix: + # Either a literal path, or the name of a secret... + repo: + - "opsmill/infrahub-demo-dc-fabric" + - "INFRAHUB_CUSTOMER1_REPOSITORY" + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Repository Dispatch + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ secrets.GH_UPDATE_PACKAGE_OTTO }} + # if matrix.repo contains a slash, use it literally; otherwise look up the secret named by matrix.repo + repository: ${{ contains(matrix.repo, '/') && matrix.repo || secrets[matrix.repo] }} + event-type: trigger-infrahub-sdk-python-update + client-payload: | + {"version":"${{ inputs.version }}"} From 2002d382efd5dc250050b5b2280bc20ece7759d9 Mon Sep 17 00:00:00 2001 From: Brett Lykins Date: Tue, 29 Apr 2025 11:23:06 -0400 Subject: [PATCH 05/13] v1.12 prep --- CHANGELOG.md | 7 +++++++ changelog/+398b0883.added.md | 1 - changelog/281.added.md | 1 - pyproject.toml | 2 +- 4 files changed, 8 insertions(+), 3 deletions(-) delete mode 100644 changelog/+398b0883.added.md delete mode 100644 changelog/281.added.md diff --git a/CHANGELOG.md b/CHANGELOG.md index fe05e3ff..e7c2bdea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,13 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang +## [1.12.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.12.0) - 2025-04-29 + +### Added + +- Added the ability to convert the query response to InfrahubNode objects when using Python Transforms in the same way you can with Generators. ([#281](https://github.com/opsmill/infrahub-sdk-python/issues/281)) +- Added a "branch" parameter to the client.clone() method to allow properly cloning a client that targets another branch. + ## [1.11.1](https://github.com/opsmill/infrahub-sdk-python/tree/v1.11.1) - 2025-04-28 ### Changed diff --git a/changelog/+398b0883.added.md b/changelog/+398b0883.added.md deleted file mode 100644 index f9554fab..00000000 --- a/changelog/+398b0883.added.md +++ /dev/null @@ -1 +0,0 @@ -Added a "branch" parameter to the client.clone() method to allow properly cloning a client that targets another branch. diff --git a/changelog/281.added.md b/changelog/281.added.md deleted file mode 100644 index 00338f66..00000000 --- a/changelog/281.added.md +++ /dev/null @@ -1 +0,0 @@ -Added ability to convert the query response to InfrahubNode objects when using Python Transforms in the same way you can with Generators. 
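[Editor's note — illustrative sketch, not part of the patch.] As a usage note for the 1.12.0 entries above: the new branch parameter lets a cloned client (and its node store) target a different branch while leaving the original client untouched. The snippet below restates the assertions from test_clone_define_branch and assumes `client` is an already-initialized InfrahubClient whose default branch is "main" (a placeholder value).

```python
# `client` is assumed to be an existing InfrahubClient configured for branch "main".
branch_client = client.clone(branch="my_other_branch")

# The clone and its store now target the requested branch.
assert branch_client.default_branch == "my_other_branch"
assert branch_client.store._default_branch == "my_other_branch"

# The original client and its store keep their own branch.
assert client.default_branch == "main"
assert client.store._default_branch == "main"
```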
diff --git a/pyproject.toml b/pyproject.toml index c3ccc170..d9073e38 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "infrahub-sdk" -version = "1.11.1" +version = "1.12.0" description = "Python Client to interact with Infrahub" authors = ["OpsMill "] readme = "README.md" From 3a1d72ff123a07a74f306d956f1de5d51e77b34d Mon Sep 17 00:00:00 2001 From: Damien Garros Date: Mon, 12 May 2025 14:35:33 +0200 Subject: [PATCH 06/13] Split node.py into multiple files --- infrahub_sdk/node/__init__.py | 30 ++ infrahub_sdk/node/attribute.py | 122 +++++ infrahub_sdk/node/constants.py | 21 + infrahub_sdk/{ => node}/node.py | 799 ++---------------------------- infrahub_sdk/node/property.py | 24 + infrahub_sdk/node/related_node.py | 266 ++++++++++ infrahub_sdk/node/relationship.py | 302 +++++++++++ infrahub_sdk/protocols_base.py | 36 +- tests/unit/sdk/test_node.py | 2 +- 9 files changed, 850 insertions(+), 752 deletions(-) create mode 100644 infrahub_sdk/node/__init__.py create mode 100644 infrahub_sdk/node/attribute.py create mode 100644 infrahub_sdk/node/constants.py rename infrahub_sdk/{ => node}/node.py (69%) create mode 100644 infrahub_sdk/node/property.py create mode 100644 infrahub_sdk/node/related_node.py create mode 100644 infrahub_sdk/node/relationship.py diff --git a/infrahub_sdk/node/__init__.py b/infrahub_sdk/node/__init__.py new file mode 100644 index 00000000..6862aac3 --- /dev/null +++ b/infrahub_sdk/node/__init__.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from .constants import HFID_STR_SEPARATOR +from .node import InfrahubNode, InfrahubNodeBase, InfrahubNodeSync +from .related_node import RelatedNode, RelatedNodeBase, RelatedNodeSync +from .relationship import RelationshipManager, RelationshipManagerBase, RelationshipManagerSync + +__all__ = [ + "InfrahubNode", + "InfrahubNodeBase", + "InfrahubNodeSync", + "RelatedNode", + "RelatedNodeBase", + "RelatedNodeSync", + "RelationshipManager", + "RelationshipManagerBase", + "RelationshipManagerSync", +] + + +def parse_human_friendly_id(hfid: str | list[str]) -> tuple[str | None, list[str]]: + """Parse a human friendly ID into a kind and an identifier.""" + if isinstance(hfid, str): + hfid_parts = hfid.split(HFID_STR_SEPARATOR) + if len(hfid_parts) == 1: + return None, hfid_parts + return hfid_parts[0], hfid_parts[1:] + if isinstance(hfid, list): + return None, hfid + raise ValueError(f"Invalid human friendly ID: {hfid}") diff --git a/infrahub_sdk/node/attribute.py b/infrahub_sdk/node/attribute.py new file mode 100644 index 00000000..5ddc5cbe --- /dev/null +++ b/infrahub_sdk/node/attribute.py @@ -0,0 +1,122 @@ +from __future__ import annotations + +import ipaddress +from typing import TYPE_CHECKING, Any, Callable, get_args + +from ..protocols_base import CoreNodeBase +from ..uuidt import UUIDT +from .constants import IP_TYPES, PROPERTIES_FLAG, PROPERTIES_OBJECT, SAFE_VALUE +from .property import NodeProperty + +if TYPE_CHECKING: + from ..schema import AttributeSchemaAPI + + +class Attribute: + """Represents an attribute of a Node, including its schema, value, and properties.""" + + def __init__(self, name: str, schema: AttributeSchemaAPI, data: Any | dict): + """ + Args: + name (str): The name of the attribute. + schema (AttributeSchema): The schema defining the attribute. + data (Union[Any, dict]): The data for the attribute, either in raw form or as a dictionary. 
+ """ + self.name = name + self._schema = schema + + if not isinstance(data, dict) or "value" not in data.keys(): + data = {"value": data} + + self._properties_flag = PROPERTIES_FLAG + self._properties_object = PROPERTIES_OBJECT + self._properties = self._properties_flag + self._properties_object + + self._read_only = ["updated_at", "is_inherited"] + + self.id: str | None = data.get("id", None) + + self._value: Any | None = data.get("value", None) + self.value_has_been_mutated = False + self.is_default: bool | None = data.get("is_default", None) + self.is_from_profile: bool | None = data.get("is_from_profile", None) + + if self._value: + value_mapper: dict[str, Callable] = { + "IPHost": ipaddress.ip_interface, + "IPNetwork": ipaddress.ip_network, + } + mapper = value_mapper.get(schema.kind, lambda value: value) + self._value = mapper(data.get("value")) + + self.is_inherited: bool | None = data.get("is_inherited", None) + self.updated_at: str | None = data.get("updated_at", None) + + self.is_visible: bool | None = data.get("is_visible", None) + self.is_protected: bool | None = data.get("is_protected", None) + + self.source: NodeProperty | None = None + self.owner: NodeProperty | None = None + + for prop_name in self._properties_object: + if data.get(prop_name): + setattr(self, prop_name, NodeProperty(data=data.get(prop_name))) # type: ignore[arg-type] + + @property + def value(self) -> Any: + return self._value + + @value.setter + def value(self, value: Any) -> None: + self._value = value + self.value_has_been_mutated = True + + def _generate_input_data(self) -> dict | None: + data: dict[str, Any] = {} + variables: dict[str, Any] = {} + + if self.value is None: + return data + + if isinstance(self.value, str): + if SAFE_VALUE.match(self.value): + data["value"] = self.value + else: + var_name = f"value_{UUIDT.new().hex}" + variables[var_name] = self.value + data["value"] = f"${var_name}" + elif isinstance(self.value, get_args(IP_TYPES)): + data["value"] = self.value.with_prefixlen + elif isinstance(self.value, CoreNodeBase) and self.value.is_resource_pool(): + data["from_pool"] = {"id": self.value.id} + else: + data["value"] = self.value + + for prop_name in self._properties_flag: + if getattr(self, prop_name) is not None: + data[prop_name] = getattr(self, prop_name) + + for prop_name in self._properties_object: + if getattr(self, prop_name) is not None: + data[prop_name] = getattr(self, prop_name)._generate_input_data() + + return {"data": data, "variables": variables} + + def _generate_query_data(self, property: bool = False) -> dict | None: + data: dict[str, Any] = {"value": None} + + if property: + data.update({"is_default": None, "is_from_profile": None}) + + for prop_name in self._properties_flag: + data[prop_name] = None + for prop_name in self._properties_object: + data[prop_name] = {"id": None, "display_label": None, "__typename": None} + + return data + + def _generate_mutation_query(self) -> dict[str, Any]: + if isinstance(self.value, CoreNodeBase) and self.value.is_resource_pool(): + # If it points to a pool, ask for the value of the pool allocated resource + return {self.name: {"value": None}} + return {} diff --git a/infrahub_sdk/node/constants.py b/infrahub_sdk/node/constants.py new file mode 100644 index 00000000..7f5217d8 --- /dev/null +++ b/infrahub_sdk/node/constants.py @@ -0,0 +1,21 @@ +import ipaddress +import re +from typing import Union + +PROPERTIES_FLAG = ["is_visible", "is_protected"] +PROPERTIES_OBJECT = ["source", "owner"] +SAFE_VALUE = re.compile(r"(^[\. 
/:a-zA-Z0-9_-]+$)|(^$)") + +IP_TYPES = Union[ipaddress.IPv4Interface, ipaddress.IPv6Interface, ipaddress.IPv4Network, ipaddress.IPv6Network] + +ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE = ( + "calling artifact_fetch is only supported for nodes that are Artifact Definition target" +) +ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE = ( + "calling artifact_generate is only supported for nodes that are Artifact Definition targets" +) +ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE = ( + "calling generate is only supported for CoreArtifactDefinition nodes" +) + +HFID_STR_SEPARATOR = "__" diff --git a/infrahub_sdk/node.py b/infrahub_sdk/node/node.py similarity index 69% rename from infrahub_sdk/node.py rename to infrahub_sdk/node/node.py index 6d8160f7..5cb6a99a 100644 --- a/infrahub_sdk/node.py +++ b/infrahub_sdk/node/node.py @@ -1,709 +1,75 @@ from __future__ import annotations -import ipaddress -import re -from collections.abc import Iterable from copy import copy -from typing import TYPE_CHECKING, Any, Callable, Union, get_args +from typing import TYPE_CHECKING, Any -from .constants import InfrahubClientMode -from .exceptions import ( - Error, +from ..constants import InfrahubClientMode +from ..exceptions import ( FeatureNotSupportedError, NodeNotFoundError, - UninitializedError, ) -from .graphql import Mutation, Query -from .schema import GenericSchemaAPI, RelationshipCardinality, RelationshipKind -from .utils import compare_lists, generate_short_id, get_flat_value -from .uuidt import UUIDT +from ..graphql import Mutation, Query +from ..schema import GenericSchemaAPI, RelationshipCardinality, RelationshipKind +from ..utils import compare_lists, generate_short_id, get_flat_value +from .attribute import Attribute +from .constants import ( + ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE, + ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE, + ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE, + PROPERTIES_OBJECT, +) +from .related_node import RelatedNode, RelatedNodeBase, RelatedNodeSync +from .relationship import RelationshipManager, RelationshipManagerBase, RelationshipManagerSync if TYPE_CHECKING: from typing_extensions import Self - from .client import InfrahubClient, InfrahubClientSync - from .context import RequestContext - from .schema import AttributeSchemaAPI, MainSchemaTypesAPI, RelationshipSchemaAPI - from .types import Order - - -PROPERTIES_FLAG = ["is_visible", "is_protected"] -PROPERTIES_OBJECT = ["source", "owner"] -SAFE_VALUE = re.compile(r"(^[\. 
/:a-zA-Z0-9_-]+$)|(^$)") - -IP_TYPES = Union[ipaddress.IPv4Interface, ipaddress.IPv6Interface, ipaddress.IPv4Network, ipaddress.IPv6Network] - -ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE = ( - "calling artifact_fetch is only supported for nodes that are Artifact Definition target" -) -ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE = ( - "calling artifact_generate is only supported for nodes that are Artifact Definition targets" -) -ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE = ( - "calling generate is only supported for CoreArtifactDefinition nodes" -) - -HFID_STR_SEPARATOR = "__" - - -def parse_human_friendly_id(hfid: str | list[str]) -> tuple[str | None, list[str]]: - """Parse a human friendly ID into a kind and an identifier.""" - if isinstance(hfid, str): - hfid_parts = hfid.split(HFID_STR_SEPARATOR) - if len(hfid_parts) == 1: - return None, hfid_parts - return hfid_parts[0], hfid_parts[1:] - if isinstance(hfid, list): - return None, hfid - raise ValueError(f"Invalid human friendly ID: {hfid}") - - -class Attribute: - """Represents an attribute of a Node, including its schema, value, and properties.""" - - def __init__(self, name: str, schema: AttributeSchemaAPI, data: Any | dict): - """ - Args: - name (str): The name of the attribute. - schema (AttributeSchema): The schema defining the attribute. - data (Union[Any, dict]): The data for the attribute, either in raw form or as a dictionary. - """ - self.name = name - self._schema = schema - - if not isinstance(data, dict) or "value" not in data.keys(): - data = {"value": data} + from ..client import InfrahubClient, InfrahubClientSync + from ..context import RequestContext + from ..schema import MainSchemaTypesAPI + from ..types import Order - self._properties_flag = PROPERTIES_FLAG - self._properties_object = PROPERTIES_OBJECT - self._properties = self._properties_flag + self._properties_object - self._read_only = ["updated_at", "is_inherited"] - - self.id: str | None = data.get("id", None) - - self._value: Any | None = data.get("value", None) - self.value_has_been_mutated = False - self.is_default: bool | None = data.get("is_default", None) - self.is_from_profile: bool | None = data.get("is_from_profile", None) - - if self._value: - value_mapper: dict[str, Callable] = { - "IPHost": ipaddress.ip_interface, - "IPNetwork": ipaddress.ip_network, - } - mapper = value_mapper.get(schema.kind, lambda value: value) - self._value = mapper(data.get("value")) - - self.is_inherited: bool | None = data.get("is_inherited", None) - self.updated_at: str | None = data.get("updated_at", None) - - self.is_visible: bool | None = data.get("is_visible", None) - self.is_protected: bool | None = data.get("is_protected", None) - - self.source: NodeProperty | None = None - self.owner: NodeProperty | None = None - - for prop_name in self._properties_object: - if data.get(prop_name): - setattr(self, prop_name, NodeProperty(data=data.get(prop_name))) # type: ignore[arg-type] +def generate_relationship_property(node: InfrahubNode | InfrahubNodeSync, name: str) -> property: + """Generates a property that stores values under a private non-public name. - @property - def value(self) -> Any: - return self._value + Args: + node (Union[InfrahubNode, InfrahubNodeSync]): The node instance. + name (str): The name of the relationship property. - @value.setter - def value(self, value: Any) -> None: - self._value = value - self.value_has_been_mutated = True + Returns: + A property object for managing the relationship. 
- def _generate_input_data(self) -> dict | None: - data: dict[str, Any] = {} - variables: dict[str, Any] = {} + """ + internal_name = "_" + name.lower() + external_name = name - if self.value is None: - return data + def prop_getter(self: InfrahubNodeBase) -> Any: + return getattr(self, internal_name) - if isinstance(self.value, str): - if SAFE_VALUE.match(self.value): - data["value"] = self.value - else: - var_name = f"value_{UUIDT.new().hex}" - variables[var_name] = self.value - data["value"] = f"${var_name}" - elif isinstance(self.value, get_args(IP_TYPES)): - data["value"] = self.value.with_prefixlen - elif isinstance(self.value, InfrahubNodeBase) and self.value.is_resource_pool(): - data["from_pool"] = {"id": self.value.id} + def prop_setter(self: InfrahubNodeBase, value: Any) -> None: + if isinstance(value, RelatedNodeBase) or value is None: + setattr(self, internal_name, value) else: - data["value"] = self.value - - for prop_name in self._properties_flag: - if getattr(self, prop_name) is not None: - data[prop_name] = getattr(self, prop_name) - - for prop_name in self._properties_object: - if getattr(self, prop_name) is not None: - data[prop_name] = getattr(self, prop_name)._generate_input_data() - - return {"data": data, "variables": variables} - - def _generate_query_data(self, property: bool = False) -> dict | None: - data: dict[str, Any] = {"value": None} - - if property: - data.update({"is_default": None, "is_from_profile": None}) - - for prop_name in self._properties_flag: - data[prop_name] = None - for prop_name in self._properties_object: - data[prop_name] = {"id": None, "display_label": None, "__typename": None} - - return data - - def _generate_mutation_query(self) -> dict[str, Any]: - if isinstance(self.value, InfrahubNodeBase) and self.value.is_resource_pool(): - # If it points to a pool, ask for the value of the pool allocated resource - return {self.name: {"value": None}} - return {} - - -class RelatedNodeBase: - """Base class for representing a related node in a relationship.""" - - def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict, name: str | None = None): - """ - Args: - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. - """ - self.schema = schema - self.name = name - - self._branch = branch - - self._properties_flag = PROPERTIES_FLAG - self._properties_object = PROPERTIES_OBJECT - self._properties = self._properties_flag + self._properties_object - - self._peer = None - self._id: str | None = None - self._hfid: list[str] | None = None - self._display_label: str | None = None - self._typename: str | None = None - - if isinstance(data, (InfrahubNode, InfrahubNodeSync)): - self._peer = data - for prop in self._properties: - setattr(self, prop, None) - elif isinstance(data, list): - data = {"hfid": data} - elif not isinstance(data, dict): - data = {"id": data} - - if isinstance(data, dict): - # To support both with and without pagination, we split data into node_data and properties_data - # We should probably clean that once we'll remove the code without pagination. 
- node_data = data.get("node", data) - properties_data = data.get("properties", data) - - if node_data: - self._id = node_data.get("id", None) - self._hfid = node_data.get("hfid", None) - self._kind = node_data.get("kind", None) - self._display_label = node_data.get("display_label", None) - self._typename = node_data.get("__typename", None) - - self.updated_at: str | None = data.get("updated_at", data.get("_relation__updated_at", None)) - - # FIXME, we won't need that once we are only supporting paginated results - if self._typename and self._typename.startswith("Related"): - self._typename = self._typename[7:] - - for prop in self._properties: - prop_data = properties_data.get(prop, properties_data.get(f"_relation__{prop}", None)) - if prop_data and isinstance(prop_data, dict) and "id" in prop_data: - setattr(self, prop, prop_data["id"]) - elif prop_data and isinstance(prop_data, (str, bool)): - setattr(self, prop, prop_data) - else: - setattr(self, prop, None) - - @property - def id(self) -> str | None: - if self._peer: - return self._peer.id - return self._id - - @property - def hfid(self) -> list[Any] | None: - if self._peer: - return self._peer.hfid - return self._hfid - - @property - def hfid_str(self) -> str | None: - if self._peer and self.hfid: - return self._peer.get_human_friendly_id_as_string(include_kind=True) - return None - - @property - def is_resource_pool(self) -> bool: - if self._peer: - return self._peer.is_resource_pool() - return False - - @property - def initialized(self) -> bool: - return bool(self.id) or bool(self.hfid) - - @property - def display_label(self) -> str | None: - if self._peer: - return self._peer.display_label - return self._display_label - - @property - def typename(self) -> str | None: - if self._peer: - return self._peer.typename - return self._typename - - def _generate_input_data(self, allocate_from_pool: bool = False) -> dict[str, Any]: - data: dict[str, Any] = {} - - if self.is_resource_pool and allocate_from_pool: - return {"from_pool": {"id": self.id}} - - if self.id is not None: - data["id"] = self.id - elif self.hfid is not None: - data["hfid"] = self.hfid - if self._kind is not None: - data["kind"] = self._kind - - for prop_name in self._properties: - if getattr(self, prop_name) is not None: - data[f"_relation__{prop_name}"] = getattr(self, prop_name) - - return data - - def _generate_mutation_query(self) -> dict[str, Any]: - if self.name and self.is_resource_pool: - # If a related node points to a pool, ask for the ID of the pool allocated resource - return {self.name: {"node": {"id": None, "display_label": None, "__typename": None}}} - return {} - - @classmethod - def _generate_query_data(cls, peer_data: dict[str, Any] | None = None, property: bool = False) -> dict: - """Generates the basic structure of a GraphQL query for a single relationship. - - Args: - peer_data (dict[str, Union[Any, Dict]], optional): Additional data to be included in the query for the node. - This is used to add extra fields when prefetching related node data. - - Returns: - Dict: A dictionary representing the basic structure of a GraphQL query, including the node's ID, display label, - and typename. The method also includes additional properties and any peer_data provided. 
- """ - data: dict[str, Any] = {"node": {"id": None, "hfid": None, "display_label": None, "__typename": None}} - properties: dict[str, Any] = {} - - if property: - for prop_name in PROPERTIES_FLAG: - properties[prop_name] = None - for prop_name in PROPERTIES_OBJECT: - properties[prop_name] = {"id": None, "display_label": None, "__typename": None} - - if properties: - data["properties"] = properties - if peer_data: - data["node"].update(peer_data) - - return data - - -class RelatedNode(RelatedNodeBase): - """Represents a RelatedNodeBase in an asynchronous context.""" - - def __init__( - self, - client: InfrahubClient, - branch: str, - schema: RelationshipSchemaAPI, - data: Any | dict, - name: str | None = None, - ): - """ - Args: - client (InfrahubClient): The client used to interact with the backend asynchronously. - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. - """ - self._client = client - super().__init__(branch=branch, schema=schema, data=data, name=name) - - async def fetch(self, timeout: int | None = None) -> None: - if not self.id or not self.typename: - raise Error("Unable to fetch the peer, id and/or typename are not defined") - - self._peer = await self._client.get( - kind=self.typename, id=self.id, populate_store=True, branch=self._branch, timeout=timeout - ) - - @property - def peer(self) -> InfrahubNode: - return self.get() - - def get(self) -> InfrahubNode: - if self._peer: - return self._peer # type: ignore[return-value] - - if self.id and self.typename: - return self._client.store.get(key=self.id, kind=self.typename, branch=self._branch) # type: ignore[return-value] - - if self.hfid_str: - return self._client.store.get(key=self.hfid_str, branch=self._branch) # type: ignore[return-value] - - raise ValueError("Node must have at least one identifier (ID or HFID) to query it.") - - -class RelatedNodeSync(RelatedNodeBase): - """Represents a related node in a synchronous context.""" - - def __init__( - self, - client: InfrahubClientSync, - branch: str, - schema: RelationshipSchemaAPI, - data: Any | dict, - name: str | None = None, - ): - """ - Args: - client (InfrahubClientSync): The client used to interact with the backend synchronously. - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. 
- """ - self._client = client - super().__init__(branch=branch, schema=schema, data=data, name=name) - - def fetch(self, timeout: int | None = None) -> None: - if not self.id or not self.typename: - raise Error("Unable to fetch the peer, id and/or typename are not defined") - - self._peer = self._client.get( - kind=self.typename, id=self.id, populate_store=True, branch=self._branch, timeout=timeout - ) - - @property - def peer(self) -> InfrahubNodeSync: - return self.get() - - def get(self) -> InfrahubNodeSync: - if self._peer: - return self._peer # type: ignore[return-value] - - if self.id and self.typename: - return self._client.store.get(key=self.id, kind=self.typename, branch=self._branch) # type: ignore[return-value] - - if self.hfid_str: - return self._client.store.get(key=self.hfid_str, branch=self._branch) # type: ignore[return-value] - - raise ValueError("Node must have at least one identifier (ID or HFID) to query it.") - - -class RelationshipManagerBase: - """Base class for RelationshipManager and RelationshipManagerSync""" - - def __init__(self, name: str, branch: str, schema: RelationshipSchemaAPI): - """ - Args: - name (str): The name of the relationship. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. - """ - self.initialized: bool = False - self._has_update: bool = False - self.name = name - self.schema = schema - self.branch = branch - - self._properties_flag = PROPERTIES_FLAG - self._properties_object = PROPERTIES_OBJECT - self._properties = self._properties_flag + self._properties_object - - self.peers: list[RelatedNode | RelatedNodeSync] = [] - - @property - def peer_ids(self) -> list[str]: - return [peer.id for peer in self.peers if peer.id] - - @property - def peer_hfids(self) -> list[list[Any]]: - return [peer.hfid for peer in self.peers if peer.hfid] - - @property - def peer_hfids_str(self) -> list[str]: - return [peer.hfid_str for peer in self.peers if peer.hfid_str] - - @property - def has_update(self) -> bool: - return self._has_update - - def _generate_input_data(self, allocate_from_pool: bool = False) -> list[dict]: - return [peer._generate_input_data(allocate_from_pool=allocate_from_pool) for peer in self.peers] - - def _generate_mutation_query(self) -> dict[str, Any]: - # Does nothing for now - return {} - - @classmethod - def _generate_query_data(cls, peer_data: dict[str, Any] | None = None, property: bool = False) -> dict: - """Generates the basic structure of a GraphQL query for relationships with multiple nodes. - - Args: - peer_data (dict[str, Union[Any, Dict]], optional): Additional data to be included in the query for each node. - This is used to add extra fields when prefetching related node data in many-to-many relationships. - - Returns: - Dict: A dictionary representing the basic structure of a GraphQL query for multiple related nodes. - It includes count, edges, and node information (ID, display label, and typename), along with additional properties - and any peer_data provided. 
- """ - data: dict[str, Any] = { - "count": None, - "edges": {"node": {"id": None, "hfid": None, "display_label": None, "__typename": None}}, - } - - properties: dict[str, Any] = {} - if property: - for prop_name in PROPERTIES_FLAG: - properties[prop_name] = None - for prop_name in PROPERTIES_OBJECT: - properties[prop_name] = {"id": None, "display_label": None, "__typename": None} - - if properties: - data["edges"]["properties"] = properties - if peer_data: - data["edges"]["node"].update(peer_data) - - return data - - -class RelationshipManager(RelationshipManagerBase): - """Manages relationships of a node in an asynchronous context.""" - - def __init__( - self, - name: str, - client: InfrahubClient, - node: InfrahubNode, - branch: str, - schema: RelationshipSchemaAPI, - data: Any | dict, - ): - """ - Args: - name (str): The name of the relationship. - client (InfrahubClient): The client used to interact with the backend. - node (InfrahubNode): The node to which the relationship belongs. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Initial data for the relationships. - """ - self.client = client - self.node = node - - super().__init__(name=name, schema=schema, branch=branch) - - self.initialized = data is not None - self._has_update = False - - if data is None: - return - - if isinstance(data, list): - for item in data: - self.peers.append( - RelatedNode(name=name, client=self.client, branch=self.branch, schema=schema, data=item) + schema = [rel for rel in self._schema.relationships if rel.name == external_name][0] + if isinstance(node, InfrahubNode): + setattr( + self, + internal_name, + RelatedNode( + name=external_name, branch=node._branch, client=node._client, schema=schema, data=value + ), ) - elif isinstance(data, dict) and "edges" in data: - for item in data["edges"]: - self.peers.append( - RelatedNode(name=name, client=self.client, branch=self.branch, schema=schema, data=item) + else: + setattr( + self, + internal_name, + RelatedNodeSync( + name=external_name, branch=node._branch, client=node._client, schema=schema, data=value + ), ) - else: - raise ValueError(f"Unexpected format for {name} found a {type(data)}, {data}") - - def __getitem__(self, item: int) -> RelatedNode: - return self.peers[item] # type: ignore[return-value] - - async def fetch(self) -> None: - if not self.initialized: - exclude = self.node._schema.relationship_names + self.node._schema.attribute_names - exclude.remove(self.schema.name) - node = await self.client.get( - kind=self.node._schema.kind, - id=self.node.id, - branch=self.branch, - include=[self.schema.name], - exclude=exclude, - ) - rm = getattr(node, self.schema.name) - self.peers = rm.peers - self.initialized = True - - for peer in self.peers: - await peer.fetch() # type: ignore[misc] - - def add(self, data: str | RelatedNode | dict) -> None: - """Add a new peer to this relationship.""" - if not self.initialized: - raise UninitializedError("Must call fetch() on RelationshipManager before editing members") - new_node = RelatedNode(schema=self.schema, client=self.client, branch=self.branch, data=data) - - if (new_node.id and new_node.id not in self.peer_ids) or ( - new_node.hfid and new_node.hfid not in self.peer_hfids - ): - self.peers.append(new_node) - self._has_update = True - - def extend(self, data: Iterable[str | RelatedNode | dict]) -> None: - """Add new peers to this relationship.""" - for d in data: - self.add(d) - - def remove(self, data: 
str | RelatedNode | dict) -> None: - if not self.initialized: - raise UninitializedError("Must call fetch() on RelationshipManager before editing members") - node_to_remove = RelatedNode(schema=self.schema, client=self.client, branch=self.branch, data=data) - - if node_to_remove.id and node_to_remove.id in self.peer_ids: - idx = self.peer_ids.index(node_to_remove.id) - if self.peers[idx].id != node_to_remove.id: - raise IndexError(f"Unexpected situation, the node with the index {idx} should be {node_to_remove.id}") - - self.peers.pop(idx) - self._has_update = True - elif node_to_remove.hfid and node_to_remove.hfid in self.peer_hfids: - idx = self.peer_hfids.index(node_to_remove.hfid) - if self.peers[idx].hfid != node_to_remove.hfid: - raise IndexError(f"Unexpected situation, the node with the index {idx} should be {node_to_remove.hfid}") - - self.peers.pop(idx) - self._has_update = True - - -class RelationshipManagerSync(RelationshipManagerBase): - """Manages relationships of a node in a synchronous context.""" - - def __init__( - self, - name: str, - client: InfrahubClientSync, - node: InfrahubNodeSync, - branch: str, - schema: RelationshipSchemaAPI, - data: Any | dict, - ): - """ - Args: - name (str): The name of the relationship. - client (InfrahubClientSync): The client used to interact with the backend synchronously. - node (InfrahubNodeSync): The node to which the relationship belongs. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Initial data for the relationships. - """ - self.client = client - self.node = node - - super().__init__(name=name, schema=schema, branch=branch) - - self.initialized = data is not None - self._has_update = False - - if data is None: - return - - if isinstance(data, list): - for item in data: - self.peers.append( - RelatedNodeSync(name=name, client=self.client, branch=self.branch, schema=schema, data=item) - ) - elif isinstance(data, dict) and "edges" in data: - for item in data["edges"]: - self.peers.append( - RelatedNodeSync(name=name, client=self.client, branch=self.branch, schema=schema, data=item) - ) - else: - raise ValueError(f"Unexpected format for {name} found a {type(data)}, {data}") - - def __getitem__(self, item: int) -> RelatedNodeSync: - return self.peers[item] # type: ignore[return-value] - - def fetch(self) -> None: - if not self.initialized: - exclude = self.node._schema.relationship_names + self.node._schema.attribute_names - exclude.remove(self.schema.name) - node = self.client.get( - kind=self.node._schema.kind, - id=self.node.id, - branch=self.branch, - include=[self.schema.name], - exclude=exclude, - ) - rm = getattr(node, self.schema.name) - self.peers = rm.peers - self.initialized = True - - for peer in self.peers: - peer.fetch() - - def add(self, data: str | RelatedNodeSync | dict) -> None: - """Add a new peer to this relationship.""" - if not self.initialized: - raise UninitializedError("Must call fetch() on RelationshipManager before editing members") - new_node = RelatedNodeSync(schema=self.schema, client=self.client, branch=self.branch, data=data) - - if (new_node.id and new_node.id not in self.peer_ids) or ( - new_node.hfid and new_node.hfid not in self.peer_hfids - ): - self.peers.append(new_node) - self._has_update = True - - def extend(self, data: Iterable[str | RelatedNodeSync | dict]) -> None: - """Add new peers to this relationship.""" - for d in data: - self.add(d) - - def remove(self, data: str | RelatedNodeSync | 
dict) -> None: - if not self.initialized: - raise UninitializedError("Must call fetch() on RelationshipManager before editing members") - node_to_remove = RelatedNodeSync(schema=self.schema, client=self.client, branch=self.branch, data=data) - - if node_to_remove.id and node_to_remove.id in self.peer_ids: - idx = self.peer_ids.index(node_to_remove.id) - if self.peers[idx].id != node_to_remove.id: - raise IndexError(f"Unexpected situation, the node with the index {idx} should be {node_to_remove.id}") - self.peers.pop(idx) - self._has_update = True - - elif node_to_remove.hfid and node_to_remove.hfid in self.peer_hfids: - idx = self.peer_hfids.index(node_to_remove.hfid) - if self.peers[idx].hfid != node_to_remove.hfid: - raise IndexError(f"Unexpected situation, the node with the index {idx} should be {node_to_remove.hfid}") - - self.peers.pop(idx) - self._has_update = True + return property(prop_getter, prop_setter) class InfrahubNodeBase: @@ -2179,68 +1545,3 @@ def get_pool_resources_utilization(self) -> list[dict[str, Any]]: if response[graphql_query_name].get("count", 0): return [edge["node"] for edge in response[graphql_query_name]["edges"]] return [] - - -class NodeProperty: - """Represents a property of a node, typically used for metadata like display labels.""" - - def __init__(self, data: dict | str): - """ - Args: - data (Union[dict, str]): Data representing the node property. - """ - self.id = None - self.display_label = None - self.typename = None - - if isinstance(data, str): - self.id = data - elif isinstance(data, dict): - self.id = data.get("id", None) - self.display_label = data.get("display_label", None) - self.typename = data.get("__typename", None) - - def _generate_input_data(self) -> str | None: - return self.id - - -def generate_relationship_property(node: InfrahubNode | InfrahubNodeSync, name: str) -> property: - """Generates a property that stores values under a private non-public name. - - Args: - node (Union[InfrahubNode, InfrahubNodeSync]): The node instance. - name (str): The name of the relationship property. - - Returns: - A property object for managing the relationship. - - """ - internal_name = "_" + name.lower() - external_name = name - - def prop_getter(self: InfrahubNodeBase) -> Any: - return getattr(self, internal_name) - - def prop_setter(self: InfrahubNodeBase, value: Any) -> None: - if isinstance(value, RelatedNodeBase) or value is None: - setattr(self, internal_name, value) - else: - schema = [rel for rel in self._schema.relationships if rel.name == external_name][0] - if isinstance(node, InfrahubNode): - setattr( - self, - internal_name, - RelatedNode( - name=external_name, branch=node._branch, client=node._client, schema=schema, data=value - ), - ) - else: - setattr( - self, - internal_name, - RelatedNodeSync( - name=external_name, branch=node._branch, client=node._client, schema=schema, data=value - ), - ) - - return property(prop_getter, prop_setter) diff --git a/infrahub_sdk/node/property.py b/infrahub_sdk/node/property.py new file mode 100644 index 00000000..8de0ab43 --- /dev/null +++ b/infrahub_sdk/node/property.py @@ -0,0 +1,24 @@ +from __future__ import annotations + + +class NodeProperty: + """Represents a property of a node, typically used for metadata like display labels.""" + + def __init__(self, data: dict | str): + """ + Args: + data (Union[dict, str]): Data representing the node property. 
+ """ + self.id = None + self.display_label = None + self.typename = None + + if isinstance(data, str): + self.id = data + elif isinstance(data, dict): + self.id = data.get("id", None) + self.display_label = data.get("display_label", None) + self.typename = data.get("__typename", None) + + def _generate_input_data(self) -> str | None: + return self.id diff --git a/infrahub_sdk/node/related_node.py b/infrahub_sdk/node/related_node.py new file mode 100644 index 00000000..60d46ca9 --- /dev/null +++ b/infrahub_sdk/node/related_node.py @@ -0,0 +1,266 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ..exceptions import ( + Error, +) +from ..protocols_base import CoreNodeBase +from .constants import PROPERTIES_FLAG, PROPERTIES_OBJECT + +if TYPE_CHECKING: + from ..client import InfrahubClient, InfrahubClientSync + from ..schema import RelationshipSchemaAPI + from .node import InfrahubNode, InfrahubNodeSync + + +class RelatedNodeBase: + """Base class for representing a related node in a relationship.""" + + def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict, name: str | None = None): + """ + Args: + branch (str): The branch where the related node resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Data representing the related node. + name (Optional[str]): The name of the related node. + """ + self.schema = schema + self.name = name + + self._branch = branch + + self._properties_flag = PROPERTIES_FLAG + self._properties_object = PROPERTIES_OBJECT + self._properties = self._properties_flag + self._properties_object + + self._peer = None + self._id: str | None = None + self._hfid: list[str] | None = None + self._display_label: str | None = None + self._typename: str | None = None + + if isinstance(data, (CoreNodeBase)): + self._peer = data + for prop in self._properties: + setattr(self, prop, None) + + elif isinstance(data, list): + data = {"hfid": data} + elif not isinstance(data, dict): + data = {"id": data} + + if isinstance(data, dict): + # To support both with and without pagination, we split data into node_data and properties_data + # We should probably clean that once we'll remove the code without pagination. 
+ node_data = data.get("node", data) + properties_data = data.get("properties", data) + + if node_data: + self._id = node_data.get("id", None) + self._hfid = node_data.get("hfid", None) + self._kind = node_data.get("kind", None) + self._display_label = node_data.get("display_label", None) + self._typename = node_data.get("__typename", None) + + self.updated_at: str | None = data.get("updated_at", data.get("_relation__updated_at", None)) + + # FIXME, we won't need that once we are only supporting paginated results + if self._typename and self._typename.startswith("Related"): + self._typename = self._typename[7:] + + for prop in self._properties: + prop_data = properties_data.get(prop, properties_data.get(f"_relation__{prop}", None)) + if prop_data and isinstance(prop_data, dict) and "id" in prop_data: + setattr(self, prop, prop_data["id"]) + elif prop_data and isinstance(prop_data, (str, bool)): + setattr(self, prop, prop_data) + else: + setattr(self, prop, None) + + @property + def id(self) -> str | None: + if self._peer: + return self._peer.id + return self._id + + @property + def hfid(self) -> list[Any] | None: + if self._peer: + return self._peer.hfid + return self._hfid + + @property + def hfid_str(self) -> str | None: + if self._peer and self.hfid: + return self._peer.get_human_friendly_id_as_string(include_kind=True) + return None + + @property + def is_resource_pool(self) -> bool: + if self._peer: + return self._peer.is_resource_pool() + return False + + @property + def initialized(self) -> bool: + return bool(self.id) or bool(self.hfid) + + @property + def display_label(self) -> str | None: + if self._peer: + return self._peer.display_label + return self._display_label + + @property + def typename(self) -> str | None: + if self._peer: + return self._peer.typename + return self._typename + + def _generate_input_data(self, allocate_from_pool: bool = False) -> dict[str, Any]: + data: dict[str, Any] = {} + + if self.is_resource_pool and allocate_from_pool: + return {"from_pool": {"id": self.id}} + + if self.id is not None: + data["id"] = self.id + elif self.hfid is not None: + data["hfid"] = self.hfid + if self._kind is not None: + data["kind"] = self._kind + + for prop_name in self._properties: + if getattr(self, prop_name) is not None: + data[f"_relation__{prop_name}"] = getattr(self, prop_name) + + return data + + def _generate_mutation_query(self) -> dict[str, Any]: + if self.name and self.is_resource_pool: + # If a related node points to a pool, ask for the ID of the pool allocated resource + return {self.name: {"node": {"id": None, "display_label": None, "__typename": None}}} + return {} + + @classmethod + def _generate_query_data(cls, peer_data: dict[str, Any] | None = None, property: bool = False) -> dict: + """Generates the basic structure of a GraphQL query for a single relationship. + + Args: + peer_data (dict[str, Union[Any, Dict]], optional): Additional data to be included in the query for the node. + This is used to add extra fields when prefetching related node data. + + Returns: + Dict: A dictionary representing the basic structure of a GraphQL query, including the node's ID, display label, + and typename. The method also includes additional properties and any peer_data provided. 
+ """ + data: dict[str, Any] = {"node": {"id": None, "hfid": None, "display_label": None, "__typename": None}} + properties: dict[str, Any] = {} + + if property: + for prop_name in PROPERTIES_FLAG: + properties[prop_name] = None + for prop_name in PROPERTIES_OBJECT: + properties[prop_name] = {"id": None, "display_label": None, "__typename": None} + + if properties: + data["properties"] = properties + if peer_data: + data["node"].update(peer_data) + + return data + + +class RelatedNode(RelatedNodeBase): + """Represents a RelatedNodeBase in an asynchronous context.""" + + def __init__( + self, + client: InfrahubClient, + branch: str, + schema: RelationshipSchemaAPI, + data: Any | dict, + name: str | None = None, + ): + """ + Args: + client (InfrahubClient): The client used to interact with the backend asynchronously. + branch (str): The branch where the related node resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Data representing the related node. + name (Optional[str]): The name of the related node. + """ + self._client = client + super().__init__(branch=branch, schema=schema, data=data, name=name) + + async def fetch(self, timeout: int | None = None) -> None: + if not self.id or not self.typename: + raise Error("Unable to fetch the peer, id and/or typename are not defined") + + self._peer = await self._client.get( + kind=self.typename, id=self.id, populate_store=True, branch=self._branch, timeout=timeout + ) + + @property + def peer(self) -> InfrahubNode: + return self.get() + + def get(self) -> InfrahubNode: + if self._peer: + return self._peer # type: ignore[return-value] + + if self.id and self.typename: + return self._client.store.get(key=self.id, kind=self.typename, branch=self._branch) # type: ignore[return-value] + + if self.hfid_str: + return self._client.store.get(key=self.hfid_str, branch=self._branch) # type: ignore[return-value] + + raise ValueError("Node must have at least one identifier (ID or HFID) to query it.") + + +class RelatedNodeSync(RelatedNodeBase): + """Represents a related node in a synchronous context.""" + + def __init__( + self, + client: InfrahubClientSync, + branch: str, + schema: RelationshipSchemaAPI, + data: Any | dict, + name: str | None = None, + ): + """ + Args: + client (InfrahubClientSync): The client used to interact with the backend synchronously. + branch (str): The branch where the related node resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Data representing the related node. + name (Optional[str]): The name of the related node. 
+ """ + self._client = client + super().__init__(branch=branch, schema=schema, data=data, name=name) + + def fetch(self, timeout: int | None = None) -> None: + if not self.id or not self.typename: + raise Error("Unable to fetch the peer, id and/or typename are not defined") + + self._peer = self._client.get( + kind=self.typename, id=self.id, populate_store=True, branch=self._branch, timeout=timeout + ) + + @property + def peer(self) -> InfrahubNodeSync: + return self.get() + + def get(self) -> InfrahubNodeSync: + if self._peer: + return self._peer # type: ignore[return-value] + + if self.id and self.typename: + return self._client.store.get(key=self.id, kind=self.typename, branch=self._branch) # type: ignore[return-value] + + if self.hfid_str: + return self._client.store.get(key=self.hfid_str, branch=self._branch) # type: ignore[return-value] + + raise ValueError("Node must have at least one identifier (ID or HFID) to query it.") diff --git a/infrahub_sdk/node/relationship.py b/infrahub_sdk/node/relationship.py new file mode 100644 index 00000000..c527dc50 --- /dev/null +++ b/infrahub_sdk/node/relationship.py @@ -0,0 +1,302 @@ +from __future__ import annotations + +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any + +from ..exceptions import ( + UninitializedError, +) +from .constants import PROPERTIES_FLAG, PROPERTIES_OBJECT +from .related_node import RelatedNode, RelatedNodeSync + +if TYPE_CHECKING: + from ..client import InfrahubClient, InfrahubClientSync + from ..schema import RelationshipSchemaAPI + from .node import InfrahubNode, InfrahubNodeSync + + +class RelationshipManagerBase: + """Base class for RelationshipManager and RelationshipManagerSync""" + + def __init__(self, name: str, branch: str, schema: RelationshipSchemaAPI): + """ + Args: + name (str): The name of the relationship. + branch (str): The branch where the relationship resides. + schema (RelationshipSchema): The schema of the relationship. + """ + self.initialized: bool = False + self._has_update: bool = False + self.name = name + self.schema = schema + self.branch = branch + + self._properties_flag = PROPERTIES_FLAG + self._properties_object = PROPERTIES_OBJECT + self._properties = self._properties_flag + self._properties_object + + self.peers: list[RelatedNode | RelatedNodeSync] = [] + + @property + def peer_ids(self) -> list[str]: + return [peer.id for peer in self.peers if peer.id] + + @property + def peer_hfids(self) -> list[list[Any]]: + return [peer.hfid for peer in self.peers if peer.hfid] + + @property + def peer_hfids_str(self) -> list[str]: + return [peer.hfid_str for peer in self.peers if peer.hfid_str] + + @property + def has_update(self) -> bool: + return self._has_update + + def _generate_input_data(self, allocate_from_pool: bool = False) -> list[dict]: + return [peer._generate_input_data(allocate_from_pool=allocate_from_pool) for peer in self.peers] + + def _generate_mutation_query(self) -> dict[str, Any]: + # Does nothing for now + return {} + + @classmethod + def _generate_query_data(cls, peer_data: dict[str, Any] | None = None, property: bool = False) -> dict: + """Generates the basic structure of a GraphQL query for relationships with multiple nodes. + + Args: + peer_data (dict[str, Union[Any, Dict]], optional): Additional data to be included in the query for each node. + This is used to add extra fields when prefetching related node data in many-to-many relationships. 
+ + Returns: + Dict: A dictionary representing the basic structure of a GraphQL query for multiple related nodes. + It includes count, edges, and node information (ID, display label, and typename), along with additional properties + and any peer_data provided. + """ + data: dict[str, Any] = { + "count": None, + "edges": {"node": {"id": None, "hfid": None, "display_label": None, "__typename": None}}, + } + + properties: dict[str, Any] = {} + if property: + for prop_name in PROPERTIES_FLAG: + properties[prop_name] = None + for prop_name in PROPERTIES_OBJECT: + properties[prop_name] = {"id": None, "display_label": None, "__typename": None} + + if properties: + data["edges"]["properties"] = properties + if peer_data: + data["edges"]["node"].update(peer_data) + + return data + + +class RelationshipManager(RelationshipManagerBase): + """Manages relationships of a node in an asynchronous context.""" + + def __init__( + self, + name: str, + client: InfrahubClient, + node: InfrahubNode, + branch: str, + schema: RelationshipSchemaAPI, + data: Any | dict, + ): + """ + Args: + name (str): The name of the relationship. + client (InfrahubClient): The client used to interact with the backend. + node (InfrahubNode): The node to which the relationship belongs. + branch (str): The branch where the relationship resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Initial data for the relationships. + """ + self.client = client + self.node = node + + super().__init__(name=name, schema=schema, branch=branch) + + self.initialized = data is not None + self._has_update = False + + if data is None: + return + + if isinstance(data, list): + for item in data: + self.peers.append( + RelatedNode(name=name, client=self.client, branch=self.branch, schema=schema, data=item) + ) + elif isinstance(data, dict) and "edges" in data: + for item in data["edges"]: + self.peers.append( + RelatedNode(name=name, client=self.client, branch=self.branch, schema=schema, data=item) + ) + else: + raise ValueError(f"Unexpected format for {name} found a {type(data)}, {data}") + + def __getitem__(self, item: int) -> RelatedNode: + return self.peers[item] # type: ignore[return-value] + + async def fetch(self) -> None: + if not self.initialized: + exclude = self.node._schema.relationship_names + self.node._schema.attribute_names + exclude.remove(self.schema.name) + node = await self.client.get( + kind=self.node._schema.kind, + id=self.node.id, + branch=self.branch, + include=[self.schema.name], + exclude=exclude, + ) + rm = getattr(node, self.schema.name) + self.peers = rm.peers + self.initialized = True + + for peer in self.peers: + await peer.fetch() # type: ignore[misc] + + def add(self, data: str | RelatedNode | dict) -> None: + """Add a new peer to this relationship.""" + if not self.initialized: + raise UninitializedError("Must call fetch() on RelationshipManager before editing members") + new_node = RelatedNode(schema=self.schema, client=self.client, branch=self.branch, data=data) + + if (new_node.id and new_node.id not in self.peer_ids) or ( + new_node.hfid and new_node.hfid not in self.peer_hfids + ): + self.peers.append(new_node) + self._has_update = True + + def extend(self, data: Iterable[str | RelatedNode | dict]) -> None: + """Add new peers to this relationship.""" + for d in data: + self.add(d) + + def remove(self, data: str | RelatedNode | dict) -> None: + if not self.initialized: + raise UninitializedError("Must call fetch() on RelationshipManager before editing members") + 
node_to_remove = RelatedNode(schema=self.schema, client=self.client, branch=self.branch, data=data) + + if node_to_remove.id and node_to_remove.id in self.peer_ids: + idx = self.peer_ids.index(node_to_remove.id) + if self.peers[idx].id != node_to_remove.id: + raise IndexError(f"Unexpected situation, the node with the index {idx} should be {node_to_remove.id}") + + self.peers.pop(idx) + self._has_update = True + + elif node_to_remove.hfid and node_to_remove.hfid in self.peer_hfids: + idx = self.peer_hfids.index(node_to_remove.hfid) + if self.peers[idx].hfid != node_to_remove.hfid: + raise IndexError(f"Unexpected situation, the node with the index {idx} should be {node_to_remove.hfid}") + + self.peers.pop(idx) + self._has_update = True + + +class RelationshipManagerSync(RelationshipManagerBase): + """Manages relationships of a node in a synchronous context.""" + + def __init__( + self, + name: str, + client: InfrahubClientSync, + node: InfrahubNodeSync, + branch: str, + schema: RelationshipSchemaAPI, + data: Any | dict, + ): + """ + Args: + name (str): The name of the relationship. + client (InfrahubClientSync): The client used to interact with the backend synchronously. + node (InfrahubNodeSync): The node to which the relationship belongs. + branch (str): The branch where the relationship resides. + schema (RelationshipSchema): The schema of the relationship. + data (Union[Any, dict]): Initial data for the relationships. + """ + self.client = client + self.node = node + + super().__init__(name=name, schema=schema, branch=branch) + + self.initialized = data is not None + self._has_update = False + + if data is None: + return + + if isinstance(data, list): + for item in data: + self.peers.append( + RelatedNodeSync(name=name, client=self.client, branch=self.branch, schema=schema, data=item) + ) + elif isinstance(data, dict) and "edges" in data: + for item in data["edges"]: + self.peers.append( + RelatedNodeSync(name=name, client=self.client, branch=self.branch, schema=schema, data=item) + ) + else: + raise ValueError(f"Unexpected format for {name} found a {type(data)}, {data}") + + def __getitem__(self, item: int) -> RelatedNodeSync: + return self.peers[item] # type: ignore[return-value] + + def fetch(self) -> None: + if not self.initialized: + exclude = self.node._schema.relationship_names + self.node._schema.attribute_names + exclude.remove(self.schema.name) + node = self.client.get( + kind=self.node._schema.kind, + id=self.node.id, + branch=self.branch, + include=[self.schema.name], + exclude=exclude, + ) + rm = getattr(node, self.schema.name) + self.peers = rm.peers + self.initialized = True + + for peer in self.peers: + peer.fetch() + + def add(self, data: str | RelatedNodeSync | dict) -> None: + """Add a new peer to this relationship.""" + if not self.initialized: + raise UninitializedError("Must call fetch() on RelationshipManager before editing members") + new_node = RelatedNodeSync(schema=self.schema, client=self.client, branch=self.branch, data=data) + + if (new_node.id and new_node.id not in self.peer_ids) or ( + new_node.hfid and new_node.hfid not in self.peer_hfids + ): + self.peers.append(new_node) + self._has_update = True + + def extend(self, data: Iterable[str | RelatedNodeSync | dict]) -> None: + """Add new peers to this relationship.""" + for d in data: + self.add(d) + + def remove(self, data: str | RelatedNodeSync | dict) -> None: + if not self.initialized: + raise UninitializedError("Must call fetch() on RelationshipManager before editing members") + node_to_remove = 
RelatedNodeSync(schema=self.schema, client=self.client, branch=self.branch, data=data) + + if node_to_remove.id and node_to_remove.id in self.peer_ids: + idx = self.peer_ids.index(node_to_remove.id) + if self.peers[idx].id != node_to_remove.id: + raise IndexError(f"Unexpected situation, the node with the index {idx} should be {node_to_remove.id}") + self.peers.pop(idx) + self._has_update = True + + elif node_to_remove.hfid and node_to_remove.hfid in self.peer_hfids: + idx = self.peer_hfids.index(node_to_remove.hfid) + if self.peers[idx].hfid != node_to_remove.hfid: + raise IndexError(f"Unexpected situation, the node with the index {idx} should be {node_to_remove.hfid}") + + self.peers.pop(idx) + self._has_update = True diff --git a/infrahub_sdk/protocols_base.py b/infrahub_sdk/protocols_base.py index 2d533ac7..a47d95ef 100644 --- a/infrahub_sdk/protocols_base.py +++ b/infrahub_sdk/protocols_base.py @@ -10,11 +10,42 @@ @runtime_checkable -class RelatedNode(Protocol): ... +class RelatedNodeBase(Protocol): + @property + def id(self) -> str | None: ... + + @property + def hfid(self) -> list[Any] | None: ... + + @property + def hfid_str(self) -> str | None: ... + + @property + def is_resource_pool(self) -> bool: ... + + @property + def initialized(self) -> bool: ... + + @property + def display_label(self) -> str | None: ... + + @property + def typename(self) -> str | None: ... + + def _generate_input_data(self, allocate_from_pool: bool = False) -> dict[str, Any]: ... + + def _generate_mutation_query(self) -> dict[str, Any]: ... + + @classmethod + def _generate_query_data(cls, peer_data: dict[str, Any] | None = None, property: bool = False) -> dict: ... + + +@runtime_checkable +class RelatedNode(RelatedNodeBase, Protocol): ... @runtime_checkable -class RelatedNodeSync(Protocol): ... +class RelatedNodeSync(RelatedNodeBase, Protocol): ... @runtime_checkable @@ -147,6 +178,7 @@ class CoreNodeBase(Protocol): _internal_id: str id: str # NOTE this is incorrect, should be str | None display_label: str | None + typename: str | None @property def hfid(self) -> list[str] | None: ... 
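As a usage sketch that is not part of the patch itself: because `RelatedNodeBase` is now a runtime-checkable protocol exposing the shared read-only members, helper code can be annotated against it and accept both the async `RelatedNode` and the sync `RelatedNodeSync`. The helper below is illustrative (the name `describe_peer` is made up, and it assumes a package build that includes these changes); it relies only on members declared in the protocol above.

```python
from infrahub_sdk.protocols_base import RelatedNodeBase


def describe_peer(peer: RelatedNodeBase) -> str:
    """Return a short, human-readable label for a related node.

    Works for both RelatedNode and RelatedNodeSync instances because the
    protocol only requires the shared read-only members (id, hfid_str,
    display_label, ...).
    """
    if peer.hfid_str:
        return peer.hfid_str
    return peer.display_label or peer.id or "<uninitialized>"
```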
diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py index fbe381bf..fdaa83bd 100644 --- a/tests/unit/sdk/test_node.py +++ b/tests/unit/sdk/test_node.py @@ -7,7 +7,6 @@ from infrahub_sdk.exceptions import NodeNotFoundError from infrahub_sdk.node import ( - SAFE_VALUE, InfrahubNode, InfrahubNodeBase, InfrahubNodeSync, @@ -15,6 +14,7 @@ RelationshipManagerBase, parse_human_friendly_id, ) +from infrahub_sdk.node.constants import SAFE_VALUE from infrahub_sdk.schema import GenericSchema, NodeSchemaAPI if TYPE_CHECKING: From aa8c792ea3f6bca00d2a8f2c8ec269f6eb138b8e Mon Sep 17 00:00:00 2001 From: Damien Garros Date: Mon, 12 May 2025 15:13:27 +0200 Subject: [PATCH 07/13] Expose everything from node module --- infrahub_sdk/node/__init__.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/infrahub_sdk/node/__init__.py b/infrahub_sdk/node/__init__.py index 6862aac3..c3b0d5fe 100644 --- a/infrahub_sdk/node/__init__.py +++ b/infrahub_sdk/node/__init__.py @@ -1,14 +1,32 @@ from __future__ import annotations -from .constants import HFID_STR_SEPARATOR +from .constants import ( + ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE, + ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE, + ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE, + HFID_STR_SEPARATOR, + IP_TYPES, + PROPERTIES_FLAG, + PROPERTIES_OBJECT, + SAFE_VALUE, +) from .node import InfrahubNode, InfrahubNodeBase, InfrahubNodeSync +from .property import NodeProperty from .related_node import RelatedNode, RelatedNodeBase, RelatedNodeSync from .relationship import RelationshipManager, RelationshipManagerBase, RelationshipManagerSync __all__ = [ + "ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE", + "ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE", + "ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE", + "IP_TYPES", + "PROPERTIES_FLAG", + "PROPERTIES_OBJECT", + "SAFE_VALUE", "InfrahubNode", "InfrahubNodeBase", "InfrahubNodeSync", + "NodeProperty", "RelatedNode", "RelatedNodeBase", "RelatedNodeSync", From 5a27695653794ca7e3be437335f3214abedbb155 Mon Sep 17 00:00:00 2001 From: Damien Garros Date: Sat, 17 May 2025 06:51:59 +0200 Subject: [PATCH 08/13] Move parse_human_friendly_id into parsers.py --- infrahub_sdk/node/__init__.py | 15 +++------------ infrahub_sdk/node/parsers.py | 15 +++++++++++++++ infrahub_sdk/store.py | 2 +- 3 files changed, 19 insertions(+), 13 deletions(-) create mode 100644 infrahub_sdk/node/parsers.py diff --git a/infrahub_sdk/node/__init__.py b/infrahub_sdk/node/__init__.py index c3b0d5fe..a2d71a87 100644 --- a/infrahub_sdk/node/__init__.py +++ b/infrahub_sdk/node/__init__.py @@ -11,6 +11,7 @@ SAFE_VALUE, ) from .node import InfrahubNode, InfrahubNodeBase, InfrahubNodeSync +from .parsers import parse_human_friendly_id from .property import NodeProperty from .related_node import RelatedNode, RelatedNodeBase, RelatedNodeSync from .relationship import RelationshipManager, RelationshipManagerBase, RelationshipManagerSync @@ -19,6 +20,7 @@ "ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE", "ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE", "ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE", + "HFID_STR_SEPARATOR", "IP_TYPES", "PROPERTIES_FLAG", "PROPERTIES_OBJECT", @@ -33,16 +35,5 @@ "RelationshipManager", "RelationshipManagerBase", "RelationshipManagerSync", + "parse_human_friendly_id", ] - - -def parse_human_friendly_id(hfid: str | list[str]) -> tuple[str | None, list[str]]: - """Parse a human friendly ID into a kind and an identifier.""" - if isinstance(hfid, 
str): - hfid_parts = hfid.split(HFID_STR_SEPARATOR) - if len(hfid_parts) == 1: - return None, hfid_parts - return hfid_parts[0], hfid_parts[1:] - if isinstance(hfid, list): - return None, hfid - raise ValueError(f"Invalid human friendly ID: {hfid}") diff --git a/infrahub_sdk/node/parsers.py b/infrahub_sdk/node/parsers.py new file mode 100644 index 00000000..0ae830f1 --- /dev/null +++ b/infrahub_sdk/node/parsers.py @@ -0,0 +1,15 @@ +from __future__ import annotations + +from .constants import HFID_STR_SEPARATOR + + +def parse_human_friendly_id(hfid: str | list[str]) -> tuple[str | None, list[str]]: + """Parse a human friendly ID into a kind and an identifier.""" + if isinstance(hfid, str): + hfid_parts = hfid.split(HFID_STR_SEPARATOR) + if len(hfid_parts) == 1: + return None, hfid_parts + return hfid_parts[0], hfid_parts[1:] + if isinstance(hfid, list): + return None, hfid + raise ValueError(f"Invalid human friendly ID: {hfid}") diff --git a/infrahub_sdk/store.py b/infrahub_sdk/store.py index 99659fc0..6420495b 100644 --- a/infrahub_sdk/store.py +++ b/infrahub_sdk/store.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Literal, overload from .exceptions import NodeInvalidError, NodeNotFoundError -from .node import parse_human_friendly_id +from .node.parsers import parse_human_friendly_id if TYPE_CHECKING: from .client import SchemaType, SchemaTypeSync From d7ec69f7b6f803c8ccecbcbfc3fb6ad430059e53 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 09:35:04 +0000 Subject: [PATCH 09/13] Bump DavidAnson/markdownlint-cli2-action from 19 to 20 Bumps [DavidAnson/markdownlint-cli2-action](https://github.com/davidanson/markdownlint-cli2-action) from 19 to 20. - [Release notes](https://github.com/davidanson/markdownlint-cli2-action/releases) - [Commits](https://github.com/davidanson/markdownlint-cli2-action/compare/v19...v20) --- updated-dependencies: - dependency-name: DavidAnson/markdownlint-cli2-action dependency-version: '20' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d799ad1f..e07359f7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -92,7 +92,7 @@ jobs: - name: "Check out repository code" uses: "actions/checkout@v4" - name: "Linting: markdownlint" - uses: DavidAnson/markdownlint-cli2-action@v19 + uses: DavidAnson/markdownlint-cli2-action@v20 with: config: .markdownlint.yaml globs: | From f6f78425845c1c63e5fe8849a8b31ff19beff739 Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Wed, 21 May 2025 13:04:06 +0200 Subject: [PATCH 10/13] Trigger markdown lint on github action updates & fix rules --- .github/workflows/ci.yml | 4 +++- .markdownlint.yaml | 2 ++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e07359f7..58cd3cc9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -84,7 +84,9 @@ jobs: markdown-lint: - if: needs.files-changed.outputs.documentation == 'true' + if: | + needs.files-changed.outputs.documentation == 'true' || + needs.files-changed.outputs.github_workflows == 'true' needs: ["files-changed"] runs-on: "ubuntu-latest" timeout-minutes: 5 diff --git a/.markdownlint.yaml b/.markdownlint.yaml index 9b33d226..f0087fa9 100644 --- a/.markdownlint.yaml +++ b/.markdownlint.yaml @@ -11,3 +11,5 @@ MD034: false # no-bare-urls MD041: false # allow 1st line to not be a top-level heading (required for Towncrier) MD045: false # no alt text around images MD047: false # single trailing newline +MD059: # Link descriptions that are prohibited + prohibited_texts: [] From 5ad92f6731aee479b8fc546403ee72d72ade6d44 Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Wed, 21 May 2025 08:49:32 +0200 Subject: [PATCH 11/13] Add NumberPool kind --- changelog/+543efbd1.added.md | 1 + infrahub_sdk/schema/main.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog/+543efbd1.added.md diff --git a/changelog/+543efbd1.added.md b/changelog/+543efbd1.added.md new file mode 100644 index 00000000..f728f279 --- /dev/null +++ b/changelog/+543efbd1.added.md @@ -0,0 +1 @@ +Added NumberPool as a new attribute kind, for support in Infrahub 1.3 diff --git a/infrahub_sdk/schema/main.py b/infrahub_sdk/schema/main.py index e2985d7b..ba18cf49 100644 --- a/infrahub_sdk/schema/main.py +++ b/infrahub_sdk/schema/main.py @@ -49,6 +49,7 @@ class AttributeKind(str, Enum): TEXTAREA = "TextArea" DATETIME = "DateTime" NUMBER = "Number" + NUMBERPOOL = "NumberPool" DROPDOWN = "Dropdown" EMAIL = "Email" PASSWORD = "Password" # noqa: S105 From f41402791b0d519b2deddd631abb1d14d08d8f4c Mon Sep 17 00:00:00 2001 From: LucasG0 <44552904+LucasG0@users.noreply.github.com> Date: Mon, 26 May 2025 16:36:02 +0200 Subject: [PATCH 12/13] Add objects field within InfrahubConfig (#422) --- infrahub_sdk/client.py | 19 +++- infrahub_sdk/protocols.py | 112 +++++++++++++++++++++ infrahub_sdk/query_groups.py | 15 ++- infrahub_sdk/schema/repository.py | 16 +++ infrahub_sdk/spec/object.py | 2 +- infrahub_sdk/testing/schemas/car_person.py | 1 + 6 files changed, 160 insertions(+), 5 deletions(-) diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index bfea914c..dc1f539f 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -172,11 +172,18 @@ def start_tracking( params: dict[str, Any] | None = None, delete_unused_nodes: bool = False, group_type: str | None = None, + group_params: dict[str, 
Any] | None = None, + branch: str | None = None, ) -> Self: self.mode = InfrahubClientMode.TRACKING identifier = identifier or self.identifier or "python-sdk" self.set_context_properties( - identifier=identifier, params=params, delete_unused_nodes=delete_unused_nodes, group_type=group_type + identifier=identifier, + params=params, + delete_unused_nodes=delete_unused_nodes, + group_type=group_type, + group_params=group_params, + branch=branch, ) return self @@ -187,14 +194,22 @@ def set_context_properties( delete_unused_nodes: bool = True, reset: bool = True, group_type: str | None = None, + group_params: dict[str, Any] | None = None, + branch: str | None = None, ) -> None: if reset: if isinstance(self, InfrahubClient): self.group_context = InfrahubGroupContext(self) elif isinstance(self, InfrahubClientSync): self.group_context = InfrahubGroupContextSync(self) + self.group_context.set_properties( - identifier=identifier, params=params, delete_unused_nodes=delete_unused_nodes, group_type=group_type + identifier=identifier, + params=params, + delete_unused_nodes=delete_unused_nodes, + group_type=group_type, + group_params=group_params, + branch=branch, ) def _graphql_url( diff --git a/infrahub_sdk/protocols.py b/infrahub_sdk/protocols.py index 7a69b5f8..d002e433 100644 --- a/infrahub_sdk/protocols.py +++ b/infrahub_sdk/protocols.py @@ -68,6 +68,12 @@ class BuiltinIPPrefix(CoreNode): children: RelationshipManager +class CoreAction(CoreNode): + name: String + description: StringOptional + triggers: RelationshipManager + + class CoreArtifactTarget(CoreNode): artifacts: RelationshipManager @@ -154,6 +160,10 @@ class CoreMenu(CoreNode): children: RelationshipManager +class CoreNodeTriggerMatch(CoreNode): + trigger: RelatedNode + + class CoreObjectComponentTemplate(CoreNode): template_name: String @@ -195,6 +205,14 @@ class CoreTransformation(CoreNode): tags: RelationshipManager +class CoreTriggerRule(CoreNode): + name: String + description: StringOptional + active: Boolean + branch_scope: Dropdown + action: RelatedNode + + class CoreValidator(CoreNode): label: StringOptional state: Enum @@ -328,6 +346,10 @@ class CoreFileThread(CoreThread): repository: RelatedNode +class CoreGeneratorAction(CoreAction): + generator: RelatedNode + + class CoreGeneratorCheck(CoreCheck): instance: String @@ -382,6 +404,16 @@ class CoreGraphQLQueryGroup(CoreGroup): query: RelatedNode +class CoreGroupAction(CoreAction): + add_members: Boolean + group: RelatedNode + + +class CoreGroupTriggerRule(CoreTriggerRule): + members_added: Boolean + group: RelatedNode + + class CoreIPAddressPool(CoreResourcePool, LineageSource): default_address_type: String default_prefix_length: IntegerOptional @@ -401,6 +433,25 @@ class CoreMenuItem(CoreMenu): pass +class CoreNodeTriggerAttributeMatch(CoreNodeTriggerMatch): + attribute_name: String + value: StringOptional + value_previous: StringOptional + value_match: Dropdown + + +class CoreNodeTriggerRelationshipMatch(CoreNodeTriggerMatch): + relationship_name: String + added: Boolean + peer: StringOptional + + +class CoreNodeTriggerRule(CoreTriggerRule): + node_kind: String + mutation_action: Enum + matches: RelationshipManager + + class CoreNumberPool(CoreResourcePool, LineageSource): node: String node_attribute: String @@ -448,6 +499,11 @@ class CoreRepository(LineageOwner, LineageSource, CoreGenericRepository, CoreTas commit: StringOptional +class CoreRepositoryGroup(CoreGroup): + content: Dropdown + repository: RelatedNode + + class CoreRepositoryValidator(CoreValidator): repository: 
RelatedNode @@ -545,6 +601,12 @@ class BuiltinIPPrefixSync(CoreNodeSync): children: RelationshipManagerSync +class CoreActionSync(CoreNodeSync): + name: String + description: StringOptional + triggers: RelationshipManagerSync + + class CoreArtifactTargetSync(CoreNodeSync): artifacts: RelationshipManagerSync @@ -631,6 +693,10 @@ class CoreMenuSync(CoreNodeSync): children: RelationshipManagerSync +class CoreNodeTriggerMatchSync(CoreNodeSync): + trigger: RelatedNodeSync + + class CoreObjectComponentTemplateSync(CoreNodeSync): template_name: String @@ -672,6 +738,14 @@ class CoreTransformationSync(CoreNodeSync): tags: RelationshipManagerSync +class CoreTriggerRuleSync(CoreNodeSync): + name: String + description: StringOptional + active: Boolean + branch_scope: Dropdown + action: RelatedNodeSync + + class CoreValidatorSync(CoreNodeSync): label: StringOptional state: Enum @@ -805,6 +879,10 @@ class CoreFileThreadSync(CoreThreadSync): repository: RelatedNodeSync +class CoreGeneratorActionSync(CoreActionSync): + generator: RelatedNodeSync + + class CoreGeneratorCheckSync(CoreCheckSync): instance: String @@ -859,6 +937,16 @@ class CoreGraphQLQueryGroupSync(CoreGroupSync): query: RelatedNodeSync +class CoreGroupActionSync(CoreActionSync): + add_members: Boolean + group: RelatedNodeSync + + +class CoreGroupTriggerRuleSync(CoreTriggerRuleSync): + members_added: Boolean + group: RelatedNodeSync + + class CoreIPAddressPoolSync(CoreResourcePoolSync, LineageSourceSync): default_address_type: String default_prefix_length: IntegerOptional @@ -878,6 +966,25 @@ class CoreMenuItemSync(CoreMenuSync): pass +class CoreNodeTriggerAttributeMatchSync(CoreNodeTriggerMatchSync): + attribute_name: String + value: StringOptional + value_previous: StringOptional + value_match: Dropdown + + +class CoreNodeTriggerRelationshipMatchSync(CoreNodeTriggerMatchSync): + relationship_name: String + added: Boolean + peer: StringOptional + + +class CoreNodeTriggerRuleSync(CoreTriggerRuleSync): + node_kind: String + mutation_action: Enum + matches: RelationshipManagerSync + + class CoreNumberPoolSync(CoreResourcePoolSync, LineageSourceSync): node: String node_attribute: String @@ -925,6 +1032,11 @@ class CoreRepositorySync(LineageOwnerSync, LineageSourceSync, CoreGenericReposit commit: StringOptional +class CoreRepositoryGroupSync(CoreGroupSync): + content: Dropdown + repository: RelatedNodeSync + + class CoreRepositoryValidatorSync(CoreValidatorSync): repository: RelatedNodeSync diff --git a/infrahub_sdk/query_groups.py b/infrahub_sdk/query_groups.py index 4bb732b2..94b16a10 100644 --- a/infrahub_sdk/query_groups.py +++ b/infrahub_sdk/query_groups.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from .constants import InfrahubClientMode from .exceptions import NodeNotFoundError @@ -25,6 +25,7 @@ def __init__(self) -> None: self.params: dict[str, str] = {} self.delete_unused_nodes: bool = False self.group_type: str = "CoreStandardGroup" + self.group_params: dict[str, Any] = {} def set_properties( self, @@ -32,6 +33,8 @@ def set_properties( params: dict[str, str] | None = None, delete_unused_nodes: bool = False, group_type: str | None = None, + group_params: dict[str, Any] | None = None, + branch: str | None = None, ) -> None: """Setter method to set the values of identifier and params. 
@@ -43,6 +46,8 @@ def set_properties( self.params = params or {} self.delete_unused_nodes = delete_unused_nodes self.group_type = group_type or self.group_type + self.group_params = group_params or {} + self.branch = branch def _get_params_as_str(self) -> str: """Convert the params in dict format, into a string""" @@ -87,7 +92,9 @@ def __init__(self, client: InfrahubClient) -> None: async def get_group(self, store_peers: bool = False) -> InfrahubNode | None: group_name = self._generate_group_name() try: - group = await self.client.get(kind=self.group_type, name__value=group_name, include=["members"]) + group = await self.client.get( + kind=self.group_type, name__value=group_name, include=["members"], branch=self.branch + ) except NodeNotFoundError: return None @@ -151,6 +158,8 @@ async def update_group(self) -> None: name=group_name, description=description, members=members, + branch=self.branch, + **self.group_params, ) await group.save(allow_upsert=True, update_group_context=False) @@ -243,6 +252,8 @@ def update_group(self) -> None: name=group_name, description=description, members=members, + branch=self.branch, + **self.group_params, ) group.save(allow_upsert=True, update_group_context=False) diff --git a/infrahub_sdk/schema/repository.py b/infrahub_sdk/schema/repository.py index b5c58d2f..69d63832 100644 --- a/infrahub_sdk/schema/repository.py +++ b/infrahub_sdk/schema/repository.py @@ -147,6 +147,18 @@ def load_query(self, relative_path: str = ".") -> str: return file.read() +class InfrahubObjectConfig(InfrahubRepositoryConfigElement): + model_config = ConfigDict(extra="forbid") + name: str = Field(..., description="The name associated to the object file") + file_path: Path = Field(..., description="The file within the repository containing object data.") + + +class InfrahubMenuConfig(InfrahubRepositoryConfigElement): + model_config = ConfigDict(extra="forbid") + name: str = Field(..., description="The name of the menu") + file_path: Path = Field(..., description="The file within the repository containing menu data.") + + RESOURCE_MAP: dict[Any, str] = { InfrahubJinja2TransformConfig: "jinja2_transforms", InfrahubCheckDefinitionConfig: "check_definitions", @@ -154,6 +166,8 @@ def load_query(self, relative_path: str = ".") -> str: InfrahubPythonTransformConfig: "python_transforms", InfrahubGeneratorDefinitionConfig: "generator_definitions", InfrahubRepositoryGraphQLConfig: "queries", + InfrahubObjectConfig: "objects", + InfrahubMenuConfig: "menus", } @@ -176,6 +190,8 @@ class InfrahubRepositoryConfig(BaseModel): default_factory=list, description="Generator definitions" ) queries: list[InfrahubRepositoryGraphQLConfig] = Field(default_factory=list, description="GraphQL Queries") + objects: list[Path] = Field(default_factory=list, description="Objects") + menus: list[Path] = Field(default_factory=list, description="Menus") @field_validator( "check_definitions", diff --git a/infrahub_sdk/spec/object.py b/infrahub_sdk/spec/object.py index acbf1551..23a11c10 100644 --- a/infrahub_sdk/spec/object.py +++ b/infrahub_sdk/spec/object.py @@ -459,7 +459,7 @@ async def create_node( await node.save(allow_upsert=True) display_label = node.get_human_friendly_id_as_string() or f"{node.get_kind()} : {node.id}" - client.log.info(f"Node: {display_label}") + client.log.info(f"Created node: {display_label}") for rel in remaining_rels: context = {} diff --git a/infrahub_sdk/testing/schemas/car_person.py b/infrahub_sdk/testing/schemas/car_person.py index 2da7ab4d..3a1c3dc3 100644 --- 
a/infrahub_sdk/testing/schemas/car_person.py +++ b/infrahub_sdk/testing/schemas/car_person.py @@ -48,6 +48,7 @@ def schema_person_base(self) -> NodeSchema: namespace=NAMESPACE, include_in_menu=True, label="Person", + default_filter="name__value", human_friendly_id=["name__value"], attributes=[ Attr(name="name", kind=AttributeKind.TEXT, unique=True), From 0d8fca25c5b4299c188563c6a34cb7b5be8cc546 Mon Sep 17 00:00:00 2001 From: Alex Gittings Date: Tue, 27 May 2025 11:36:52 +0100 Subject: [PATCH 13/13] Add object import to docs --- docs/docs/python-sdk/topics/object_file.mdx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/docs/python-sdk/topics/object_file.mdx b/docs/docs/python-sdk/topics/object_file.mdx index fd747bfa..7488c02f 100644 --- a/docs/docs/python-sdk/topics/object_file.mdx +++ b/docs/docs/python-sdk/topics/object_file.mdx @@ -38,6 +38,8 @@ Multiple object files can be loaded at once by specifying the path to multiple f The `object load` command will create/update the objects using an `Upsert` operation. All objects previously loaded will NOT be deleted in the Infrahub instance. Also, if some objects present in different files are identical and dependent on each other, the `object load` command will NOT calculate the dependencies between the objects and as such it's the responsibility of the users to execute the command in the right order. +> Object files can also be loaded into Infrahub when using external Git repositories. To see how to do this, please refer to the [.infrahub.yml](https://docs.infrahub.app/topics/infrahub-yml#objects) documentation. + ### Validate the format of object files The object file can be validated using the `infrahubctl object validate` command.
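As an illustrative sketch that is not part of the patch series: the new `objects` field added to `InfrahubRepositoryConfig` in `infrahub_sdk/schema/repository.py` is what backs the `objects` entry of `.infrahub.yml` referenced in the documentation above. The snippet assumes a package build that includes these changes and that the remaining `InfrahubRepositoryConfig` fields keep their empty-list defaults; the file paths are made up for the example.

```python
from pathlib import Path

from infrahub_sdk.schema.repository import InfrahubRepositoryConfig

# Equivalent to declaring these files under the `objects` key of .infrahub.yml;
# the paths are illustrative and relative to the repository root.
config = InfrahubRepositoryConfig(
    objects=[Path("objects/locations.yml"), Path("objects/devices.yml")]
)

for object_file in config.objects:
    print(f"object file to load: {object_file}")
```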