Some fixes and adding first round of unittest #181

Merged
merged 6 commits into from May 24, 2024

1 change: 0 additions & 1 deletion development/docker-compose.base.yml
@@ -13,7 +13,6 @@ x-nautobot-base: &nautobot-base
- "creds.env"
tty: true

version: "3.8"
services:
nautobot:
depends_on:
1 change: 0 additions & 1 deletion development/docker-compose.dev.yml
@@ -3,7 +3,6 @@
# any override will need to include these volumes to use them.
# see: https://github.com/docker/compose/issues/3729
---
version: "3.8"
services:
nautobot:
command: "nautobot-server runserver 0.0.0.0:8080"
2 changes: 0 additions & 2 deletions development/docker-compose.mysql.yml
@@ -1,6 +1,4 @@
---
version: "3.8"

services:
nautobot:
environment:
2 changes: 0 additions & 2 deletions development/docker-compose.postgres.yml
@@ -1,6 +1,4 @@
---
version: "3.8"

services:
nautobot:
environment:
1 change: 0 additions & 1 deletion development/docker-compose.redis.yml
@@ -1,5 +1,4 @@
---
version: "3.8"
services:
redis:
image: "redis:6-alpine"
@@ -406,7 +406,7 @@ def load_devices(self):
last_network_data_sync=datetime.datetime.now().date().isoformat(),
)
self.add(network_device)
except Exception as err:
except Exception as err: # pylint: disable=broad-exception-caught
self._handle_general_load_exception(error=err, hostname=hostname, data=device_data, model_type="device")
continue
# for interface in device_data["interfaces"]:
@@ -467,7 +467,7 @@ def load_ip_addresses(self):
"DiffSync store. This is a duplicate IP Address."
)
continue
except Exception as err:
except Exception as err: # pylint: disable=broad-exception-caught
self._handle_general_load_exception(
error=err, hostname=hostname, data=device_data, model_type="ip_address"
)
@@ -496,7 +496,7 @@ def load_vlans(self):
self.add(network_vlan)
except diffsync.exceptions.ObjectAlreadyExists:
continue
except Exception as err:
except Exception as err: # pylint: disable=broad-exception-caught
self._handle_general_load_exception(
error=err, hostname=hostname, data=device_data, model_type="vlan"
)
@@ -513,7 +513,7 @@ def load_vlans(self):
self.add(network_vlan)
except diffsync.exceptions.ObjectAlreadyExists:
continue
except Exception as err:
except Exception as err: # pylint: disable=broad-exception-caught
self._handle_general_load_exception(
error=err, hostname=hostname, data=device_data, model_type="vlan"
)
@@ -536,7 +536,7 @@ def load_vrfs(self):
self.add(network_vrf)
except diffsync.exceptions.ObjectAlreadyExists:
continue
except Exception as err:
except Exception as err: # pylint: disable=broad-exception-caught
self._handle_general_load_exception(
error=err, hostname=hostname, data=device_data, model_type="vrf"
)
@@ -559,7 +559,7 @@ def load_ip_address_to_interfaces(self):
),
)
self.add(network_ip_address_to_interface)
except Exception as err:
except Exception as err: # pylint: disable=broad-exception-caught
self._handle_general_load_exception(
error=err, hostname=hostname, data=device_data, model_type="ip_address to interface"
)
@@ -578,7 +578,7 @@ def load_tagged_vlans_to_interface(self):
tagged_vlans=interface_data["tagged_vlans"],
)
self.add(network_tagged_vlans_to_interface)
except Exception as err:
except Exception as err: # pylint: disable=broad-exception-caught
self._handle_general_load_exception(
error=err, hostname=hostname, data=device_data, model_type="tagged vlan to interface"
)
@@ -597,7 +597,7 @@ def load_untagged_vlan_to_interface(self):
untagged_vlan=interface_data["untagged_vlan"],
)
self.add(network_untagged_vlan_to_interface)
except Exception as err:
except Exception as err: # pylint: disable=broad-exception-caught
self._handle_general_load_exception(
error=err, hostname=hostname, data=device_data, model_type="untagged vlan to interface"
)
@@ -616,7 +616,7 @@ def load_lag_to_interface(self):
lag__interface__name=interface_data["lag"] if interface_data["lag"] else "",
)
self.add(network_lag_to_interface)
except Exception as err:
except Exception as err: # pylint: disable=broad-exception-caught
self._handle_general_load_exception(
error=err, hostname=hostname, data=device_data, model_type="lag to interface"
)
@@ -635,7 +635,7 @@ def load_vrf_to_interface(self):
vrf=interface_data["vrf"],
)
self.add(network_vrf_to_interface)
except Exception as err:
except Exception as err: # pylint: disable=broad-exception-caught
self._handle_general_load_exception(
error=err, hostname=hostname, data=device_data, model_type="vrf to interface"
)
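Every hunk above makes the same change: the bare `except Exception` blocks gain a `# pylint: disable=broad-exception-caught` marker. The pattern is deliberate: each loader catches any unexpected error for a single record, hands it to a shared handler, and continues, so one malformed device cannot abort the whole sync. A minimal, self-contained sketch of that pattern (the handler body and sample data here are illustrative, not the adapter's actual implementation):

```python
import logging

logger = logging.getLogger(__name__)


def _handle_general_load_exception(error, hostname, data, model_type):
    """Log the failure for one record instead of aborting the whole load."""
    logger.error("Failed to load %s for %s: %s (data: %s)", model_type, hostname, error, data)


def load_devices(devices_by_hostname):
    """Load each device independently; one bad record never stops the loop."""
    for hostname, device_data in devices_by_hostname.items():
        try:
            if "serial" not in device_data:  # stand-in for building the real DiffSync model
                raise ValueError("missing serial")
        except Exception as err:  # pylint: disable=broad-exception-caught
            _handle_general_load_exception(error=err, hostname=hostname, data=device_data, model_type="device")
            continue


load_devices({"router1": {"serial": "abc123"}, "router2": {}})
```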
2 changes: 1 addition & 1 deletion nautobot_device_onboarding/jobs.py
@@ -629,7 +629,7 @@ def run(
cf.content_types.add(ContentType.objects.get_for_model(Device))
if self.debug:
self.logger.debug("Custom field found or created")
except Exception as err:
except Exception as err: # pylint: disable=broad-exception-caught
self.logger.error(f"Failed to get or create last_network_data_sync custom field, {err}")
return

12 changes: 11 additions & 1 deletion nautobot_device_onboarding/nornir_plays/command_getter.py
@@ -51,7 +51,17 @@ def _get_commands_to_run(yaml_parsed_info, sync_vlans, sync_vrfs):
"""Using merged command mapper info and look up all commands that need to be run."""
all_commands = []
for key, value in yaml_parsed_info.items():
if not key.startswith("_metadata"):
if key == "pre_processor":
for _, v in value.items():
current_root_key = v.get("commands")
if isinstance(current_root_key, list):
# Means there are "nested" structures, e.g. multiple commands
for command in v["commands"]:
all_commands.append(command)
else:
if isinstance(current_root_key, dict):
all_commands.append(current_root_key)
else:
# Deduplicate commands + parser key
current_root_key = value.get("commands")
if isinstance(current_root_key, list):
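The new `pre_processor` branch walks one level deeper than the regular fields: each pre-processor entry carries its own `commands` value, which may be a single dict or a list of dicts, and every command found is appended to the flat run list. A simplified sketch of that flattening (the sample `yaml_parsed_info` structure is invented, and the deduplication step noted in the diff is omitted):

```python
def get_commands_to_run(yaml_parsed_info):
    """Flatten all command dicts, including those nested under pre_processor."""
    all_commands = []
    for key, value in yaml_parsed_info.items():
        if key == "pre_processor":
            # Each pre-processor entry has its own "commands": dict or list of dicts.
            for _, pre_processor_data in value.items():
                commands = pre_processor_data.get("commands")
                if isinstance(commands, list):
                    all_commands.extend(commands)
                elif isinstance(commands, dict):
                    all_commands.append(commands)
        else:
            commands = value.get("commands")
            if isinstance(commands, list):
                all_commands.extend(commands)
            elif isinstance(commands, dict):
                all_commands.append(commands)
    return all_commands


sample = {
    "pre_processor": {"vlan_map": {"commands": {"command": "show vlan", "parser": "textfsm"}}},
    "serial": {"commands": [{"command": "show version", "parser": "textfsm"}]},
}
print(get_commands_to_run(sample))
```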
146 changes: 90 additions & 56 deletions nautobot_device_onboarding/nornir_plays/formatter.py
@@ -1,6 +1,7 @@
"""Command Extraction and Formatting or SSoT Based Jobs."""

import json
from json.decoder import JSONDecodeError
import logging

from django.template import engines
@@ -38,80 +39,107 @@ def get_django_env():
return jinja_env


def process_empty_result(iterable_type):
"""Helper to map iterable_type on an empty result."""
iterable_mapping = {
"dict": {},
"str": "",
}
return iterable_mapping.get(iterable_type, [])


def normalize_processed_data(processed_data, iterable_type):
"""Helper to normalize the processed data returned from jdiff/jmespath."""
# If processed_data is an empty data structure, return default based on iterable_type
if not processed_data:
return process_empty_result(iterable_type)
if isinstance(processed_data, str):
try:
# If processed_data is a json string try to load it into a python datatype.
post_processed_data = json.loads(processed_data)
except (JSONDecodeError, TypeError):
post_processed_data = processed_data
else:
post_processed_data = processed_data
if isinstance(post_processed_data, list) and len(post_processed_data) == 1:
if isinstance(post_processed_data[0], str):
post_processed_data = post_processed_data[0]
else:
if isinstance(post_processed_data[0], dict):
if iterable_type:
if iterable_type == "dict":
post_processed_data = post_processed_data[0]
else:
post_processed_data = post_processed_data[0]
return post_processed_data
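
The `normalize_processed_data` helper above centralizes result-shaping rules that were previously duplicated inline: empty results collapse to a default for the requested `iterable_type`, JSON strings are decoded, and single-element lists are unwrapped. A usage sketch of those rules (assuming the helper is importable from the module this diff touches; expected values follow the logic as written):

```python
from nautobot_device_onboarding.nornir_plays.formatter import normalize_processed_data

assert normalize_processed_data([], "dict") == {}                        # empty -> iterable_type default
assert normalize_processed_data([], "str") == ""
assert normalize_processed_data('{"vlan": 10}', None) == {"vlan": 10}    # JSON string decoded
assert normalize_processed_data(["Ethernet1/1"], None) == "Ethernet1/1"  # single str unwrapped
assert normalize_processed_data([{"id": 10}], "dict") == {"id": 10}      # single dict unwrapped
```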


def extract_and_post_process(parsed_command_output, yaml_command_element, j2_data_context, iter_type, job_debug):
"""Helper to extract and apply post_processing on a single element."""
logger = setup_logger("DEVICE_ONBOARDING_ETL_LOGGER", job_debug)
# if parsed_command_output is an empty data structure, no need to go through all the processing.
if parsed_command_output:
j2_env = get_django_env()
jpath_template = j2_env.from_string(yaml_command_element["jpath"])
j2_rendered_jpath = jpath_template.render(**j2_data_context)
logger.debug("Post Rendered Jpath: %s", j2_rendered_jpath)
if not parsed_command_output:
return parsed_command_output, normalize_processed_data(parsed_command_output, iter_type)
j2_env = get_django_env()
# This just renders the jpath itself if any interpolation is needed.
jpath_template = j2_env.from_string(yaml_command_element["jpath"])
j2_rendered_jpath = jpath_template.render(**j2_data_context)
logger.debug("Post Rendered Jpath: %s", j2_rendered_jpath)
try:
if isinstance(parsed_command_output, str):
parsed_command_output = json.loads(parsed_command_output)
try:
extracted_value = extract_data_from_json(parsed_command_output, j2_rendered_jpath)
except TypeError as err:
logger.debug("Error occurred during extraction: %s", err)
extracted_value = []
pre_processed_extracted = extracted_value
if yaml_command_element.get("post_processor"):
# j2 context data changes obj(hostname) -> extracted_value for post_processor
j2_data_context["obj"] = extracted_value
template = j2_env.from_string(yaml_command_element["post_processor"])
extracted_processed = template.render(**j2_data_context)
else:
extracted_processed = extracted_value
if isinstance(extracted_processed, str):
try:
post_processed_data = json.loads(extracted_processed)
except Exception:
post_processed_data = extracted_processed
else:
post_processed_data = extracted_processed
if isinstance(post_processed_data, list) and len(post_processed_data) == 0:
# means result was empty, change empty result to iterable_type if applicable.
if iter_type:
if iter_type == "dict":
post_processed_data = {}
if iter_type == "str":
post_processed_data = ""
if isinstance(post_processed_data, list) and len(post_processed_data) == 1:
if isinstance(post_processed_data[0], str):
post_processed_data = post_processed_data[0]
else:
if isinstance(post_processed_data[0], dict):
if iter_type:
if iter_type == "dict":
post_processed_data = post_processed_data[0]
else:
post_processed_data = post_processed_data[0]
logger.debug("Pre Processed Extracted: %s", pre_processed_extracted)
logger.debug("Post Processed Data: %s", post_processed_data)
return pre_processed_extracted, post_processed_data
if iter_type:
if iter_type == "dict":
post_processed_data = {}
if iter_type == "str":
post_processed_data = ""
parsed_command_output = json.loads(parsed_command_output)
except (JSONDecodeError, TypeError):
logger.debug("Parsed Command Output is a string but not jsonable: %s", parsed_command_output)
extracted_value = extract_data_from_json(parsed_command_output, j2_rendered_jpath)
except TypeError as err:
logger.debug("Error occurred during extraction: %s setting default extracted value to []", err)
extracted_value = []
pre_processed_extracted = extracted_value
if yaml_command_element.get("post_processor"):
# j2 context data changes obj(hostname) -> extracted_value for post_processor
j2_data_context["obj"] = extracted_value
template = j2_env.from_string(yaml_command_element["post_processor"])
extracted_processed = template.render(**j2_data_context)
else:
post_processed_data = []
logger.debug("Pre Processed Extracted: %s", parsed_command_output)
extracted_processed = extracted_value
post_processed_data = normalize_processed_data(extracted_processed, iter_type)
logger.debug("Pre Processed Extracted: %s", pre_processed_extracted)
logger.debug("Post Processed Data: %s", post_processed_data)
return parsed_command_output, post_processed_data
return pre_processed_extracted, post_processed_data


def perform_data_extraction(host, command_info_dict, command_outputs_dict, job_debug):
"""Extract, process data."""
result_dict = {}
sync_vlans = host.defaults.data.get("sync_vlans", False)
sync_vrfs = host.defaults.data.get("sync_vrfs", False)
get_context_from_pre_processor = {}
if command_info_dict.get("pre_processor"):
for pre_processor_name, field_data in command_info_dict["pre_processor"].items():
if isinstance(field_data["commands"], dict):
# only one command is specified as a dict; force it to a list.
loop_commands = [field_data["commands"]]
else:
loop_commands = field_data["commands"]
for show_command_dict in loop_commands:
final_iterable_type = show_command_dict.get("iterable_type")
_, current_field_post = extract_and_post_process(
command_outputs_dict[show_command_dict["command"]],
show_command_dict,
{"obj": host.name, "original_host": host.name},
final_iterable_type,
job_debug,
)
get_context_from_pre_processor[pre_processor_name] = current_field_post
for ssot_field, field_data in command_info_dict.items():
if not sync_vlans and ssot_field in ["interfaces__tagged_vlans", "interfaces__untagged_vlan"]:
continue
# If syncing vrfs isn't in scope, skip the unneeded commands.
if not sync_vrfs and ssot_field == "interfaces__vrf":
continue
if ssot_field == "pre_processor":
continue
if isinstance(field_data["commands"], dict):
# only one command is specified as a dict; force it to a list.
loop_commands = [field_data["commands"]]
@@ -120,10 +148,12 @@ def perform_data_extraction(host, command_info_dict, command_outputs_dict, job_d
for show_command_dict in loop_commands:
final_iterable_type = show_command_dict.get("iterable_type")
if field_data.get("root_key"):
original_context = {"obj": host.name, "original_host": host.name}
merged_context = {**original_context, **get_context_from_pre_processor}
root_key_pre, root_key_post = extract_and_post_process(
command_outputs_dict[show_command_dict["command"]],
show_command_dict,
{"obj": host.name, "original_host": host.name},
merged_context,
final_iterable_type,
job_debug,
)
@@ -139,19 +169,23 @@ def perform_data_extraction(host, command_info_dict, command_outputs_dict, job_d
# a list of data that we want to become our nested key. E.g. current_key "Ethernet1/1"
# These get passed into the render context for the template render to allow nested jpaths to use
# the current_key context for more flexible jpath queries.
original_context = {"current_key": current_key, "obj": host.name, "original_host": host.name}
merged_context = {**original_context, **get_context_from_pre_processor}
_, current_key_post = extract_and_post_process(
command_outputs_dict[show_command_dict["command"]],
show_command_dict,
{"current_key": current_key, "obj": host.name, "original_host": host.name},
merged_context,
final_iterable_type,
job_debug,
)
result_dict[field_nesting[0]][current_key][field_nesting[1]] = current_key_post
else:
original_context = {"obj": host.name, "original_host": host.name}
merged_context = {**original_context, **get_context_from_pre_processor}
_, current_field_post = extract_and_post_process(
command_outputs_dict[show_command_dict["command"]],
show_command_dict,
{"obj": host.name, "original_host": host.name},
merged_context,
final_iterable_type,
job_debug,
)
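A recurring addition in `perform_data_extraction` is the context merge: results computed in the `pre_processor` block are layered over the base host context before every extraction, so nested jpath templates can reference pre-computed values. Since `{**a, **b}` gives the right-hand mapping precedence on key collisions, pre-processor results would shadow base keys like `obj` if they reused those names. A mini-example of the merge semantics (the `vlan_map` result name is invented):

```python
original_context = {"obj": "router1", "original_host": "router1"}
get_context_from_pre_processor = {"vlan_map": {"10": "DATA", "20": "VOICE"}}

# Right-hand dict wins on collisions; here the keys are disjoint, so the
# merged context simply gains the pre-processor result.
merged_context = {**original_context, **get_context_from_pre_processor}
print(merged_context)
# {'obj': 'router1', 'original_host': 'router1', 'vlan_map': {'10': 'DATA', '20': 'VOICE'}}
```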
4 changes: 4 additions & 0 deletions nautobot_device_onboarding/nornir_plays/processor.py
@@ -44,6 +44,10 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -
f"task_instance_completed Task Name: {task.name}",
extra={"object": task.host},
)
if self.kwargs["debug"]:
self.logger.debug(
f"task_instance_completed {task.host} Task result {result.result}.", extra={"object": task.host}
)
# If any main task resulted in failed:True, add that key so the SSoT side can ignore that entry.
if result[0].failed:
if task.params["command_getter_job"] == "sync_devices":
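The processor change above gates a verbose per-task result dump behind the job's debug flag, since full command output is too noisy for normal runs. A standalone sketch of the same idea in a Nornir-style processor (the logger and `kwargs` wiring are assumptions; only the gated debug call mirrors the diff):

```python
import logging

from nornir.core.inventory import Host
from nornir.core.task import MultiResult, Task


class CommandGetterProcessor:
    """Processor that logs full task results only when debug is enabled."""

    def __init__(self, logger: logging.Logger, kwargs: dict):
        self.logger = logger
        self.kwargs = kwargs  # expected to carry a "debug" boolean

    def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None:
        if self.kwargs["debug"]:
            # Full results are verbose; emit them only in debug runs.
            self.logger.debug(f"task_instance_completed {task.host} Task result {result.result}.")
```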