From aac090ccdfa4979771bda5e14dc9f8ebd7354464 Mon Sep 17 00:00:00 2001
From: Marek Zbroch
Date: Thu, 3 Apr 2025 08:05:28 +0200
Subject: [PATCH 1/8] marek prototype for discovery

---
 nautobot_device_onboarding/jobs.py | 162 ++++++++++++++++++++++++++++-
 1 file changed, 161 insertions(+), 1 deletion(-)

diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py
index 60ca84bd..25f28f90 100755
--- a/nautobot_device_onboarding/jobs.py
+++ b/nautobot_device_onboarding/jobs.py
@@ -33,8 +33,9 @@
     SecretsGroup,
     SecretsGroupAssociation,
     Status,
+    Tag,
 )
-from nautobot.ipam.models import Namespace
+from nautobot.ipam.models import Namespace, Prefix
 from nautobot_plugin_nornir.constants import NORNIR_SETTINGS
 from nautobot_ssot.jobs.base import DataSource
 from nornir import InitNornir
@@ -61,6 +62,10 @@
 from nautobot_device_onboarding.nornir_plays.processor import TroubleshootingProcessor
 from nautobot_device_onboarding.utils.helper import onboarding_task_fqdn_to_ip
 
+from netmiko import SSHDetect
+from scapy.all import IP, TCP, sr1, sr, conf
+
+
 InventoryPluginRegister.register("empty-inventory", EmptyInventory)
 
 PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"]
@@ -837,10 +842,165 @@ def run(self, *args, **kwargs):  # pragma: no cover
         return f"Successfully ran the following commands: {', '.join(list(compiled_results.keys()))}"
 
 
+class DeviceOnboardingDiscoveryJob(Job):
+    """Job to Discover Network Devices and queue for actual Onboarding."""
+
+    prefix_tag = ObjectVar(model=Tag, required=True)
+    secrets_groups = MultiObjectVar(
+        model=SecretsGroup,
+        required=True,
+        description="SecretsGroup for device connection credentials.",
+    )
+
+    class Meta:
+        """Meta object."""
+
+        name = "Discovers devices within networks and runs onboarding."
+        description = "Scan network prefixes and onboard devices."
+        has_sensitive_variables = False
+        hidden = False
+
+    def _get_active_targets(self):
+        active_targets = set()
+        autodiscovery_prefixes = Prefix.objects.filter(tags__in=[self.prefix_tag])
+
+        # TODO(mzb): Prevent already existing devices from being scanned
+
+        # Send SYN packet to target IPs and SSH port
+        syn_packet = IP(dst=[str(p) for p in autodiscovery_prefixes]) / TCP(dport=22, flags="S")
+
+        # Send packet and wait for a response
+        response = sr(syn_packet, timeout=3, verbose=1)[0]
+
+        # Check if there is a response
+        if response:
+            for sent, received in response:
+                # Check if the response is a SYN-ACK (port is open)
+                if received.haslayer(TCP) and received[TCP].flags == 18:  # SYN-ACK flag is 0x12, or 18 in decimal
+                    self.logger.info(f"Port {sent.dst}:{sent.dport} is OPEN")
+                    active_targets.add(sent.dst)
+                # Check if the response is an RST (port is closed)
+                elif received.haslayer(TCP) and received[TCP].flags & 0x04 == 4:  # RST flag is 0x04, or 4 in decimal
+                    self.logger.info(f"Port {sent.dst}:{sent.dport} is CLOSED")
+                # else:
+                # print(f"Port {target_port} is filtered (no response)")
+
+        return active_targets
+
+    def _parse_credentials(self, credentials):
+        """Parse and return dictionary of credentials."""
+        self.logger.info("Attempting to parse credentials from selected SecretGroup")
+        try:
+            self.username = credentials.get_secret_value(
+                access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC,
+                secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME,
+            )
+            self.password = credentials.get_secret_value(
+                access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC,
+                secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD,
+            )
+        except SecretsGroupAssociation.DoesNotExist as err:
+            self.logger.exception(
+                "Unable to use SecretsGroup selected, ensure Access Type is set to Generic & at minimum Username & Password types are set."
+            )
+            raise OnboardException("fail-credentials - Unable to parse selected credentials.") from err
+
+    def _guess_netmiko_device_type(self, hostname):
+        """Guess the device type of host, based on Netmiko."""
+        netmiko_optional_args = {"port": 22}  # , **NETMIKO_EXTRAS}
+        guessed_device_type = None
+
+        working_credentials = None
+
+        for secret_group in self.secrets_groups:
+            self._parse_credentials(credentials=secret_group)
+            guessed_exc = None
+
+            remote_device = {
+                "device_type": "autodetect",
+                "host": hostname,
+                "username": self.username,
+                "password": self.password,
+                **netmiko_optional_args,
+            }
+
+            try:
+                guesser = SSHDetect(**remote_device)
+                guessed_device_type = guesser.autodetect()
+                working_credentials = secret_group
+
+                break
+
+            except Exception as err:  # pylint: disable=broad-exception-caught
+                guessed_device_type = None
+                guessed_exc = err
+
+        return guessed_device_type, guessed_exc, working_credentials
+
+    def run(self, prefix_tag, secrets_groups, *args, **kwargs):  # pragma: no cover
+        """Process discovering devices."""
+        self.prefix_tag = prefix_tag
+        self.secrets_groups = secrets_groups
+
+        onboarding_requests = []
+        for active_target in self._get_active_targets():
+            guessed_device_type, guessed_exc, working_credentials = self._guess_netmiko_device_type(hostname=active_target)
+            if working_credentials and guessed_exc is None:
+                onboarding_requests.append([active_target, working_credentials])
+
+        return onboarding_requests
+
+    def on_success(self, retval, task_id, args, kwargs):
+        """Success handler.
+
+        Run by the worker if the task executes successfully.
+
+        Arguments:
+            retval (Any): The return value of the task.
+            task_id (str): Unique id of the executed task.
+            args (Tuple): Original arguments for the executed task.
+            kwargs (Dict): Original keyword arguments for the executed task.
+
+        Returns:
+            (None): The return value of this handler is ignored.
+        """
+        from nautobot.extras.models import Job, JobResult
+        from django.contrib.auth import get_user_model
+        User = get_user_model()
+        try:
+            user = User.objects.get(username="admin")
+        except User.DoesNotExist as exc:
+            raise CommandError("No such user") from exc
+        for val in retval:
+            data = {
+                "location": Location.objects.get(name="Temp").id,
+                "namespace": Namespace.objects.first().id,
+                "ip_addresses": val[0],
+                "port": 22,
+                "timeout": 10,
+                "device_role": Role.objects.get(name="Generic").id,
+                "device_status": Status.objects.get(name="Active").id,
+                "ip_address_status": Status.objects.get(name="Active").id,
+                "secrets_group": val[1].pk,
+                'dryrun': False,
+                'memory_profiling': False,
+                'debug': False,
+                'csv_file': None,
+                'set_mgmt_only': False,
+                'update_devices_without_primary_ip': True,
+                'interface_status': Status.objects.get(name="Active").id,
+                'platform': None,
+                "connectivity_test": False,
+            }
+            job = Job.objects.get(name="Sync Devices From Network")
+            JobResult.enqueue_job(job, user, **data)
+
+
 jobs = [
     OnboardingTask,
     SSOTSyncDevices,
     SSOTSyncNetworkData,
     DeviceOnboardingTroubleshootingJob,
+    DeviceOnboardingDiscoveryJob,
 ]
 register_jobs(*jobs)

From 39682e64aaf728f43411bbacf2fbced57bb095e3 Mon Sep 17 00:00:00 2001
From: Marek Zbroch
Date: Thu, 3 Apr 2025 11:47:14 +0200
Subject: [PATCH 2/8] marek prototype for discovery

---
 nautobot_device_onboarding/jobs.py | 37 ++++++++++++++++++++++++++++++
 1 file changed, 37 insertions(+)

diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py
index 25f28f90..ba1d1635 100755
--- a/nautobot_device_onboarding/jobs.py
+++ b/nautobot_device_onboarding/jobs.py
@@ -887,6 +887,43 @@ def _get_active_targets(self):
 
         return active_targets
 
+    def _test(self):
+        for prefix in self.prefixes:
+            network = ipaddress.ip_network(prefix.prefix)
+
+            # Get a list of all IPs in the subnet
+            ip_list = [str(ip) for ip in network.hosts()]
+
+            # Use a ThreadPoolExecutor to scan IPs concurrently
+            with concurrent.futures.ThreadPoolExecutor(max_workers=self.num_threads) as executor:
+                future_to_ip = {}
+
+                for ip in ip_list:
+                    if self.debug:
+                        self.logger.debug(f"Starting scan for IP: {ip}")
+
+                    # Submit the scanning task to the executor
+                    future = executor.submit(
+                        self.scan_ip,
+                        ip,
+                        ipaddress.ip_network(prefix.prefix),
+                        self.ports,
+                        self.rdns_lookup,
+                        self.os_identification,
+                    )
+                    future_to_ip[future] = ip
+
+                for future in concurrent.futures.as_completed(future_to_ip):
+                    ip = future_to_ip[future]
+                    try:
+                        ip_result = future.result()
+                        results.update(ip_result)
+                    except Exception as e:
+                        self.logger.error(f"Error with future for IP {ip}: {e}")
+
+        if self.debug:
+            self.logger.info(f"Results: {results}")
+
     def _parse_credentials(self, credentials):
         """Parse and return dictionary of credentials."""
         self.logger.info("Attempting to parse credentials from selected SecretGroup")

From ade20dec3728537278474f34e8d9d694764216b6 Mon Sep 17 00:00:00 2001
From: Marek Zbroch
Date: Fri, 4 Apr 2025 09:24:03 +0200
Subject: [PATCH 3/8] marek prototype for discovery

---
 nautobot_device_onboarding/choices.py | 14 ++++++++++
 nautobot_device_onboarding/jobs.py    | 37 ++++++++++++++++++++++++---
 2 files changed, 47 insertions(+), 4 deletions(-)

diff --git a/nautobot_device_onboarding/choices.py b/nautobot_device_onboarding/choices.py
index 24741fe1..0411d53a 100755
--- a/nautobot_device_onboarding/choices.py
+++ b/nautobot_device_onboarding/choices.py
@@ -5,3 +5,17 @@
     ("sync_network_data", "Sync Network Data"),
     ("both", "Both"),
 )
+
+
+class AutodiscoveryProtocolTypeChoices(ChoiceSet):
+    """Auto Discovery Protocol Type Choices."""
+
+    SSH = "ssh"
+
+    CHOICES = (
+        (SSH, "ssh"),
+    )
+
+    AUTODISCOVERY_PORTS = (
+        (SSH, [22]),
+    )
diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py
index ba1d1635..0d601c73 100755
--- a/nautobot_device_onboarding/jobs.py
+++ b/nautobot_device_onboarding/jobs.py
@@ -41,7 +41,8 @@
 from nornir import InitNornir
 from nornir.core.plugins.inventory import InventoryPluginRegister
 
-from nautobot_device_onboarding.choices import SSOT_JOB_TO_COMMAND_CHOICE
+from nautobot_device_onboarding.choices import SSOT_JOB_TO_COMMAND_CHOICE, AutodiscoveryProtocolTypeChoices
+from nautobot_device_onboarding.constants import AUTODISCOVERY_PORTS_SSH
 from nautobot_device_onboarding.diffsync.adapters.sync_devices_adapters import (
     SyncDevicesNautobotAdapter,
     SyncDevicesNetworkAdapter,
@@ -887,8 +888,34 @@ def _get_active_targets(self):
 
         return active_targets
 
+    def _probe_ssh(self, ip_address):
+        """SSH specific test. """
+        from netutils.ping import tcp_ping  # TODO(mzb): move
+        ssh_port_results = {}
+
+        for ssh_port in AUTODISCOVERY_PORTS_SSH:
+            ssh_port_results[ssh_port] = tcp_ping(ip_address, ssh_port)
+
+        return ssh_port_results
+
+    def _probe_target(self, ip_address, protocols):
+        """Test if $ports are open on $ip_address.
+        Returns:
+            {$protocol_name: {port_number: bool}
+            {"ssh": {22: True}}
+        """
+        result = {}
+
+        for protocol in protocols:
+            if protocol == AutodiscoveryProtocolTypeChoices.SSH:
+                result["ssh"] = self._probe_ssh(ip_address=ip_address)  # TODO(mzb): getattr for monkey patching
+
+        return result
+
     def _test(self):
-        for prefix in self.prefixes:
+        results = {}
+
+        for prefix in self.prefixes:  # Hackaton contribution by Hanlin and team.
             network = ipaddress.ip_network(prefix.prefix)
 
             # Get a list of all IPs in the subnet
@@ -904,7 +931,7 @@ def _test(self):
 
                     # Submit the scanning task to the executor
                     future = executor.submit(
-                        self.scan_ip,
+                        self._probe_target,
                         ip,
                         ipaddress.ip_network(prefix.prefix),
                         self.ports,
@@ -976,9 +1003,11 @@ def _guess_netmiko_device_type(self, hostname):
 
     def run(self, prefix_tag, secrets_groups, *args, **kwargs):  # pragma: no cover
         """Process discovering devices."""
-        self.prefix_tag = prefix_tag
        self.secrets_groups = secrets_groups
 
+        # TODO(mzb): Initial filtering is with tags, can extend to Location/Role/Status too.
+        self.prefixes = Prefix.objects.filter(tags__in=[prefix_tag])
+
         onboarding_requests = []
         for active_target in self._get_active_targets():
             guessed_device_type, guessed_exc, working_credentials = self._guess_netmiko_device_type(hostname=active_target)

From 11b1f5c4305edc4968e6717611a12e2d110a19d7 Mon Sep 17 00:00:00 2001
From: Marek Zbroch
Date: Fri, 4 Apr 2025 09:36:38 +0200
Subject: [PATCH 4/8] marek prototype for discovery

---
 nautobot_device_onboarding/jobs.py | 34 ++++++++++++++++++------------
 1 file changed, 21 insertions(+), 13 deletions(-)

diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py
index 0d601c73..14353f2a 100755
--- a/nautobot_device_onboarding/jobs.py
+++ b/nautobot_device_onboarding/jobs.py
@@ -1,7 +1,10 @@
 # pylint: disable=attribute-defined-outside-init
 """Device Onboarding Jobs."""
 
+
+import concurrent.futures
 import csv
+import ipaddress
 import json
 import logging
 from io import StringIO
@@ -16,6 +19,7 @@
     FileVar,
     IntegerVar,
     Job,
+    MultiChoiceVar,
     MultiObjectVar,
     ObjectVar,
     StringVar,
@@ -847,6 +851,7 @@ class DeviceOnboardingDiscoveryJob(Job):
     """Job to Discover Network Devices and queue for actual Onboarding."""
 
     prefix_tag = ObjectVar(model=Tag, required=True)
+    protocols = MultiChoiceVar(choices=AutodiscoveryProtocolTypeChoices, required=True)
     secrets_groups = MultiObjectVar(
         model=SecretsGroup,
         required=True,
@@ -922,7 +927,7 @@ def _test(self):
             ip_list = [str(ip) for ip in network.hosts()]
 
             # Use a ThreadPoolExecutor to scan IPs concurrently
-            with concurrent.futures.ThreadPoolExecutor(max_workers=self.num_threads) as executor:
+            with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
                 future_to_ip = {}
 
                 for ip in ip_list:
@@ -933,24 +938,24 @@ def _test(self):
                    # Submit the scanning task to the executor
                     future = executor.submit(
                         self._probe_target,
                         ip,
-                        ipaddress.ip_network(prefix.prefix),
-                        self.ports,
-                        self.rdns_lookup,
-                        self.os_identification,
+                        self.protocols
                     )
                     future_to_ip[future] = ip
 
                 for future in concurrent.futures.as_completed(future_to_ip):
                     ip = future_to_ip[future]
                     try:
-                        ip_result = future.result()
-                        results.update(ip_result)
+                        # ip_result = future.result()
+                        # results.update(ip_result)
+                        results[ip] = future.result()
                     except Exception as e:
                         self.logger.error(f"Error with future for IP {ip}: {e}")
 
         if self.debug:
             self.logger.info(f"Results: {results}")
 
+        return results
+
     def _parse_credentials(self, credentials):
         """Parse and return dictionary of credentials."""
         self.logger.info("Attempting to parse credentials from selected SecretGroup")
@@ -1001,18 +1006,21 @@ def _guess_netmiko_device_type(self, hostname):
 
         return guessed_device_type, guessed_exc, working_credentials
 
-    def run(self, prefix_tag, secrets_groups, *args, **kwargs):  # pragma: no cover
+    def run(self, prefix_tag, secrets_groups, protocols, *args, **kwargs):  # pragma: no cover
         """Process discovering devices."""
         self.secrets_groups = secrets_groups
 
         # TODO(mzb): Initial filtering is with tags, can extend to Location/Role/Status too.
         self.prefixes = Prefix.objects.filter(tags__in=[prefix_tag])
+        self.protocols = protocols
+
+        self._test()
 
-        onboarding_requests = []
-        for active_target in self._get_active_targets():
-            guessed_device_type, guessed_exc, working_credentials = self._guess_netmiko_device_type(hostname=active_target)
-            if working_credentials and guessed_exc is None:
-                onboarding_requests.append([active_target, working_credentials])
+        # onboarding_requests = []
+        # for active_target in self._get_active_targets():
+        #     guessed_device_type, guessed_exc, working_credentials = self._guess_netmiko_device_type(hostname=active_target)
+        #     if working_credentials and guessed_exc is None:
+        #         onboarding_requests.append([active_target, working_credentials])
 
         return onboarding_requests

From d384675b19b5e863fbad01f43b82af532f060d5f Mon Sep 17 00:00:00 2001
From: Marek Zbroch
Date: Tue, 8 Apr 2025 12:23:46 +0200
Subject: [PATCH 5/8] marek prototype for discovery

---
 nautobot_device_onboarding/choices.py   |   6 +-
 nautobot_device_onboarding/constants.py |   8 +
 nautobot_device_onboarding/jobs.py      | 207 ++++++++++++++----------
 3 files changed, 128 insertions(+), 93 deletions(-)

diff --git a/nautobot_device_onboarding/choices.py b/nautobot_device_onboarding/choices.py
index 0411d53a..1942cb32 100755
--- a/nautobot_device_onboarding/choices.py
+++ b/nautobot_device_onboarding/choices.py
@@ -1,4 +1,6 @@
 """Choices used througout the app."""
+from nautobot.core.choices import ChoiceSet
+
 
 SSOT_JOB_TO_COMMAND_CHOICE = (
     ("sync_devices", "Sync Devices"),
@@ -15,7 +17,3 @@ class AutodiscoveryProtocolTypeChoices(ChoiceSet):
     CHOICES = (
         (SSH, "ssh"),
     )
-
-    AUTODISCOVERY_PORTS = (
-        (SSH, [22]),
-    )
diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py
index ffc8b209..8088862b 100644
--- a/nautobot_device_onboarding/constants.py
+++ b/nautobot_device_onboarding/constants.py
@@ -1,6 +1,10 @@
 """Constants for nautobot_device_onboarding app."""
 
 from django.conf import settings
+from netutils.data_files.protocol_mappings import PROTOCOLS as NETUTILS_PROTOCOLS
+
+from .choices import AutodiscoveryProtocolTypeChoices
+
 
 NETMIKO_EXTRAS = (
     settings.PLUGINS_CONFIG.get("nautobot_plugin_nornir", {})
@@ -50,3 +54,7 @@
 
 # The git repository data source folder name for custom command mappers.
ONBOARDING_COMMAND_MAPPERS_REPOSITORY_FOLDER = "onboarding_command_mappers" + +AUTODISCOVERY_PORTS = { + AutodiscoveryProtocolTypeChoices.SSH: [NETUTILS_PROTOCOLS["ssh"]["port_number"]], +} diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 14353f2a..46ded501 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -41,12 +41,13 @@ ) from nautobot.ipam.models import Namespace, Prefix from nautobot_plugin_nornir.constants import NORNIR_SETTINGS +from netutils.ping import tcp_ping from nautobot_ssot.jobs.base import DataSource from nornir import InitNornir from nornir.core.plugins.inventory import InventoryPluginRegister from nautobot_device_onboarding.choices import SSOT_JOB_TO_COMMAND_CHOICE, AutodiscoveryProtocolTypeChoices -from nautobot_device_onboarding.constants import AUTODISCOVERY_PORTS_SSH +from nautobot_device_onboarding.constants import AUTODISCOVERY_PORTS from nautobot_device_onboarding.diffsync.adapters.sync_devices_adapters import ( SyncDevicesNautobotAdapter, SyncDevicesNetworkAdapter, @@ -849,6 +850,10 @@ def run(self, *args, **kwargs): # pragma: no cover class DeviceOnboardingDiscoveryJob(Job): """Job to Discover Network Devices and queue for actual Onboarding.""" + debug = BooleanVar( + default=False, + description="Enable for more verbose logging.", + ) prefix_tag = ObjectVar(model=Tag, required=True) protocols = MultiChoiceVar(choices=AutodiscoveryProtocolTypeChoices, required=True) @@ -857,6 +862,16 @@ class DeviceOnboardingDiscoveryJob(Job): required=True, description="SecretsGroup for device connection credentials.", ) + scanning_threads_count = IntegerVar( + description="Number of IPs to scan at a time.", + label="Number of Threads", + default=8, + ) + login_threads_count = IntegerVar( + description="Number of simultaneous SSH logins.", + label="Number of sim. SSH logins. Good luck ISE.", + default=2, + ) class Meta: """Meta object.""" @@ -866,95 +881,51 @@ class Meta: has_sensitive_variables = False hidden = False - def _get_active_targets(self): - active_targets = set() - autodiscovery_prefixes = Prefix.objects.filter(tags__in=[self.prefix_tag]) - - # TODO(mzb): Prevent already existing devices from being scanned - - # Send SYN packet to target IPs and SSH port - syn_packet = IP(dst=[str(p) for p in autodiscovery_prefixes]) / TCP(dport=22, flags="S") - - # Send packet and wait for a response - response = sr(syn_packet, timeout=3, verbose=1)[0] - - # Check if there is a response - if response: - for sent, received in response: - # Check if the response is a SYN-ACK (port is open) - if received.haslayer(TCP) and received[TCP].flags == 18: # SYN-ACK flag is 0x12, or 18 in decimal - self.logger.info(f"Port {sent.dst}:{sent.dport} is OPEN") - active_targets.add(sent.dst) - # Check if the response is an RST (port is closed) - elif received.haslayer(TCP) and received[TCP].flags & 0x04 == 4: # RST flag is 0x04, or 4 in decimal - self.logger.info(f"Port {sent.dst}:{sent.dport} is CLOSED") - # else: - # print(f"Port {target_port} is filtered (no response)") - - return active_targets - - def _probe_ssh(self, ip_address): - """SSH specific test. 
""" - from netutils.ping import tcp_ping # TODO(mzb): move - ssh_port_results = {} - - for ssh_port in AUTODISCOVERY_PORTS_SSH: - ssh_port_results[ssh_port] = tcp_ping(ip_address, ssh_port) + def _scan_target_ip(self, target_ip, protocols): + """Scan target IP Address for open protocol-ports.""" + return {protocol: getattr(self, f"_scan_target_{protocol}")(target_ip=target_ip) for protocol in protocols} - return ssh_port_results + def _scan_target_ssh(self, target_ip): + """Scan target IP address for TCP-SSH ports.""" + ssh_targets = [] - def _probe_target(self, ip_address, protocols): - """Test if $ports are open on $ip_address. - Returns: - {$protocol_name: {port_number: bool} - {"ssh": {22: True}} - """ - result = {} - - for protocol in protocols: - if protocol == AutodiscoveryProtocolTypeChoices.SSH: - result["ssh"] = self._probe_ssh(ip_address=ip_address) # TODO(mzb): getattr for monkey patching - - return result + for target_ssh_port in AUTODISCOVERY_PORTS[AutodiscoveryProtocolTypeChoices.SSH]: + if tcp_ping(target_ip, target_ssh_port): # Report only opened ports. + open_ssh_port = { + "port": target_ssh_port, + "is_open": True, + "protocol": AutodiscoveryProtocolTypeChoices.SSH, + } - def _test(self): - results = {} + ssh_targets.append(open_ssh_port) - for prefix in self.prefixes: # Hackaton contribution by Hanlin and team. - network = ipaddress.ip_network(prefix.prefix) + return ssh_targets - # Get a list of all IPs in the subnet - ip_list = [str(ip) for ip in network.hosts()] + def _scan(self): + """Scan the selected IP Addresses for open protocol-ports.""" + scan_result = {} - # Use a ThreadPoolExecutor to scan IPs concurrently - with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor: - future_to_ip = {} + with concurrent.futures.ThreadPoolExecutor(max_workers=self.scanning_threads_count) as executor: + future_to_ip = {} - for ip in ip_list: - if self.debug: - self.logger.debug(f"Starting scan for IP: {ip}") + for target_ip in self.targets: + if self.debug: + self.logger.debug(f"Starting scan for IP: {target_ip}") - # Submit the scanning task to the executor - future = executor.submit( - self._probe_target, - ip, - self.protocols - ) - future_to_ip[future] = ip + future = executor.submit(self._scan_target_ip, target_ip, self.protocols) + future_to_ip[future] = target_ip - for future in concurrent.futures.as_completed(future_to_ip): - ip = future_to_ip[future] - try: - # ip_result = future.result() - # results.update(ip_result) - results[ip] = future.result() - except Exception as e: - self.logger.error(f"Error with future for IP {ip}: {e}") + for future in concurrent.futures.as_completed(future_to_ip): + target_ip = future_to_ip[future] + try: + scan_result[target_ip] = future.result() + except Exception as e: + self.logger.error(f"Error with future for IP {target_ip}: {e}") if self.debug: - self.logger.info(f"Results: {results}") + self.logger.info(f"Results: {scan_result}") - return results + return scan_result def _parse_credentials(self, credentials): """Parse and return dictionary of credentials.""" @@ -974,12 +945,11 @@ def _parse_credentials(self, credentials): ) raise OnboardException("fail-credentials - Unable to parse selected credentials.") from err - def _guess_netmiko_device_type(self, hostname): + def _guess_netmiko_device_type(self, hostname, port): """Guess the device type of host, based on Netmiko.""" - netmiko_optional_args = {"port": 22} # , **NETMIKO_EXTRAS} + netmiko_optional_args = {"port": port} # , **NETMIKO_EXTRAS} guessed_device_type = 
None - - working_credentials = None + valid_credentials = None for secret_group in self.secrets_groups: self._parse_credentials(credentials=secret_group) @@ -996,7 +966,7 @@ def _guess_netmiko_device_type(self, hostname): try: guesser = SSHDetect(**remote_device) guessed_device_type = guesser.autodetect() - working_credentials = secret_group + valid_credentials = secret_group break @@ -1004,17 +974,76 @@ def _guess_netmiko_device_type(self, hostname): guessed_device_type = None guessed_exc = err - return guessed_device_type, guessed_exc, working_credentials + return guessed_device_type, guessed_exc, valid_credentials + + def _get_target_details(self, target_ip, target_port_details): + """Get target IP and protocol-port details.""" + target_details = {**target_port_details} + target_details["ip"] = target_ip + + # Dispatch SSH Manually + if target_port_details["protocol"] == AutodiscoveryProtocolTypeChoices.SSH: + guessed_device_type, guessed_exc, valid_credentials = self._guess_netmiko_device_type( + hostname=target_ip, + port=target_details["port"] + ) + target_details["platform"] = guessed_device_type + target_details["exception"] = guessed_exc + target_details["valid_credentials"] = valid_credentials + + return target_details + + def _get_targets_details(self, scan_result): + """Get target IPs details and find valid credentials for an open protocol-port.""" + results = {} + + # Use a ThreadPoolExecutor to scan IPs concurrently + with concurrent.futures.ThreadPoolExecutor(max_workers=self.login_threads_count) as executor: + future_to_ip = {} + + for target_ip in scan_result: + for target_port_details in scan_result[target_ip]: + target_port = target_port_details["port"] + if self.debug: + self.logger.debug(f"Starting get_details for IP: {target_ip}:{target_port}") + + future = executor.submit(self._get_target_details, target_ip, target_port_details) + future_to_ip[future] = f"{target_ip}:{target_port}" - def run(self, prefix_tag, secrets_groups, protocols, *args, **kwargs): # pragma: no cover + for future in concurrent.futures.as_completed(future_to_ip): + host = future_to_ip[future] + try: + results[host] = future.result() + except Exception as e: + self.logger.error(f"Error with future for IP {host}: {e}") + + if self.debug: + self.logger.info(f"Results: {results}") + + return results + + def run(self, debug, num_threads, num_ssh_threads, prefix_tag, secrets_groups, protocols, *args, **kwargs): # pragma: no cover """Process discovering devices.""" + self.debug = debug self.secrets_groups = secrets_groups + self.scanning_threads_count = num_threads + self.login_threads_count = num_ssh_threads + + # TODO(mzb): Introduce "skip" / blacklist tag too. + # TODO(mzb): Skip devices with primary_ip4 set. + self.targets = set() + for prefix in Prefix.objects.filter(tags__in=[prefix_tag]): # TODO(mzb): Initial filtering is with tags, can extend to Location/Role/Status too. + network = ipaddress.ip_network(prefix.prefix) + # Get a list of all IPs in the subnet + for ip in network.hosts(): + self.targets.add(str(ip)) - # TODO(mzb): Initial filtering is with tags, can extend to Location/Role/Status too. 
- self.prefixes = Prefix.objects.filter(tags__in=[prefix_tag]) self.protocols = protocols - self._test() + scan_result = self._scan() + ssh_result = self._get_targets_details(scan_result) + + return ssh_result # onboarding_requests = [] # for active_target in self._get_active_targets(): @@ -1022,7 +1051,7 @@ def run(self, prefix_tag, secrets_groups, protocols, *args, **kwargs): # pragma # if working_credentials and guessed_exc is None: # onboarding_requests.append([active_target, working_credentials]) - return onboarding_requests + # return onboarding_requests def on_success(self, retval, task_id, args, kwargs): """Success handler. From e22c7d773361408834423dbaf4931a10feb06b98 Mon Sep 17 00:00:00 2001 From: Marek Zbroch Date: Tue, 8 Apr 2025 19:40:59 +0200 Subject: [PATCH 6/8] marek prototype for discovery --- nautobot_device_onboarding/jobs.py | 150 +++++++++++++++++------------ 1 file changed, 88 insertions(+), 62 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 46ded501..2c9f378c 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -11,6 +11,7 @@ from diffsync.enum import DiffSyncFlags from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ObjectDoesNotExist, ValidationError from nautobot.apps.jobs import ( @@ -33,6 +34,8 @@ ) from nautobot.extras.models import ( CustomField, + Job, + JobResult, Role, SecretsGroup, SecretsGroupAssociation, @@ -854,9 +857,14 @@ class DeviceOnboardingDiscoveryJob(Job): default=False, description="Enable for more verbose logging.", ) - - prefix_tag = ObjectVar(model=Tag, required=True) - protocols = MultiChoiceVar(choices=AutodiscoveryProtocolTypeChoices, required=True) + prefix_tag = ObjectVar( + model=Tag, + required=True, + ) + protocols = MultiChoiceVar( + choices=AutodiscoveryProtocolTypeChoices, + required=True, + ) secrets_groups = MultiObjectVar( model=SecretsGroup, required=True, @@ -869,9 +877,15 @@ class DeviceOnboardingDiscoveryJob(Job): ) login_threads_count = IntegerVar( description="Number of simultaneous SSH logins.", - label="Number of sim. SSH logins. Good luck ISE.", + label="Number of sim. SSH logins..", default=2, ) + location = SSOTSyncNetworkData.location + namespace = SSOTSyncDevices.namespace + device_role = SSOTSyncDevices.device_role + device_status = SSOTSyncDevices.device_status + interface_status = SSOTSyncDevices.interface_status + ip_address_status = SSOTSyncDevices.ip_address_status class Meta: """Meta object.""" @@ -883,14 +897,21 @@ class Meta: def _scan_target_ip(self, target_ip, protocols): """Scan target IP Address for open protocol-ports.""" - return {protocol: getattr(self, f"_scan_target_{protocol}")(target_ip=target_ip) for protocol in protocols} + open_ports = [] + for protocol in protocols: + open_ports.extend(getattr(self, f"_scan_target_{protocol}")(target_ip=target_ip)) + + return open_ports def _scan_target_ssh(self, target_ip): """Scan target IP address for TCP-SSH ports.""" ssh_targets = [] for target_ssh_port in AUTODISCOVERY_PORTS[AutodiscoveryProtocolTypeChoices.SSH]: + self.logger.info(target_ssh_port) if tcp_ping(target_ip, target_ssh_port): # Report only opened ports. 
+ self.logger.info(target_ssh_port) + open_ssh_port = { "port": target_ssh_port, "is_open": True, @@ -902,7 +923,7 @@ def _scan_target_ssh(self, target_ip): return ssh_targets def _scan(self): - """Scan the selected IP Addresses for open protocol-ports.""" + """Scan the selected IP Addresses for open protocol-ports - dispatcher method.""" scan_result = {} with concurrent.futures.ThreadPoolExecutor(max_workers=self.scanning_threads_count) as executor: @@ -929,75 +950,78 @@ def _scan(self): def _parse_credentials(self, credentials): """Parse and return dictionary of credentials.""" - self.logger.info("Attempting to parse credentials from selected SecretGroup") + if self.debug: + self.logger.debug("Attempting to parse credentials from selected SecretGroup") + try: - self.username = credentials.get_secret_value( + username = credentials.get_secret_value( access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, ) - self.password = credentials.get_secret_value( + password = credentials.get_secret_value( access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, ) + return username, password except SecretsGroupAssociation.DoesNotExist as err: self.logger.exception( "Unable to use SecretsGroup selected, ensure Access Type is set to Generic & at minimum Username & Password types are set." ) raise OnboardException("fail-credentials - Unable to parse selected credentials.") from err - def _guess_netmiko_device_type(self, hostname, port): - """Guess the device type of host, based on Netmiko.""" + def _get_target_details_ssh(self, hostname, port): + """Guess the device platform of host, based on SSH.""" netmiko_optional_args = {"port": port} # , **NETMIKO_EXTRAS} - guessed_device_type = None + guessed_platform = None valid_credentials = None for secret_group in self.secrets_groups: - self._parse_credentials(credentials=secret_group) - guessed_exc = None + username, password = _parse_credentials(credentials=secret_group) + exception = None remote_device = { "device_type": "autodetect", "host": hostname, - "username": self.username, - "password": self.password, + "username": username, + "password": password, **netmiko_optional_args, } try: guesser = SSHDetect(**remote_device) - guessed_device_type = guesser.autodetect() + guessed_platform = guesser.autodetect() valid_credentials = secret_group break except Exception as err: # pylint: disable=broad-exception-caught - guessed_device_type = None - guessed_exc = err + guessed_platform = None + valid_credentials = None + exception = err - return guessed_device_type, guessed_exc, valid_credentials + return guessed_platform, exception, valid_credentials def _get_target_details(self, target_ip, target_port_details): """Get target IP and protocol-port details.""" target_details = {**target_port_details} target_details["ip"] = target_ip - # Dispatch SSH Manually + # Dispatch SSH if target_port_details["protocol"] == AutodiscoveryProtocolTypeChoices.SSH: - guessed_device_type, guessed_exc, valid_credentials = self._guess_netmiko_device_type( + guessed_platform, exception, credentials = self._get_target_details_ssh( hostname=target_ip, port=target_details["port"] ) - target_details["platform"] = guessed_device_type - target_details["exception"] = guessed_exc - target_details["valid_credentials"] = valid_credentials + target_details["platform"] = guessed_platform + target_details["exception"] = exception + target_details["credentials"] = credentials return 
target_details def _get_targets_details(self, scan_result): - """Get target IPs details and find valid credentials for an open protocol-port.""" + """Get target IPs details and find valid credentials for an open protocol-port - dispatcher method.""" results = {} - # Use a ThreadPoolExecutor to scan IPs concurrently with concurrent.futures.ThreadPoolExecutor(max_workers=self.login_threads_count) as executor: future_to_ip = {} @@ -1022,12 +1046,35 @@ def _get_targets_details(self, scan_result): return results - def run(self, debug, num_threads, num_ssh_threads, prefix_tag, secrets_groups, protocols, *args, **kwargs): # pragma: no cover + def run(self, + debug, + scanning_threads_count, + login_threads_count, + prefix_tag, + secrets_groups, + protocols, + location, + namespace, + device_role, + device_status, + interface_status, + ip_address_status, + *args, + **kwargs + ): # pragma: no cover """Process discovering devices.""" self.debug = debug self.secrets_groups = secrets_groups - self.scanning_threads_count = num_threads - self.login_threads_count = num_ssh_threads + self.scanning_threads_count = scanning_threads_count + self.login_threads_count = login_threads_count + + # Pass through to onboarding task + self.location = location + self.namespace = namespace + self.device_role = device_role + self.device_status = device_status + self.interface_status = interface_status + self.ip_address_status = ip_address_status # TODO(mzb): Introduce "skip" / blacklist tag too. # TODO(mzb): Skip devices with primary_ip4 set. @@ -1041,34 +1088,13 @@ def run(self, debug, num_threads, num_ssh_threads, prefix_tag, secrets_groups, p self.protocols = protocols scan_result = self._scan() + self.logger.info(scan_result) ssh_result = self._get_targets_details(scan_result) + self.logger.info(ssh_result) return ssh_result - # onboarding_requests = [] - # for active_target in self._get_active_targets(): - # guessed_device_type, guessed_exc, working_credentials = self._guess_netmiko_device_type(hostname=active_target) - # if working_credentials and guessed_exc is None: - # onboarding_requests.append([active_target, working_credentials]) - - # return onboarding_requests - def on_success(self, retval, task_id, args, kwargs): - """Success handler. - - Run by the worker if the task executes successfully. - - Arguments: - retval (Any): The return value of the task. - task_id (str): Unique id of the executed task. - args (Tuple): Original arguments for the executed task. - kwargs (Dict): Original keyword arguments for the executed task. - - Returns: - (None): The return value of this handler is ignored. 
- """ - from nautobot.extras.models import Job, JobResult - from django.contrib.auth import get_user_model User = get_user_model() try: user = User.objects.get(username="admin") @@ -1076,22 +1102,22 @@ def on_success(self, retval, task_id, args, kwargs): raise CommandError("No such user") from exc for val in retval: data = { - "location": Location.objects.get(name="Temp").id, - "namespace": Namespace.objects.first().id, - "ip_addresses": val[0], - "port": 22, + "location": self.location.id, + "namespace": self.namespace.id, + "ip_addresses": retval[val]["ip"], + "port": retval[val]["port"].id, "timeout": 10, - "device_role": Role.objects.get(name="Generic").id, - "device_status": Status.objects.get(name="Active").id, - "ip_address_status": Status.objects.get(name="Active").id, - "secrets_group": val[1].pk, + "device_role": self.device_role.id, + "device_status": self.device_status.id, + "ip_address_status": self.ip_address_status.id, + "secrets_group": retval[val]["credentials"].id, 'dryrun': False, 'memory_profiling': False, 'debug': False, 'csv_file': None, 'set_mgmt_only': False, 'update_devices_without_primary_ip': True, - 'interface_status': Status.objects.get(name="Active").id, + 'interface_status': self.interface_status.id, 'platform': None, "connectivity_test": False, } From bd2c77fd0a2962d2f8b0f1cbca9f4beed3abdebe Mon Sep 17 00:00:00 2001 From: Marek Zbroch Date: Wed, 9 Apr 2025 21:01:36 +0200 Subject: [PATCH 7/8] marek prototype for discovery --- nautobot_device_onboarding/jobs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 2c9f378c..67c7078c 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -72,7 +72,6 @@ from nautobot_device_onboarding.utils.helper import onboarding_task_fqdn_to_ip from netmiko import SSHDetect -from scapy.all import IP, TCP, sr1, sr, conf InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -1063,6 +1062,7 @@ def run(self, **kwargs ): # pragma: no cover """Process discovering devices.""" + self.logger.debug(self) self.debug = debug self.secrets_groups = secrets_groups self.scanning_threads_count = scanning_threads_count @@ -1095,6 +1095,7 @@ def run(self, return ssh_result def on_success(self, retval, task_id, args, kwargs): + """Start Onboarding job for discovered targets.""" User = get_user_model() try: user = User.objects.get(username="admin") From 58ddb95aab9adfd0f4704427231fe3d801919984 Mon Sep 17 00:00:00 2001 From: Marek Zbroch Date: Thu, 10 Apr 2025 13:14:22 +0200 Subject: [PATCH 8/8] marek prototype for discovery --- nautobot_device_onboarding/jobs.py | 71 ++++++++++++++++++------------ 1 file changed, 42 insertions(+), 29 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 67c7078c..443e9a39 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -34,7 +34,6 @@ ) from nautobot.extras.models import ( CustomField, - Job, JobResult, Role, SecretsGroup, @@ -42,7 +41,7 @@ Status, Tag, ) -from nautobot.ipam.models import Namespace, Prefix +from nautobot.ipam.models import IPAddress, IPAddressToInterface, Namespace, Prefix from nautobot_plugin_nornir.constants import NORNIR_SETTINGS from netutils.ping import tcp_ping from nautobot_ssot.jobs.base import DataSource @@ -856,13 +855,15 @@ class DeviceOnboardingDiscoveryJob(Job): default=False, description="Enable for more verbose logging.", ) - prefix_tag = 
ObjectVar( - model=Tag, + prefixes = ObjectVar( + model=Prefix, required=True, + description="Prefixes to be searched for devices missing in Nautobot inventory.", ) protocols = MultiChoiceVar( choices=AutodiscoveryProtocolTypeChoices, required=True, + description="Discovery protocols.", ) secrets_groups = MultiObjectVar( model=SecretsGroup, @@ -885,7 +886,9 @@ class DeviceOnboardingDiscoveryJob(Job): device_status = SSOTSyncDevices.device_status interface_status = SSOTSyncDevices.interface_status ip_address_status = SSOTSyncDevices.ip_address_status - + set_mgmt_only = SSOTSyncDevices.set_mgmt_only + update_devices_without_primary_ip = SSOTSyncDevices.update_devices_without_primary_ip + class Meta: """Meta object.""" @@ -1001,7 +1004,7 @@ def _get_target_details_ssh(self, hostname, port): return guessed_platform, exception, valid_credentials def _get_target_details(self, target_ip, target_port_details): - """Get target IP and protocol-port details.""" + """Get open protocol-port details on target IP.""" target_details = {**target_port_details} target_details["ip"] = target_ip @@ -1045,11 +1048,13 @@ def _get_targets_details(self, scan_result): return results - def run(self, - debug, + def run(self, + dryrun, + memory_profiling, + debug, scanning_threads_count, login_threads_count, - prefix_tag, + prefixes, secrets_groups, protocols, location, @@ -1058,12 +1063,18 @@ def run(self, device_status, interface_status, ip_address_status, + set_mgmt_only, + update_devices_without_primary_ip, *args, **kwargs ): # pragma: no cover """Process discovering devices.""" - self.logger.debug(self) + self.dryrun = dryrun + self.memory_profiling = memory_profiling self.debug = debug + + self.prefixes = prefixes + self.protocols = protocols self.secrets_groups = secrets_groups self.scanning_threads_count = scanning_threads_count self.login_threads_count = login_threads_count @@ -1075,17 +1086,23 @@ def run(self, self.device_status = device_status self.interface_status = interface_status self.ip_address_status = ip_address_status + self.set_mgmt_only = set_mgmt_only + self.update_devices_without_primary_ip = update_devices_without_primary_ip # TODO(mzb): Introduce "skip" / blacklist tag too. - # TODO(mzb): Skip devices with primary_ip4 set. - self.targets = set() - for prefix in Prefix.objects.filter(tags__in=[prefix_tag]): # TODO(mzb): Initial filtering is with tags, can extend to Location/Role/Status too. 
+ + self.targets = set() # Ensure IP uniqueness + + for prefix in self.prefixes: network = ipaddress.ip_network(prefix.prefix) + # Get a list of all IPs in the subnet for ip in network.hosts(): - self.targets.add(str(ip)) + # Skip IP Addresses already assigned to an interface + if IPAddressToInterface.objects.filter(ip_address__in=IPAddress.objects.filter(host=ip)): + continue - self.protocols = protocols + self.targets.add(str(ip)) scan_result = self._scan() self.logger.info(scan_result) @@ -1096,11 +1113,7 @@ def run(self, def on_success(self, retval, task_id, args, kwargs): """Start Onboarding job for discovered targets.""" - User = get_user_model() - try: - user = User.objects.get(username="admin") - except User.DoesNotExist as exc: - raise CommandError("No such user") from exc + for val in retval: data = { "location": self.location.id, @@ -1112,18 +1125,18 @@ def on_success(self, retval, task_id, args, kwargs): "device_status": self.device_status.id, "ip_address_status": self.ip_address_status.id, "secrets_group": retval[val]["credentials"].id, - 'dryrun': False, - 'memory_profiling': False, - 'debug': False, - 'csv_file': None, - 'set_mgmt_only': False, - 'update_devices_without_primary_ip': True, - 'interface_status': self.interface_status.id, - 'platform': None, + "dryrun": self.dryrun, + "memory_profiling": self.memory_profiling, + "debug": self.debug, + "csv_file": None, + "set_mgmt_only": self.set_mgmt_only, + "update_devices_without_primary_ip": self.update_devices_without_primary_ip, + "interface_status": self.interface_status.id, + "platform": None, "connectivity_test": False, } job = Job.objects.get(name="Sync Devices From Network") - JobResult.enqueue_job(job, user, **data) + JobResult.enqueue_job(job, self.user, **data) jobs = [