From 22a9d711447a7db2ade624f5b3698251a01c413a Mon Sep 17 00:00:00 2001 From: Adam Byczkowski <38091261+qduk@users.noreply.github.com> Date: Fri, 5 Jan 2024 12:47:14 -0600 Subject: [PATCH 01/47] Working on tests --- development/nautobot_config.py | 1 + docs/admin/integrations/infoblox_setup.md | 37 ++++---- .../integrations/infoblox/constant.py | 1 + .../infoblox/diffsync/adapters/infoblox.py | 4 + .../integrations/infoblox/utils/client.py | 20 +++- .../fixtures/get_network_containers.json | 38 ++++++++ .../fixtures/get_network_containers_ipv6.json | 52 ++++++++++ .../tests/infoblox/fixtures_infoblox.py | 10 ++ nautobot_ssot/tests/infoblox/test_client.py | 24 +++++ .../tests/infoblox/test_infoblox_adapter.py | 95 +++++++++++++++++++ 10 files changed, 261 insertions(+), 21 deletions(-) create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json create mode 100644 nautobot_ssot/tests/infoblox/test_infoblox_adapter.py diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 07f784400..94e630d8f 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -222,6 +222,7 @@ os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_IP_ADDRESSES") ), "infoblox_import_objects_subnets": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS")), + "infoblox_import_objects_subnets_ipv6": os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS_IPV^", "").split(","), "infoblox_import_objects_vlan_views": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLAN_VIEWS")), "infoblox_import_objects_vlans": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLANS")), "infoblox_import_subnets": os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS", "").split(","), diff --git a/docs/admin/integrations/infoblox_setup.md b/docs/admin/integrations/infoblox_setup.md index e23abc61f..614383ec9 100644 --- 
a/docs/admin/integrations/infoblox_setup.md +++ b/docs/admin/integrations/infoblox_setup.md @@ -1,6 +1,6 @@ # Infoblox Integration Setup -This guide will walk you through steps to set up Infoblox integration with the `nautobot_ssot` app. +This guide will walk you through the steps to set up Infoblox integration with the `nautobot_ssot` app. ## Prerequisites @@ -14,22 +14,23 @@ pip install nautobot-ssot[infoblox] Integration behavior can be controlled with the following settings: -| Setting | Default | Description | -| ------------------------------------------ | ------- | ------------------------------------------------------------------------ | -| infoblox_url | N/A | URL of the Infoblox instance to sync with. | -| infoblox_username | N/A | The username to authenticate against Infoblox with. | -| infoblox_password | N/A | The password to authenticate against Infblox with. | -| infoblox_verify_ssl | True | Toggle SSL verification when syncing data with Infoblox. | -| infoblox_wapi_version | v2.12 | The version of the Infoblox API. | -| infoblox_enable_sync_to_infoblox | False | Add job to sync data from Nautobot into Infoblox. | -| infoblox_enable_rfc1918_network_containers | False | Add job to sync network containers to Nautobot (top level aggregates). | -| infoblox_default_status | active | Default Status to be assigned to imported objects. | -| infoblox_import_objects_ip_addresses | False | Import IP addresses from Infoblox to Nautobot. | -| infoblox_import_objects_subnets | False | Import subnets from Infoblox to Nautobot. | -| infoblox_import_objects_vlan_views | False | Import VLAN views from Infoblox to Nautobot. | -| infoblox_import_objects_vlans | False | Import VLANs from Infoblox to Nautobot. | -| infoblox_import_subnets | N/A | List of Subnets in CIDR string notation to filter import to. | -| infoblox_network_view | N/A | Only load IPAddresses from a specific Infoblox Network View. 
| +| Setting | Default | Description | +| ------------------------------------------ | ------- | ---------------------------------------------------------------------- | +| infoblox_url | N/A | URL of the Infoblox instance to sync with. | +| infoblox_username | N/A | The username to authenticate against Infoblox with. | +| infoblox_password | N/A | The password to authenticate against Infblox with. | +| infoblox_verify_ssl | True | Toggle SSL verification when syncing data with Infoblox. | +| infoblox_wapi_version | v2.12 | The version of the Infoblox API. | +| infoblox_enable_sync_to_infoblox | False | Add job to sync data from Nautobot into Infoblox. | +| infoblox_enable_rfc1918_network_containers | False | Add job to sync network containers to Nautobot (top level aggregates). | +| infoblox_default_status | active | Default Status to be assigned to imported objects. | +| infoblox_import_objects_ip_addresses | False | Import IP addresses from Infoblox to Nautobot. | +| infoblox_import_objects_subnets | False | Import subnets from Infoblox to Nautobot. | +| infoblox_import_objects_subnets_ipv6 | False | Import IPv6 subnets from Infoblox to Nautobot. | +| infoblox_import_objects_vlan_views | False | Import VLAN views from Infoblox to Nautobot. | +| infoblox_import_objects_vlans | False | Import VLANs from Infoblox to Nautobot. | +| infoblox_import_subnets | N/A | List of Subnets in CIDR string notation to filter import to. | +| infoblox_network_view | N/A | Only load IPAddresses from a specific Infoblox Network View. 
| Below is an example snippet from `nautobot_config.py` that demonstrates how to enable and configure Infoblox integration: @@ -46,6 +47,7 @@ PLUGINS_CONFIG = { os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_IP_ADDRESSES") ), "infoblox_import_objects_subnets": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS")), + "infoblox_import_objects_subnets_ipv6": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS_IPV6")), "infoblox_import_objects_vlan_views": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLAN_VIEWS")), "infoblox_import_objects_vlans": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLANS")), "infoblox_import_subnets": os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS", "").split(","), @@ -113,6 +115,7 @@ PLUGINS_CONFIG = { os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_IP_ADDRESSES") ), "infoblox_import_objects_subnets": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS")), + "infoblox_import_objects_subnets_ipv6": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS_IPV6")), "infoblox_import_objects_vlan_views": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLAN_VIEWS")), "infoblox_import_objects_vlans": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLANS")), "infoblox_import_subnets": os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS", "").split(","), diff --git a/nautobot_ssot/integrations/infoblox/constant.py b/nautobot_ssot/integrations/infoblox/constant.py index f4ade4983..33933a32f 100644 --- a/nautobot_ssot/integrations/infoblox/constant.py +++ b/nautobot_ssot/integrations/infoblox/constant.py @@ -20,6 +20,7 @@ def _read_app_config(): "vlan_views": config["infoblox_import_objects_vlan_views"], "vlans": config["infoblox_import_objects_vlans"], "subnets": config["infoblox_import_objects_subnets"], + "subnets_ipv6": config["infoblox_import_objects_subnets_ipv6"], "ip_addresses": config["infoblox_import_objects_ip_addresses"], }, 
"infoblox_import_subnets": config["infoblox_import_subnets"], diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index e3ef25b88..62cccc6ff 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -71,7 +71,11 @@ def load_prefixes(self): # Need to load containers here to prevent duplicates when syncing back to Infoblox containers = self.conn.get_network_containers() subnets = self.conn.get_all_subnets() + if PLUGIN_CFG["infoblox_import_objects"].get("subnets_ipv6"): + containers += self.conn.get_network_containers(ipv6=True) + subnets += self.conn.get_all_subnets(ipv6=True) all_networks = containers + subnets + raise Exception(f"{containers}") self.subnets = [(x["network"], x["network_view"]) for x in subnets] default_ext_attrs = get_default_ext_attrs(review_list=all_networks) for _pf in all_networks: diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index 0863cb6db..219606dc5 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -756,11 +756,12 @@ def get_dhcp_lease_from_hostname(self, hostname): logger.info(response.json) return response.json() - def get_all_subnets(self, prefix: str = None): + def get_all_subnets(self, prefix: str = None, ipv6: bool = False): """Get all Subnets. Args: prefix (str): Network prefix - '10.220.0.0/22' + ipv6 (bool): Whether or not the call should be made for IPv6 subnets. 
Returns: (list) of record dicts @@ -785,7 +786,11 @@ def get_all_subnets(self, prefix: str = None): }, ] """ - url_path = "network" + if ipv6: + url_path = "ipv6network" + else: + url_path = "network" + params = { "_return_as_object": 1, "_return_fields": "network,network_view,comment,extattrs,rir_organization,rir,vlans", @@ -1261,9 +1266,12 @@ def remove_duplicates(self, network_list: list) -> list: return new_list - def get_network_containers(self, prefix: str = ""): + def get_network_containers(self, prefix: str = "", ipv6: bool = False): """Get all Network Containers. + Args: + prefix (Str): Specific prefix (192.168.0.1/24) + Returns: (list) of record dicts @@ -1279,7 +1287,11 @@ def get_network_containers(self, prefix: str = ""): } ] """ - url_path = "networkcontainer" + if ipv6: + url_path = "ipv6networkcontainer" + else: + url_path = "networkcontainer" + params = { "_return_as_object": 1, "_return_fields": "network,comment,network_view,extattrs,rir_organization,rir", diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json new file mode 100644 index 000000000..ef019768d --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json @@ -0,0 +1,38 @@ +{ + "result": [ + { + "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDEwLjYxLjE1LjAvMjQvMA:10.61.15.0/24/NAT", + "comment": "NAT", + "extattrs": { + "IPPlan Last Modified": { + "value": "2011-09-28T11:01:00Z" + }, + "IPPlan UserID": { + "value": "admin" + } + }, + "network": "10.61.15.0/24", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4zMC4zMi4wLzIyLzA:172.30.32.0/22/2%20NAT", + "comment": "NAT", + "extattrs": {}, + "network": "172.30.32.0/22", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": 
"networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4xOS42NS4wLzI0LzA:172.19.65.0/24/3%20NAT", + "comment": "NAT", + "extattrs": {}, + "network": "172.19.65.0/24", + "network_view": "default", + "rir": "NONE", + "status": "container" + } + ] +} \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json new file mode 100644 index 000000000..f5cf8b97b --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json @@ -0,0 +1,52 @@ +{ + "result": [ + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQxMDA6Oi80MC8w:2001%3A5b0%3A4100%3A%3A/40/Gateway%201", + "comment": "Gateway 1", + "extattrs": { + "GWID": { + "value": "ABC" + }, + "GWType": { + "value": "Test" + } + }, + "network": "2001:5b0:4100::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQyMDA6Oi80MC8w:2001%3A5b0%3A4200%3A%3A/40/Gateway%202", + "comment": "Gateway 2", + "extattrs": { + "GWID": { + "value": "ABC" + }, + "GWType": { + "value": "Test" + } + }, + "network": "2001:5b0:4200::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQzMDA6Oi80MC8w:2001%3A5b0%3A4300%3A%3A/40/Gateway%203", + "comment": "Gateway 3", + "extattrs": { + "GWID": { + "value": "XYZ" + }, + "GWType": { + "value": "Test" + } + }, + "network": "2001:5b0:4300::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" + } + ] +} \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py index 2d4407160..45136d9e6 100644 --- a/nautobot_ssot/tests/infoblox/fixtures_infoblox.py +++ b/nautobot_ssot/tests/infoblox/fixtures_infoblox.py @@ -123,3 +123,13 
@@ def find_next_available_ip(): def search_ipv4_address(): """Return a search IPv4 address response.""" return _json_read_fixture("search_ipv4_address.json") + + +def get_network_containers(): + """Return a get_all_containers response.""" + return _json_read_fixture("get_network_containers.json") + + +def get_network_containers_ipv6(): + """Return a get_all_containers IPv6 response.""" + return _json_read_fixture("get_network_containers_ipv6.json") diff --git a/nautobot_ssot/tests/infoblox/test_client.py b/nautobot_ssot/tests/infoblox/test_client.py index 0095e6836..a2b87d735 100644 --- a/nautobot_ssot/tests/infoblox/test_client.py +++ b/nautobot_ssot/tests/infoblox/test_client.py @@ -30,6 +30,8 @@ get_dhcp_lease_from_hostname, get_all_subnets, get_authoritative_zone, + get_network_containers, + get_network_containers_ipv6, find_network_reference, find_next_available_ip, search_ipv4_address, @@ -639,3 +641,25 @@ def test_search_ipv4_address_fail(self): self.infoblox_client.search_ipv4_address(mock_ip) self.assertEqual(context.exception.response.status_code, 404) + + def test_get_network_containers(self): + """Test get_network_containers success.""" + mock_response = get_network_containers() + mock_uri = "networkcontainer" + + with requests_mock.Mocker() as req: + req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=200) + resp = self.infoblox_client.get_network_containers() + + self.assertEqual(resp, mock_response["result"]) + + def test_get_network_containers_ipv6(self): + """Test get_network_containers IPv6 success.""" + mock_response = get_network_containers_ipv6() + mock_uri = "ipv6networkcontainer" + + with requests_mock.Mocker() as req: + req.get(f"{LOCALHOST}/{mock_uri}", json=mock_response, status_code=200) + resp = self.infoblox_client.get_network_containers(ipv6=True) + + self.assertEqual(resp, mock_response["result"]) diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py 
new file mode 100644 index 000000000..5585e73f3 --- /dev/null +++ b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py @@ -0,0 +1,95 @@ +"""Unit tests for the Infoblox DiffSync adapter class.""" +import json +from unittest.mock import MagicMock, patch + +from django.test import TestCase, override_settings +from nautobot.extras.models import JobResult + +from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter +from nautobot_ssot.integrations.infoblox.diffsync.models.infoblox import InfobloxNetwork +from nautobot_ssot.integrations.infoblox.jobs import InfobloxDataSource +from nautobot_ssot.integrations.infoblox.utils.client import InfobloxApi + + +def load_json(path): + """Load a json file.""" + with open(path, encoding="utf-8") as file: + return json.loads(file.read()) + + +CONTAINER_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json") +SUBNET_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json") +# DEVICE_INVENTORY_FIXTURE = load_json("./nautobot_ssot/tests/ipfabric/fixtures/get_device_inventory.json") +# VLAN_FIXTURE = load_json("./nautobot_ssot/tests/ipfabric/fixtures/get_vlans.json") +# INTERFACE_FIXTURE = load_json("./nautobot_ssot/tests/ipfabric/fixtures/get_interface_inventory.json") + + +class InfobloxDiffSyncTestCase(TestCase): + """Test the InfobloxDiffSync adapter class.""" + + def setUp(self) -> None: + # Create a mock client + self.conn = MagicMock() + + self.job = InfobloxDataSource() + self.job.job_result = JobResult.objects.create( + name=self.job.class_path, task_name="fake task", worker="default" + ) + self.infoblox = InfobloxAdapter(job=self.job, sync=None, conn=self.conn) + return super().setUp() + + def test_load_prefixes(self): + """Test the load_prefixes function.""" + self.conn.get_all_subnets.return_value = SUBNET_FIXTURE + self.conn.get_network_containers.return_value = CONTAINER_FIXTURE + with patch.object(InfobloxApi, 
"get_all_subnets", self.conn.get_all_subnets): + with patch.object(InfobloxApi, "get_network_containers", self.conn.get_network_containers): + self.infoblox.load_prefixes() + # print(self.infoblox.get(InfobloxNetwork, {"network": "10.61.15.0/24"})) + print(self.infoblox.dict()) + self.assertEqual(True, False) + + # self.ipfabric.load() + # self.assertEqual( + # {site["siteName"] for site in SITE_FIXTURE}, + # {site.get_unique_id() for site in ipfabric.get_all("location")}, + # ) + # self.assertEqual( + # {dev["hostname"] for dev in DEVICE_INVENTORY_FIXTURE}, + # {dev.get_unique_id() for dev in ipfabric.get_all("device")}, + # ) + # self.assertEqual( + # {f"{vlan['vlanName']}__{vlan['siteName']}" for vlan in VLAN_FIXTURE}, + # {vlan.get_unique_id() for vlan in ipfabric.get_all("vlan")}, + # ) + + # # Assert each site has a device tied to it. + # for site in ipfabric.get_all("location"): + # self.assertEqual(len(site.devices), 1, f"{site} does not have the expected single device tied to it.") + # self.assertTrue(hasattr(site, "vlans")) + + # # Assert each device has the necessary attributes + # for device in ipfabric.get_all("device"): + # self.assertTrue(hasattr(device, "location_name")) + # self.assertTrue(hasattr(device, "model")) + # self.assertTrue(hasattr(device, "vendor")) + # self.assertTrue(hasattr(device, "serial_number")) + # self.assertTrue(hasattr(device, "interfaces")) + + # # Assert each vlan has the necessary attributes + # for vlan in ipfabric.get_all("vlan"): + # self.assertTrue(hasattr(vlan, "name")) + # self.assertTrue(hasattr(vlan, "vid")) + # self.assertTrue(hasattr(vlan, "status")) + # self.assertTrue(hasattr(vlan, "location")) + # self.assertTrue(hasattr(vlan, "description")) + + # # Assert each interface has the necessary attributes + # for interface in ipfabric.get_all("interface"): + # self.assertTrue(hasattr(interface, "name")) + # self.assertTrue(hasattr(interface, "device_name")) + # self.assertTrue(hasattr(interface, "mac_address")) + # 
self.assertTrue(hasattr(interface, "mtu")) + # self.assertTrue(hasattr(interface, "ip_address")) + # self.assertTrue(hasattr(interface, "subnet_mask")) + # self.assertTrue(hasattr(interface, "type")) From dc68ab2cd20937f05e28a4e0afeb33044b44c146 Mon Sep 17 00:00:00 2001 From: Adam Byczkowski <38091261+qduk@users.noreply.github.com> Date: Mon, 8 Jan 2024 15:46:24 -0600 Subject: [PATCH 02/47] TESTING --- development/nautobot_config.py | 3 +- .../infoblox/diffsync/adapters/infoblox.py | 4 ++- .../tests/infoblox/test_infoblox_adapter.py | 35 ++++++++++++------- 3 files changed, 27 insertions(+), 15 deletions(-) diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 94e630d8f..2816e1cc4 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -225,7 +225,8 @@ "infoblox_import_objects_subnets_ipv6": os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS_IPV^", "").split(","), "infoblox_import_objects_vlan_views": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLAN_VIEWS")), "infoblox_import_objects_vlans": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLANS")), - "infoblox_import_subnets": os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS", "").split(","), + # "infoblox_import_subnets": os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS", "").split(","), + "infoblox_import_subnets": False, "infoblox_password": os.getenv("NAUTOBOT_SSOT_INFOBLOX_PASSWORD"), "infoblox_url": os.getenv("NAUTOBOT_SSOT_INFOBLOX_URL"), "infoblox_username": os.getenv("NAUTOBOT_SSOT_INFOBLOX_USERNAME"), diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index 62cccc6ff..251210d24 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -70,12 +70,14 @@ def load_prefixes(self): else: # Need to load containers here to prevent duplicates 
when syncing back to Infoblox containers = self.conn.get_network_containers() + # raise Exception("Hello") subnets = self.conn.get_all_subnets() if PLUGIN_CFG["infoblox_import_objects"].get("subnets_ipv6"): containers += self.conn.get_network_containers(ipv6=True) subnets += self.conn.get_all_subnets(ipv6=True) all_networks = containers + subnets - raise Exception(f"{containers}") + raise Exception(f"{all_networks}") + # raise Exception(f"{containers}") self.subnets = [(x["network"], x["network_view"]) for x in subnets] default_ext_attrs = get_default_ext_attrs(review_list=all_networks) for _pf in all_networks: diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py index 5585e73f3..144f5ea62 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py @@ -8,7 +8,7 @@ from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter from nautobot_ssot.integrations.infoblox.diffsync.models.infoblox import InfobloxNetwork from nautobot_ssot.integrations.infoblox.jobs import InfobloxDataSource -from nautobot_ssot.integrations.infoblox.utils.client import InfobloxApi +from nautobot_ssot.integrations.infoblox.constant import PLUGIN_CFG def load_json(path): @@ -27,8 +27,20 @@ def load_json(path): class InfobloxDiffSyncTestCase(TestCase): """Test the InfobloxDiffSync adapter class.""" - def setUp(self) -> None: - # Create a mock client + # def setUp(self) -> None: + # # Create a mock client + # self.conn = MagicMock() + + # self.job = InfobloxDataSource() + # self.job.job_result = JobResult.objects.create( + # name=self.job.class_path, task_name="fake task", worker="default" + # ) + # self.infoblox = InfobloxAdapter(job=self.job, sync=None, conn=self.conn) + # return super().setUp() + + @patch("PLUGIN_CFG", {"infoblox_import_subnets": False}) + def test_load_prefixes(self): + """Test the load_prefixes function.""" 
self.conn = MagicMock() self.job = InfobloxDataSource() @@ -36,18 +48,15 @@ def setUp(self) -> None: name=self.job.class_path, task_name="fake task", worker="default" ) self.infoblox = InfobloxAdapter(job=self.job, sync=None, conn=self.conn) - return super().setUp() - - def test_load_prefixes(self): - """Test the load_prefixes function.""" self.conn.get_all_subnets.return_value = SUBNET_FIXTURE self.conn.get_network_containers.return_value = CONTAINER_FIXTURE - with patch.object(InfobloxApi, "get_all_subnets", self.conn.get_all_subnets): - with patch.object(InfobloxApi, "get_network_containers", self.conn.get_network_containers): - self.infoblox.load_prefixes() - # print(self.infoblox.get(InfobloxNetwork, {"network": "10.61.15.0/24"})) - print(self.infoblox.dict()) - self.assertEqual(True, False) + # print(self.conn.get_network_containers()) + # with patch.object(InfobloxApi, "get_all_subnets", self.conn.get_all_subnets): + # with patch.object(InfobloxApi, "get_network_containers", self.conn.get_network_containers): + # with patch.object(PLUGIN_CFG, {"infoblox_import_subnets": False}): + self.infoblox.load_prefixes() + # # print(self.infoblox.get(InfobloxNetwork, {"network": "10.61.15.0/24"})) + # self.assertEqual(True, False) # self.ipfabric.load() # self.assertEqual( From 76c39861a248d36876374b25c9087da33728c07d Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 9 Jan 2024 13:42:16 -0600 Subject: [PATCH 03/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20plugin=20?= =?UTF-8?q?settings=20to=20correct=20prepended=20form.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../aristacv/diffsync/models/nautobot.py | 98 ++++++++++++++----- 1 file changed, 73 insertions(+), 25 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py index 95d9f2bfd..7b0bf9957 100644 --- 
a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py @@ -11,8 +11,17 @@ from nautobot.ipam.models import IPAddress as OrmIPAddress import distutils -from nautobot_ssot.integrations.aristacv.constant import APP_SETTINGS, ARISTA_PLATFORM, CLOUDVISION_PLATFORM -from nautobot_ssot.integrations.aristacv.diffsync.models.base import Device, CustomField, IPAddress, Port +from nautobot_ssot.integrations.aristacv.constant import ( + APP_SETTINGS, + ARISTA_PLATFORM, + CLOUDVISION_PLATFORM, +) +from nautobot_ssot.integrations.aristacv.diffsync.models.base import ( + Device, + CustomField, + IPAddress, + Port, +) from nautobot_ssot.integrations.aristacv.utils import nautobot try: @@ -20,7 +29,9 @@ LIFECYCLE_MGMT = True except ImportError: - print("Device Lifecycle app isn't installed so will revert to CustomField for OS version.") + print( + "Device Lifecycle app isn't installed so will revert to CustomField for OS version." 
+ ) LIFECYCLE_MGMT = False @@ -42,33 +53,48 @@ class NautobotDevice(Device): def create(cls, diffsync, ids, attrs): """Create device object in Nautobot.""" site_code, role_code = nautobot.parse_hostname(ids["name"].lower()) - site_map = APP_SETTINGS.get("site_mappings") - role_map = APP_SETTINGS.get("role_mappings") + site_map = APP_SETTINGS.get("aristacv_site_mappings") + role_map = APP_SETTINGS.get("aristacv_role_mappings") if site_code and site_code in site_map: site = nautobot.verify_site(site_map[site_code]) elif "CloudVision" in ids["name"]: - if APP_SETTINGS.get("controller_site"): - site = nautobot.verify_site(APP_SETTINGS["controller_site"]) + if APP_SETTINGS.get("aristacv_controller_site"): + site = nautobot.verify_site(APP_SETTINGS["aristacv_controller_site"]) else: site = nautobot.verify_site("CloudVision") else: - site = nautobot.verify_site(APP_SETTINGS.get("from_cloudvision_default_site", DEFAULT_SITE)) + site = nautobot.verify_site( + APP_SETTINGS.get("aristacv_from_cloudvision_default_site", DEFAULT_SITE) + ) if role_code and role_code in role_map: role = nautobot.verify_device_role_object( role_map[role_code], - APP_SETTINGS.get("from_cloudvision_default_device_role_color", DEFAULT_DEVICE_ROLE_COLOR), + APP_SETTINGS.get( + "aristacv_from_cloudvision_default_device_role_color", + DEFAULT_DEVICE_ROLE_COLOR, + ), ) elif "CloudVision" in ids["name"]: - role = nautobot.verify_device_role_object("Controller", DEFAULT_DEVICE_ROLE_COLOR) + role = nautobot.verify_device_role_object( + "Controller", DEFAULT_DEVICE_ROLE_COLOR + ) else: role = nautobot.verify_device_role_object( - APP_SETTINGS.get("from_cloudvision_default_device_role", DEFAULT_DEVICE_ROLE), - APP_SETTINGS.get("from_cloudvision_default_device_role_color", DEFAULT_DEVICE_ROLE_COLOR), + APP_SETTINGS.get( + "aristacv_from_cloudvision_default_device_role", DEFAULT_DEVICE_ROLE + ), + APP_SETTINGS.get( + "aristacv_from_cloudvision_default_device_role_color", + DEFAULT_DEVICE_ROLE_COLOR, + ), ) - if 
APP_SETTINGS.get("create_controller") and "CloudVision" in ids["name"]: + if ( + APP_SETTINGS.get("aristacv_create_controller") + and "CloudVision" in ids["name"] + ): platform = OrmPlatform.objects.get(name=CLOUDVISION_PLATFORM) else: platform = OrmPlatform.objects.get(name=ARISTA_PLATFORM) @@ -85,14 +111,18 @@ def create(cls, diffsync, ids, attrs): serial=attrs["serial"] if attrs.get("serial") else "", ) - if APP_SETTINGS.get("apply_import_tag", APPLY_IMPORT_TAG): + if APP_SETTINGS.get("aristacv_apply_import_tag", APPLY_IMPORT_TAG): import_tag = nautobot.verify_import_tag() new_device.tags.add(import_tag) try: new_device.validated_save() if LIFECYCLE_MGMT and attrs.get("version"): - software_lcm = cls._add_software_lcm(platform=platform.name, version=attrs["version"]) - cls._assign_version_to_device(diffsync=diffsync, device=new_device, software_lcm=software_lcm) + software_lcm = cls._add_software_lcm( + platform=platform.name, version=attrs["version"] + ) + cls._assign_version_to_device( + diffsync=diffsync, device=new_device, software_lcm=software_lcm + ) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) except ValidationError as err: diffsync.job.logger.warning(f"Unable to create Device {ids['name']}. {err}") @@ -111,19 +141,29 @@ def update(self, attrs): if "serial" in attrs: dev.serial = attrs["serial"] if "version" in attrs and LIFECYCLE_MGMT: - software_lcm = self._add_software_lcm(platform=dev.platform.name, version=attrs["version"]) - self._assign_version_to_device(diffsync=self.diffsync, device=dev, software_lcm=software_lcm) + software_lcm = self._add_software_lcm( + platform=dev.platform.name, version=attrs["version"] + ) + self._assign_version_to_device( + diffsync=self.diffsync, device=dev, software_lcm=software_lcm + ) try: dev.validated_save() return super().update(attrs) except ValidationError as err: - self.diffsync.job.logger.warning(f"Unable to update Device {self.name}. 
{err}") + self.diffsync.job.logger.warning( + f"Unable to update Device {self.name}. {err}" + ) return None def delete(self): """Delete device object in Nautobot.""" - if APP_SETTINGS.get("delete_devices_on_sync", DEFAULT_DELETE_DEVICES_ON_SYNC): - self.diffsync.job.logger.warning(f"Device {self.name} will be deleted per app settings.") + if APP_SETTINGS.get( + "aristacv_delete_devices_on_sync", DEFAULT_DELETE_DEVICES_ON_SYNC + ): + self.diffsync.job.logger.warning( + f"Device {self.name} will be deleted per app settings." + ) device = OrmDevice.objects.get(id=self.uuid) device.delete() super().delete() @@ -216,7 +256,9 @@ def update(self, attrs): _port.validated_save() return super().update(attrs) except ValidationError as err: - self.diffsync.job.logger.warning(f"Unable to update port {self.name} for {self.device} with {attrs}: {err}") + self.diffsync.job.logger.warning( + f"Unable to update port {self.name} for {self.device} with {attrs}: {err}" + ) return None def delete(self): @@ -224,7 +266,9 @@ def delete(self): if APP_SETTINGS.get("delete_devices_on_sync"): super().delete() if self.diffsync.job.debug: - self.diffsync.job.logger.warning(f"Interface {self.name} for {self.device} will be deleted.") + self.diffsync.job.logger.warning( + f"Interface {self.name} for {self.device} will be deleted." 
+ ) _port = OrmInterface.objects.get(id=self.uuid) _port.delete() return self @@ -246,7 +290,9 @@ def create(cls, diffsync, ids, attrs): new_ip.validated_save() try: intf = OrmInterface.objects.get(device=dev, name=ids["interface"]) - new_ip.assigned_object_type = ContentType.objects.get(app_label="dcim", model="interface") + new_ip.assigned_object_type = ContentType.objects.get( + app_label="dcim", model="interface" + ) new_ip.assigned_object = intf new_ip.validated_save() if "Management" in ids["interface"]: @@ -256,7 +302,9 @@ def create(cls, diffsync, ids, attrs): dev.primary_ip4 = new_ip dev.validated_save() except OrmInterface.DoesNotExist as err: - diffsync.job.logger.warning(f"Unable to find Interface {ids['interface']} for {ids['device']}. {err}") + diffsync.job.logger.warning( + f"Unable to find Interface {ids['interface']} for {ids['device']}. {err}" + ) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) From 679b96fd6748ef41c839fecddc15e42e41a4e879 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 9 Jan 2024 13:45:05 -0600 Subject: [PATCH 04/47] =?UTF-8?q?feat:=20=E2=9C=A8=20Add=20Prefix=20DiffSy?= =?UTF-8?q?ncModel=20to=20track=20for=20IPAddress=20parent=20requirement.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This should fix the issue brought up in #266. 
--- .../aristacv/diffsync/adapters/cloudvision.py | 11 +++++++- .../aristacv/diffsync/adapters/nautobot.py | 17 +++++++++-- .../aristacv/diffsync/models/base.py | 16 +++++++++++ .../aristacv/diffsync/models/cloudvision.py | 28 ++++++++++++++++++- .../aristacv/diffsync/models/nautobot.py | 20 +++++++++++++ 5 files changed, 88 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py index b91da7c07..99146c855 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py @@ -1,5 +1,6 @@ """DiffSync adapter for Arista CloudVision.""" import distutils +import ipaddress import re import arista.tag.v2 as TAG @@ -11,6 +12,7 @@ CloudvisionCustomField, CloudvisionDevice, CloudvisionPort, + CloudvisionPrefix, CloudvisionIPAddress, ) from nautobot_ssot.integrations.aristacv.utils import cloudvision @@ -21,10 +23,11 @@ class CloudvisionAdapter(DiffSync): device = CloudvisionDevice port = CloudvisionPort + prefix = CloudvisionPrefix ipaddr = CloudvisionIPAddress cf = CloudvisionCustomField - top_level = ["device", "ipaddr", "cf"] + top_level = ["device", "prefix", "ipaddr", "cf"] def __init__(self, *args, job=None, conn: cloudvision.CloudvisionApi, **kwargs): """Initialize the CloudVision DiffSync adapter.""" @@ -182,8 +185,14 @@ def load_ip_addresses(self, dev: device): f"Attempting to load IP Address {intf['address']} for {intf['interface']} on {dev.name}." 
) if intf["address"] and intf["address"] != "none": + prefix = ipaddress.ip_interface(intf["address"]).network.with_prefixlen + self.get_or_instantiate( + self.prefix, + ids={"prefix": prefix}, + ) new_ip = self.ipaddr( address=intf["address"], + prefix=prefix, interface=intf["interface"], device=dev.name, uuid=None, diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index 0a49424f3..661ac6ac1 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -12,6 +12,7 @@ from nautobot_ssot.integrations.aristacv.diffsync.models.nautobot import ( NautobotDevice, NautobotCustomField, + NautobotPrefix, NautobotIPAddress, NautobotPort, ) @@ -23,10 +24,11 @@ class NautobotAdapter(DiffSync): device = NautobotDevice port = NautobotPort + prefix = NautobotPrefix ipaddr = NautobotIPAddress cf = NautobotCustomField - top_level = ["device", "ipaddr", "cf"] + top_level = ["device", "prefix", "ipaddr", "cf"] def __init__(self, *args, job=None, **kwargs): """Initialize the Nautobot DiffSync adapter.""" @@ -89,9 +91,20 @@ def load_interfaces(self): def load_ip_addresses(self): """Add Nautobot IPAddress objects as DiffSync IPAddress models.""" - for ipaddr in OrmIPAddress.objects.filter(interfaces__device__device_type__manufacturer__name__in=["Arista"]): + for ipaddr in OrmIPAddress.objects.filter( + interfaces__device__device_type__manufacturer__name__in=["Arista"] + ): + try: + self.get(self.prefix, ipaddr.parent.prefix.with_prefixlen) + except ObjectNotFound: + new_pf = self.prefix( + prefix=ipaddr.parent.prefix.with_prefixlen, + uuid=ipaddr.parent.prefix.id, + ) + self.add(new_pf) new_ip = self.ipaddr( address=str(ipaddr.address), + prefix=ipaddr.parent.prefix.with_prefixlen, interface=ipaddr.assigned_object.name, device=ipaddr.assigned_object.device.name, uuid=ipaddr.id, diff --git 
a/nautobot_ssot/integrations/aristacv/diffsync/models/base.py b/nautobot_ssot/integrations/aristacv/diffsync/models/base.py index e795869e3..f04cfee8f 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/base.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/base.py @@ -58,6 +58,20 @@ class Port(DiffSyncModel): uuid: Optional[UUID] +class Prefix(DiffSyncModel): + """DiffSync Model for Ringhealth nodes management network.""" + + model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + _modelname = "prefix" + _identifiers = ("prefix",) + _attributes = () + _children = {} + + prefix: str + uuid: Optional[UUID] + + class IPAddress(DiffSyncModel): """IPAddress Model.""" @@ -66,6 +80,7 @@ class IPAddress(DiffSyncModel): _modelname = "ipaddr" _identifiers = ( "address", + "prefix", "device", "interface", ) @@ -73,6 +88,7 @@ class IPAddress(DiffSyncModel): _children = {} address: str + prefix: str device: str interface: str uuid: Optional[UUID] diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py index d1f29508f..d10dced2b 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py @@ -1,6 +1,12 @@ """Cloudvision DiffSync models for AristaCV SSoT.""" from nautobot_ssot.integrations.aristacv.constant import APP_SETTINGS -from nautobot_ssot.integrations.aristacv.diffsync.models.base import Device, CustomField, IPAddress, Port +from nautobot_ssot.integrations.aristacv.diffsync.models.base import ( + Device, + CustomField, + Prefix, + IPAddress, + Port, +) from nautobot_ssot.integrations.aristacv.utils.cloudvision import CloudvisionApi @@ -38,6 +44,26 @@ def delete(self): return self +class CloudvisionPrefix(Prefix): + """Cloudvision IPAdress model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Prefix in AristaCV from Prefix object.""" + ... 
+ return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + def update(self, attrs): + """Update Prefix in AristaCV from Prefix object.""" + ... + return super().update(attrs) + + def delete(self): + """Delete Prefix in AristaCV from Prefix object.""" + ... + return self + + class CloudvisionIPAddress(IPAddress): """Cloudvision IPAdress model.""" diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py index 7b0bf9957..884e58e2c 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py @@ -9,6 +9,8 @@ from nautobot.extras.models import RelationshipAssociation as OrmRelationshipAssociation from nautobot.extras.models import Status as OrmStatus from nautobot.ipam.models import IPAddress as OrmIPAddress +from nautobot.ipam.models import Prefix as OrmPrefix +from nautobot.ipam.models import Namespace import distutils from nautobot_ssot.integrations.aristacv.constant import ( @@ -21,6 +23,7 @@ CustomField, IPAddress, Port, + Prefix, ) from nautobot_ssot.integrations.aristacv.utils import nautobot @@ -274,6 +277,23 @@ def delete(self): return self +class NautobotPrefix(Prefix): + """Nautobot Prefix model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Prefix in Nautobot from NautobotPrefix objects.""" + if diffsync.job.debug: + diffsync.job.logger.info(f"Creating Prefix {ids['prefix']}.") + _pf = OrmPrefix( + prefix=ids["prefix"], + namespace=Namespace.objects.get(name="Global"), + status_id=OrmStatus.objects.get(name="Active"), + ) + _pf.validated_save() + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + class NautobotIPAddress(IPAddress): """Nautobot IPAddress model.""" From be969a6a4748adf6c46b2e9986687b4acbc97d55 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 9 Jan 2024 13:45:36 -0600 Subject: 
[PATCH 05/47] =?UTF-8?q?style:=20=F0=9F=9A=A8=20Fix=20formatting?= =?UTF-8?q?=20for=20black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../aristacv/diffsync/adapters/cloudvision.py | 62 ++++++++++++++----- .../aristacv/diffsync/adapters/nautobot.py | 26 ++++++-- .../aristacv/diffsync/models/cloudvision.py | 12 +++- 3 files changed, 76 insertions(+), 24 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py index 99146c855..1421bf968 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py @@ -39,7 +39,9 @@ def load_devices(self): """Load devices from CloudVision.""" if APP_SETTINGS.get("create_controller"): cvp_version = cloudvision.get_cvp_version() - cvp_ver_cf = self.cf(name="arista_eos", value=cvp_version, device_name="CloudVision") + cvp_ver_cf = self.cf( + name="arista_eos", value=cvp_version, device_name="CloudVision" + ) try: self.add(cvp_ver_cf) except ObjectAlreadyExists as err: @@ -57,9 +59,13 @@ def load_devices(self): try: self.add(new_cvp) except ObjectAlreadyExists as err: - self.job.logger.warning(f"Error attempting to add CloudVision device. {err}") + self.job.logger.warning( + f"Error attempting to add CloudVision device. 
{err}" + ) - for index, dev in enumerate(cloudvision.get_devices(client=self.conn.comm_channel), start=1): + for index, dev in enumerate( + cloudvision.get_devices(client=self.conn.comm_channel), start=1 + ): self.job.logger.info(f"Loading {index}° device") if dev["hostname"] != "": new_device = self.device( @@ -81,7 +87,9 @@ def load_devices(self): self.load_ip_addresses(dev=new_device) self.load_device_tags(device=new_device) else: - self.job.logger.warning(f"Device {dev} is missing hostname so won't be imported.") + self.job.logger.warning( + f"Device {dev} is missing hostname so won't be imported." + ) continue def load_interfaces(self, device): @@ -91,9 +99,13 @@ def load_interfaces(self, device): self.job.logger.debug(f"Chassis type for {device.name} is {chassis_type}.") port_info = [] if chassis_type == "modular": - port_info = cloudvision.get_interfaces_chassis(client=self.conn, dId=device.serial) + port_info = cloudvision.get_interfaces_chassis( + client=self.conn, dId=device.serial + ) elif chassis_type == "fixedSystem": - port_info = cloudvision.get_interfaces_fixed(client=self.conn, dId=device.serial) + port_info = cloudvision.get_interfaces_fixed( + client=self.conn, dId=device.serial + ) elif chassis_type == "Unknown": self.job.logger.warning( f"Unable to determine chassis type for {device.name} so will be unable to retrieve interfaces." @@ -101,13 +113,19 @@ def load_interfaces(self, device): return None if self.job.debug: - self.job.logger.debug(f"Device being loaded: {device.name}. Port: {port_info}.") + self.job.logger.debug( + f"Device being loaded: {device.name}. Port: {port_info}." + ) for port in port_info: if self.job.debug: - self.job.logger.debug(f"Port {port['interface']} being loaded for {device.name}.") + self.job.logger.debug( + f"Port {port['interface']} being loaded for {device.name}." 
+ ) - port_mode = cloudvision.get_interface_mode(client=self.conn, dId=device.serial, interface=port["interface"]) + port_mode = cloudvision.get_interface_mode( + client=self.conn, dId=device.serial, interface=port["interface"] + ) transceiver = cloudvision.get_interface_transceiver( client=self.conn, dId=device.serial, interface=port["interface"] ) @@ -122,7 +140,9 @@ def load_interfaces(self, device): client=self.conn, dId=device.serial, interface=port["interface"] ) port_status = cloudvision.get_interface_status(port_info=port) - port_type = cloudvision.get_port_type(port_info=port, transceiver=transceiver) + port_type = cloudvision.get_port_type( + port_info=port, transceiver=transceiver + ) if port["interface"] != "": new_port = self.port( name=port["interface"], @@ -153,7 +173,9 @@ def load_ip_addresses(self, dev: device): dev_ip_intfs = cloudvision.get_ip_interfaces(client=self.conn, dId=dev.serial) for intf in dev_ip_intfs: if self.job.debug: - self.job.logger.info(f"Loading interface {intf['interface']} on {dev.name} for {intf['address']}.") + self.job.logger.info( + f"Loading interface {intf['interface']} on {dev.name} for {intf['address']}." 
+ ) try: _ = self.get(self.port, {"name": intf["interface"], "device": dev.name}) except ObjectNotFound: @@ -167,7 +189,9 @@ def load_ip_addresses(self, dev: device): enabled=True, mode="access", mtu=65535, - port_type=cloudvision.get_port_type(port_info={"interface": intf["interface"]}, transceiver=""), + port_type=cloudvision.get_port_type( + port_info={"interface": intf["interface"]}, transceiver="" + ), status="Active", uuid=None, ) @@ -212,7 +236,9 @@ def load_device_tags(self, device): ) dev_tags = [ tag - for tag in cloudvision.get_device_tags(client=self.conn.comm_channel, device_id=device.serial) + for tag in cloudvision.get_device_tags( + client=self.conn.comm_channel, device_id=device.serial + ) if tag in system_tags ] @@ -227,11 +253,17 @@ def load_device_tags(self, device): if tag["label"] == "mpls" or tag["label"] == "ztp": tag["value"] = bool(distutils.util.strtobool(tag["value"])) - new_cf = self.cf(name=f"arista_{tag['label']}", value=tag["value"], device_name=device.name) + new_cf = self.cf( + name=f"arista_{tag['label']}", + value=tag["value"], + device_name=device.name, + ) try: self.add(new_cf) except ObjectAlreadyExists: - self.job.logger.warning(f"Duplicate tag encountered for {tag['label']} on device {device.name}") + self.job.logger.warning( + f"Duplicate tag encountered for {tag['label']} on device {device.name}" + ) def load(self): """Load devices and associated data from CloudVision.""" diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index 661ac6ac1..c55245545 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -49,7 +49,9 @@ def load_devices(self): ) self.add(new_device) except ObjectAlreadyExists as err: - self.job.logger.warning(f"Unable to load {dev.name} as it appears to be a duplicate. 
{err}") + self.job.logger.warning( + f"Unable to load {dev.name} as it appears to be a duplicate. {err}" + ) continue self.load_custom_fields(dev=dev) @@ -59,7 +61,11 @@ def load_custom_fields(self, dev: OrmDevice): for cf_name, cf_value in dev.custom_field_data.items(): if cf_name.startswith("arista_"): try: - new_cf = self.cf(name=cf_name, value=cf_value if cf_value is not None else "", device_name=dev.name) + new_cf = self.cf( + name=cf_name, + value=cf_value if cf_value is not None else "", + device_name=dev.name, + ) self.add(new_cf) except AttributeError as err: self.job.logger.warning(f"Unable to load {cf_name}. {err}") @@ -67,7 +73,9 @@ def load_custom_fields(self, dev: OrmDevice): def load_interfaces(self): """Add Nautobot Interface objects as DiffSync Port models.""" - for intf in OrmInterface.objects.filter(device__device_type__manufacturer__name="Arista"): + for intf in OrmInterface.objects.filter( + device__device_type__manufacturer__name="Arista" + ): new_port = self.port( name=intf.name, device=intf.device.name, @@ -112,7 +120,9 @@ def load_ip_addresses(self): try: self.add(new_ip) except ObjectAlreadyExists as err: - self.job.logger.warning(f"Unable to load {ipaddr.address} as appears to be a duplicate. {err}") + self.job.logger.warning( + f"Unable to load {ipaddr.address} as appears to be a duplicate. {err}" + ) def sync_complete(self, source: DiffSync, *args, **kwargs): """Perform actions after sync is completed. @@ -122,8 +132,12 @@ def sync_complete(self, source: DiffSync, *args, **kwargs): """ # if Controller is created we need to ensure all imported Devices have RelationshipAssociation to it. if APP_SETTINGS.get("create_controller"): - self.job.logger.info("Creating Relationships between CloudVision and connected Devices.") - controller_relation = OrmRelationship.objects.get(name="Controller -> Device") + self.job.logger.info( + "Creating Relationships between CloudVision and connected Devices." 
+ ) + controller_relation = OrmRelationship.objects.get( + name="Controller -> Device" + ) device_ct = ContentType.objects.get_for_model(OrmDevice) cvp = OrmDevice.objects.get(name="CloudVision") loaded_devices = source.dict()["device"] diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py index d10dced2b..2c0bdba9f 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py @@ -109,10 +109,16 @@ def create(cls, diffsync, ids, attrs): for device in attrs["devices"]: # Exclude devices that are inactive in CloudVision if device in device_ids: - cvp.assign_tag_to_device(device_ids[device], ids["name"], attrs["value"]) + cvp.assign_tag_to_device( + device_ids[device], ids["name"], attrs["value"] + ) else: - tag = f"{ids['name']}:{attrs['value']}" if attrs["value"] else ids["name"] - diffsync.job.logger.warning(f"{device} is inactive or missing in CloudVision - skipping for tag: {tag}") + tag = ( + f"{ids['name']}:{attrs['value']}" if attrs["value"] else ids["name"] + ) + diffsync.job.logger.warning( + f"{device} is inactive or missing in CloudVision - skipping for tag: {tag}" + ) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) def update(self, attrs): From 9c41897d343c19f9a2bab352cc09917eb25d1ce8 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 9 Jan 2024 14:25:54 -0600 Subject: [PATCH 06/47] =?UTF-8?q?feat:=20=E2=9C=A8=20Add=20IPAssignment=20?= =?UTF-8?q?DiffSync=20Model=20to=20track=20IPAddress=20to=20Interface=20ma?= =?UTF-8?q?ppings.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../aristacv/diffsync/adapters/cloudvision.py | 22 +++++--- .../aristacv/diffsync/adapters/nautobot.py | 18 +++++-- .../aristacv/diffsync/models/base.py | 19 ++++++- 
.../aristacv/diffsync/models/cloudvision.py | 21 ++++++++ .../aristacv/diffsync/models/nautobot.py | 51 ++++++++++++++----- 5 files changed, 108 insertions(+), 23 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py index 1421bf968..fc0a90290 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py @@ -14,6 +14,7 @@ CloudvisionPort, CloudvisionPrefix, CloudvisionIPAddress, + CloudvisionIPAssignment, ) from nautobot_ssot.integrations.aristacv.utils import cloudvision @@ -25,9 +26,10 @@ class CloudvisionAdapter(DiffSync): port = CloudvisionPort prefix = CloudvisionPrefix ipaddr = CloudvisionIPAddress + ipassignment = CloudvisionIPAssignment cf = CloudvisionCustomField - top_level = ["device", "prefix", "ipaddr", "cf"] + top_level = ["device", "prefix", "ipaddr", "ipassignment", "cf"] def __init__(self, *args, job=None, conn: cloudvision.CloudvisionApi, **kwargs): """Initialize the CloudVision DiffSync adapter.""" @@ -217,8 +219,6 @@ def load_ip_addresses(self, dev: device): new_ip = self.ipaddr( address=intf["address"], prefix=prefix, - interface=intf["interface"], - device=dev.name, uuid=None, ) try: @@ -228,6 +228,15 @@ def load_ip_addresses(self, dev: device): f"Unable to load {intf['address']} for {dev.name} on {intf['interface']}. 
{err}" ) continue + self.get_or_instantiate( + self.ipassignment, + ids={ + "address": intf["address"], + "device": dev.name, + "interface": intf["interface"], + }, + attrs={"primary": bool("Management" in intf["interface"])}, + ) def load_device_tags(self, device): """Load device tags from CloudVision.""" @@ -267,10 +276,11 @@ def load_device_tags(self, device): def load(self): """Load devices and associated data from CloudVision.""" - if APP_SETTINGS.get("hostname_patterns") and not ( - APP_SETTINGS.get("site_mappings") and APP_SETTINGS.get("role_mappings") + if APP_SETTINGS.get("aristacv_hostname_patterns") and not ( + APP_SETTINGS.get("aristacv_site_mappings") + and APP_SETTINGS.get("aristacv_role_mappings") ): self.job.logger.warning( - "Configuration found for hostname_patterns but no site_mappings or role_mappings. Please ensure your mappings are defined." + "Configuration found for aristacv_hostname_patterns but no aristacv_site_mappings or aristacv_role_mappings. Please ensure your mappings are defined." 
) self.load_devices() diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index c55245545..478cf2363 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -5,6 +5,7 @@ from nautobot.extras.models import Relationship as OrmRelationship from nautobot.extras.models import RelationshipAssociation as OrmRelationshipAssociation from nautobot.ipam.models import IPAddress as OrmIPAddress +from nautobot.ipam.models import IPAddressToInterface from diffsync import DiffSync from diffsync.exceptions import ObjectNotFound, ObjectAlreadyExists @@ -14,6 +15,7 @@ NautobotCustomField, NautobotPrefix, NautobotIPAddress, + NautobotIPAssignment, NautobotPort, ) from nautobot_ssot.integrations.aristacv.utils import nautobot @@ -26,9 +28,10 @@ class NautobotAdapter(DiffSync): port = NautobotPort prefix = NautobotPrefix ipaddr = NautobotIPAddress + ipassignment = NautobotIPAssignment cf = NautobotCustomField - top_level = ["device", "prefix", "ipaddr", "cf"] + top_level = ["device", "prefix", "ipaddr", "ipassigment", "cf"] def __init__(self, *args, job=None, **kwargs): """Initialize the Nautobot DiffSync adapter.""" @@ -113,8 +116,6 @@ def load_ip_addresses(self): new_ip = self.ipaddr( address=str(ipaddr.address), prefix=ipaddr.parent.prefix.with_prefixlen, - interface=ipaddr.assigned_object.name, - device=ipaddr.assigned_object.device.name, uuid=ipaddr.id, ) try: @@ -123,6 +124,17 @@ def load_ip_addresses(self): self.job.logger.warning( f"Unable to load {ipaddr.address} as appears to be a duplicate. 
{err}" ) + ip_to_intfs = IPAddressToInterface.objects.filter(ip_address=ipaddr) + for mapping in ip_to_intfs: + new_map = self.ipassignment( + address=str(ipaddr.address), + device=mapping.device.name, + interface=mapping.interface.name, + primary=len(mapping.ip_address.primary_ip4_for.all()) > 0 + or len(mapping.ip_address.primary_ip6_for.all()) > 0, + uuid=mapping.id, + ) + self.add(new_map) def sync_complete(self, source: DiffSync, *args, **kwargs): """Perform actions after sync is completed. diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/base.py b/nautobot_ssot/integrations/aristacv/diffsync/models/base.py index f04cfee8f..f51f65484 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/base.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/base.py @@ -81,16 +81,31 @@ class IPAddress(DiffSyncModel): _identifiers = ( "address", "prefix", - "device", - "interface", ) _attributes = () _children = {} address: str prefix: str + uuid: Optional[UUID] + + +class IPAssignment(DiffSyncModel): + """IPAssignment Model.""" + + _modelname = "ipassignment" + _identifiers = ( + "address", + "device", + "interface", + ) + _attributes = ("primary",) + _children = {} + + address: str device: str interface: str + primary: bool uuid: Optional[UUID] diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py index 2c0bdba9f..c70b3a7ca 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py @@ -5,6 +5,7 @@ CustomField, Prefix, IPAddress, + IPAssignment, Port, ) from nautobot_ssot.integrations.aristacv.utils.cloudvision import CloudvisionApi @@ -84,6 +85,26 @@ def delete(self): return self +class CloudvisionIPAssignment(IPAssignment): + """Cloudvision IPAssignment model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create IPAssignment in 
AristaCV from IPAssignment object.""" + ... + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + def update(self, attrs): + """Update IPAssignment in AristaCV from IPAssignment object.""" + ... + return super().update(attrs) + + def delete(self): + """Delete IPAssignment in AristaCV from IPAssignment object.""" + ... + return self + + class CloudvisionCustomField(CustomField): """Cloudvision CustomField model.""" diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py index 884e58e2c..bccc61a57 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py @@ -10,7 +10,7 @@ from nautobot.extras.models import Status as OrmStatus from nautobot.ipam.models import IPAddress as OrmIPAddress from nautobot.ipam.models import Prefix as OrmPrefix -from nautobot.ipam.models import Namespace +from nautobot.ipam.models import Namespace, IPAddressToInterface import distutils from nautobot_ssot.integrations.aristacv.constant import ( @@ -22,6 +22,7 @@ Device, CustomField, IPAddress, + IPAssignment, Port, Prefix, ) @@ -300,7 +301,6 @@ class NautobotIPAddress(IPAddress): @classmethod def create(cls, diffsync, ids, attrs): """Create IPAddress in Nautobot.""" - dev = OrmDevice.objects.get(name=ids["device"]) new_ip = OrmIPAddress( address=ids["address"], status=OrmStatus.objects.get(name="Active"), @@ -308,24 +308,51 @@ def create(cls, diffsync, ids, attrs): if "loopback" in ids["interface"]: new_ip.role = "loopback" new_ip.validated_save() + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + +class NautobotIPAssignment(IPAssignment): + """Nautobot IPAssignment model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create IPAddressToInterface in Nautobot.""" try: - intf = OrmInterface.objects.get(device=dev, name=ids["interface"]) - new_ip.assigned_object_type = 
ContentType.objects.get( - app_label="dcim", model="interface" + ipaddr = OrmIPAddress.objects.get(address=ids["address"]) + intf = OrmInterface.objects.get( + name=ids["interface"], device__name=ids["device"] ) - new_ip.assigned_object = intf - new_ip.validated_save() - if "Management" in ids["interface"]: + new_map = IPAddressToInterface(ip_address=ipaddr, interface=intf) + new_map.validated_save() + if attrs.get("primary"): if ":" in ids["address"]: - dev.primary_ip6 = new_ip + intf.device.primary_ip6 = ipaddr else: - dev.primary_ip4 = new_ip - dev.validated_save() + intf.device.primary_ip4 = ipaddr + intf.device.validated_save() + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) except OrmInterface.DoesNotExist as err: diffsync.job.logger.warning( f"Unable to find Interface {ids['interface']} for {ids['device']}. {err}" ) - return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + def update(self, attrs): + """Update IPAddressToInterface in Nautobot.""" + map = IPAddressToInterface.objects.get(id=self.uuid) + if attrs.get("primary"): + if ":" in map.ip_address.address: + map.interface.device.primary_ip6 = map.ip_address + else: + map.interface.device.primary_ip4 = map.ip_address + map.interface.device.validated_save() + return super().update(attrs) + + def delete(self): + """Delete IPAddressToInterface in Nautobot.""" + super().delete() + mapping = IPAddressToInterface.objects.get(id=self.uuid) + mapping.delete() + return self class NautobotCustomField(CustomField): From 373b448a3426575fdb1af9fa184ae80c5bcdaf32 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 9 Jan 2024 14:33:02 -0600 Subject: [PATCH 07/47] =?UTF-8?q?style:=20=F0=9F=9A=A8=20Fix=20formatting?= =?UTF-8?q?=20for=20black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../aristacv/diffsync/adapters/cloudvision.py | 59 +++++------------ .../aristacv/diffsync/adapters/nautobot.py | 
24 ++----- .../aristacv/diffsync/models/cloudvision.py | 12 +--- .../aristacv/diffsync/models/nautobot.py | 65 +++++-------------- 4 files changed, 40 insertions(+), 120 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py index fc0a90290..1ec002ac7 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py @@ -41,9 +41,7 @@ def load_devices(self): """Load devices from CloudVision.""" if APP_SETTINGS.get("create_controller"): cvp_version = cloudvision.get_cvp_version() - cvp_ver_cf = self.cf( - name="arista_eos", value=cvp_version, device_name="CloudVision" - ) + cvp_ver_cf = self.cf(name="arista_eos", value=cvp_version, device_name="CloudVision") try: self.add(cvp_ver_cf) except ObjectAlreadyExists as err: @@ -61,13 +59,9 @@ def load_devices(self): try: self.add(new_cvp) except ObjectAlreadyExists as err: - self.job.logger.warning( - f"Error attempting to add CloudVision device. {err}" - ) + self.job.logger.warning(f"Error attempting to add CloudVision device. {err}") - for index, dev in enumerate( - cloudvision.get_devices(client=self.conn.comm_channel), start=1 - ): + for index, dev in enumerate(cloudvision.get_devices(client=self.conn.comm_channel), start=1): self.job.logger.info(f"Loading {index}° device") if dev["hostname"] != "": new_device = self.device( @@ -89,9 +83,7 @@ def load_devices(self): self.load_ip_addresses(dev=new_device) self.load_device_tags(device=new_device) else: - self.job.logger.warning( - f"Device {dev} is missing hostname so won't be imported." 
- ) + self.job.logger.warning(f"Device {dev} is missing hostname so won't be imported.") continue def load_interfaces(self, device): @@ -101,13 +93,9 @@ def load_interfaces(self, device): self.job.logger.debug(f"Chassis type for {device.name} is {chassis_type}.") port_info = [] if chassis_type == "modular": - port_info = cloudvision.get_interfaces_chassis( - client=self.conn, dId=device.serial - ) + port_info = cloudvision.get_interfaces_chassis(client=self.conn, dId=device.serial) elif chassis_type == "fixedSystem": - port_info = cloudvision.get_interfaces_fixed( - client=self.conn, dId=device.serial - ) + port_info = cloudvision.get_interfaces_fixed(client=self.conn, dId=device.serial) elif chassis_type == "Unknown": self.job.logger.warning( f"Unable to determine chassis type for {device.name} so will be unable to retrieve interfaces." @@ -115,19 +103,13 @@ def load_interfaces(self, device): return None if self.job.debug: - self.job.logger.debug( - f"Device being loaded: {device.name}. Port: {port_info}." - ) + self.job.logger.debug(f"Device being loaded: {device.name}. Port: {port_info}.") for port in port_info: if self.job.debug: - self.job.logger.debug( - f"Port {port['interface']} being loaded for {device.name}." 
- ) + self.job.logger.debug(f"Port {port['interface']} being loaded for {device.name}.") - port_mode = cloudvision.get_interface_mode( - client=self.conn, dId=device.serial, interface=port["interface"] - ) + port_mode = cloudvision.get_interface_mode(client=self.conn, dId=device.serial, interface=port["interface"]) transceiver = cloudvision.get_interface_transceiver( client=self.conn, dId=device.serial, interface=port["interface"] ) @@ -142,9 +124,7 @@ def load_interfaces(self, device): client=self.conn, dId=device.serial, interface=port["interface"] ) port_status = cloudvision.get_interface_status(port_info=port) - port_type = cloudvision.get_port_type( - port_info=port, transceiver=transceiver - ) + port_type = cloudvision.get_port_type(port_info=port, transceiver=transceiver) if port["interface"] != "": new_port = self.port( name=port["interface"], @@ -175,9 +155,7 @@ def load_ip_addresses(self, dev: device): dev_ip_intfs = cloudvision.get_ip_interfaces(client=self.conn, dId=dev.serial) for intf in dev_ip_intfs: if self.job.debug: - self.job.logger.info( - f"Loading interface {intf['interface']} on {dev.name} for {intf['address']}." 
- ) + self.job.logger.info(f"Loading interface {intf['interface']} on {dev.name} for {intf['address']}.") try: _ = self.get(self.port, {"name": intf["interface"], "device": dev.name}) except ObjectNotFound: @@ -191,9 +169,7 @@ def load_ip_addresses(self, dev: device): enabled=True, mode="access", mtu=65535, - port_type=cloudvision.get_port_type( - port_info={"interface": intf["interface"]}, transceiver="" - ), + port_type=cloudvision.get_port_type(port_info={"interface": intf["interface"]}, transceiver=""), status="Active", uuid=None, ) @@ -245,9 +221,7 @@ def load_device_tags(self, device): ) dev_tags = [ tag - for tag in cloudvision.get_device_tags( - client=self.conn.comm_channel, device_id=device.serial - ) + for tag in cloudvision.get_device_tags(client=self.conn.comm_channel, device_id=device.serial) if tag in system_tags ] @@ -270,15 +244,12 @@ def load_device_tags(self, device): try: self.add(new_cf) except ObjectAlreadyExists: - self.job.logger.warning( - f"Duplicate tag encountered for {tag['label']} on device {device.name}" - ) + self.job.logger.warning(f"Duplicate tag encountered for {tag['label']} on device {device.name}") def load(self): """Load devices and associated data from CloudVision.""" if APP_SETTINGS.get("aristacv_hostname_patterns") and not ( - APP_SETTINGS.get("aristacv_site_mappings") - and APP_SETTINGS.get("aristacv_role_mappings") + APP_SETTINGS.get("aristacv_site_mappings") and APP_SETTINGS.get("aristacv_role_mappings") ): self.job.logger.warning( "Configuration found for aristacv_hostname_patterns but no aristacv_site_mappings or aristacv_role_mappings. Please ensure your mappings are defined." 
diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index 478cf2363..bc5ff64ec 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -52,9 +52,7 @@ def load_devices(self): ) self.add(new_device) except ObjectAlreadyExists as err: - self.job.logger.warning( - f"Unable to load {dev.name} as it appears to be a duplicate. {err}" - ) + self.job.logger.warning(f"Unable to load {dev.name} as it appears to be a duplicate. {err}") continue self.load_custom_fields(dev=dev) @@ -76,9 +74,7 @@ def load_custom_fields(self, dev: OrmDevice): def load_interfaces(self): """Add Nautobot Interface objects as DiffSync Port models.""" - for intf in OrmInterface.objects.filter( - device__device_type__manufacturer__name="Arista" - ): + for intf in OrmInterface.objects.filter(device__device_type__manufacturer__name="Arista"): new_port = self.port( name=intf.name, device=intf.device.name, @@ -102,9 +98,7 @@ def load_interfaces(self): def load_ip_addresses(self): """Add Nautobot IPAddress objects as DiffSync IPAddress models.""" - for ipaddr in OrmIPAddress.objects.filter( - interfaces__device__device_type__manufacturer__name__in=["Arista"] - ): + for ipaddr in OrmIPAddress.objects.filter(interfaces__device__device_type__manufacturer__name__in=["Arista"]): try: self.get(self.prefix, ipaddr.parent.prefix.with_prefixlen) except ObjectNotFound: @@ -121,9 +115,7 @@ def load_ip_addresses(self): try: self.add(new_ip) except ObjectAlreadyExists as err: - self.job.logger.warning( - f"Unable to load {ipaddr.address} as appears to be a duplicate. {err}" - ) + self.job.logger.warning(f"Unable to load {ipaddr.address} as appears to be a duplicate. 
{err}") ip_to_intfs = IPAddressToInterface.objects.filter(ip_address=ipaddr) for mapping in ip_to_intfs: new_map = self.ipassignment( @@ -144,12 +136,8 @@ def sync_complete(self, source: DiffSync, *args, **kwargs): """ # if Controller is created we need to ensure all imported Devices have RelationshipAssociation to it. if APP_SETTINGS.get("create_controller"): - self.job.logger.info( - "Creating Relationships between CloudVision and connected Devices." - ) - controller_relation = OrmRelationship.objects.get( - name="Controller -> Device" - ) + self.job.logger.info("Creating Relationships between CloudVision and connected Devices.") + controller_relation = OrmRelationship.objects.get(name="Controller -> Device") device_ct = ContentType.objects.get_for_model(OrmDevice) cvp = OrmDevice.objects.get(name="CloudVision") loaded_devices = source.dict()["device"] diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py index c70b3a7ca..5edc89e98 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py @@ -130,16 +130,10 @@ def create(cls, diffsync, ids, attrs): for device in attrs["devices"]: # Exclude devices that are inactive in CloudVision if device in device_ids: - cvp.assign_tag_to_device( - device_ids[device], ids["name"], attrs["value"] - ) + cvp.assign_tag_to_device(device_ids[device], ids["name"], attrs["value"]) else: - tag = ( - f"{ids['name']}:{attrs['value']}" if attrs["value"] else ids["name"] - ) - diffsync.job.logger.warning( - f"{device} is inactive or missing in CloudVision - skipping for tag: {tag}" - ) + tag = f"{ids['name']}:{attrs['value']}" if attrs["value"] else ids["name"] + diffsync.job.logger.warning(f"{device} is inactive or missing in CloudVision - skipping for tag: {tag}") return super().create(ids=ids, diffsync=diffsync, attrs=attrs) def update(self, attrs): 
diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py index bccc61a57..d62e37492 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py @@ -33,9 +33,7 @@ LIFECYCLE_MGMT = True except ImportError: - print( - "Device Lifecycle app isn't installed so will revert to CustomField for OS version." - ) + print("Device Lifecycle app isn't installed so will revert to CustomField for OS version.") LIFECYCLE_MGMT = False @@ -68,9 +66,7 @@ def create(cls, diffsync, ids, attrs): else: site = nautobot.verify_site("CloudVision") else: - site = nautobot.verify_site( - APP_SETTINGS.get("aristacv_from_cloudvision_default_site", DEFAULT_SITE) - ) + site = nautobot.verify_site(APP_SETTINGS.get("aristacv_from_cloudvision_default_site", DEFAULT_SITE)) if role_code and role_code in role_map: role = nautobot.verify_device_role_object( @@ -81,24 +77,17 @@ def create(cls, diffsync, ids, attrs): ), ) elif "CloudVision" in ids["name"]: - role = nautobot.verify_device_role_object( - "Controller", DEFAULT_DEVICE_ROLE_COLOR - ) + role = nautobot.verify_device_role_object("Controller", DEFAULT_DEVICE_ROLE_COLOR) else: role = nautobot.verify_device_role_object( - APP_SETTINGS.get( - "aristacv_from_cloudvision_default_device_role", DEFAULT_DEVICE_ROLE - ), + APP_SETTINGS.get("aristacv_from_cloudvision_default_device_role", DEFAULT_DEVICE_ROLE), APP_SETTINGS.get( "aristacv_from_cloudvision_default_device_role_color", DEFAULT_DEVICE_ROLE_COLOR, ), ) - if ( - APP_SETTINGS.get("aristacv_create_controller") - and "CloudVision" in ids["name"] - ): + if APP_SETTINGS.get("aristacv_create_controller") and "CloudVision" in ids["name"]: platform = OrmPlatform.objects.get(name=CLOUDVISION_PLATFORM) else: platform = OrmPlatform.objects.get(name=ARISTA_PLATFORM) @@ -121,12 +110,8 @@ def create(cls, diffsync, ids, attrs): try: 
new_device.validated_save() if LIFECYCLE_MGMT and attrs.get("version"): - software_lcm = cls._add_software_lcm( - platform=platform.name, version=attrs["version"] - ) - cls._assign_version_to_device( - diffsync=diffsync, device=new_device, software_lcm=software_lcm - ) + software_lcm = cls._add_software_lcm(platform=platform.name, version=attrs["version"]) + cls._assign_version_to_device(diffsync=diffsync, device=new_device, software_lcm=software_lcm) return super().create(ids=ids, diffsync=diffsync, attrs=attrs) except ValidationError as err: diffsync.job.logger.warning(f"Unable to create Device {ids['name']}. {err}") @@ -145,29 +130,19 @@ def update(self, attrs): if "serial" in attrs: dev.serial = attrs["serial"] if "version" in attrs and LIFECYCLE_MGMT: - software_lcm = self._add_software_lcm( - platform=dev.platform.name, version=attrs["version"] - ) - self._assign_version_to_device( - diffsync=self.diffsync, device=dev, software_lcm=software_lcm - ) + software_lcm = self._add_software_lcm(platform=dev.platform.name, version=attrs["version"]) + self._assign_version_to_device(diffsync=self.diffsync, device=dev, software_lcm=software_lcm) try: dev.validated_save() return super().update(attrs) except ValidationError as err: - self.diffsync.job.logger.warning( - f"Unable to update Device {self.name}. {err}" - ) + self.diffsync.job.logger.warning(f"Unable to update Device {self.name}. {err}") return None def delete(self): """Delete device object in Nautobot.""" - if APP_SETTINGS.get( - "aristacv_delete_devices_on_sync", DEFAULT_DELETE_DEVICES_ON_SYNC - ): - self.diffsync.job.logger.warning( - f"Device {self.name} will be deleted per app settings." 
- ) + if APP_SETTINGS.get("aristacv_delete_devices_on_sync", DEFAULT_DELETE_DEVICES_ON_SYNC): + self.diffsync.job.logger.warning(f"Device {self.name} will be deleted per app settings.") device = OrmDevice.objects.get(id=self.uuid) device.delete() super().delete() @@ -260,9 +235,7 @@ def update(self, attrs): _port.validated_save() return super().update(attrs) except ValidationError as err: - self.diffsync.job.logger.warning( - f"Unable to update port {self.name} for {self.device} with {attrs}: {err}" - ) + self.diffsync.job.logger.warning(f"Unable to update port {self.name} for {self.device} with {attrs}: {err}") return None def delete(self): @@ -270,9 +243,7 @@ def delete(self): if APP_SETTINGS.get("delete_devices_on_sync"): super().delete() if self.diffsync.job.debug: - self.diffsync.job.logger.warning( - f"Interface {self.name} for {self.device} will be deleted." - ) + self.diffsync.job.logger.warning(f"Interface {self.name} for {self.device} will be deleted.") _port = OrmInterface.objects.get(id=self.uuid) _port.delete() return self @@ -319,9 +290,7 @@ def create(cls, diffsync, ids, attrs): """Create IPAddressToInterface in Nautobot.""" try: ipaddr = OrmIPAddress.objects.get(address=ids["address"]) - intf = OrmInterface.objects.get( - name=ids["interface"], device__name=ids["device"] - ) + intf = OrmInterface.objects.get(name=ids["interface"], device__name=ids["device"]) new_map = IPAddressToInterface(ip_address=ipaddr, interface=intf) new_map.validated_save() if attrs.get("primary"): @@ -332,9 +301,7 @@ def create(cls, diffsync, ids, attrs): intf.device.validated_save() return super().create(ids=ids, diffsync=diffsync, attrs=attrs) except OrmInterface.DoesNotExist as err: - diffsync.job.logger.warning( - f"Unable to find Interface {ids['interface']} for {ids['device']}. {err}" - ) + diffsync.job.logger.warning(f"Unable to find Interface {ids['interface']} for {ids['device']}. 
{err}") def update(self, attrs): """Update IPAddressToInterface in Nautobot.""" From ba1429a09b1aa3dd5956b6432d852c626267455e Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 9 Jan 2024 14:34:06 -0600 Subject: [PATCH 08/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20typo?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/aristacv/diffsync/adapters/nautobot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index bc5ff64ec..794c6ea2c 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -31,7 +31,7 @@ class NautobotAdapter(DiffSync): ipassignment = NautobotIPAssignment cf = NautobotCustomField - top_level = ["device", "prefix", "ipaddr", "ipassigment", "cf"] + top_level = ["device", "prefix", "ipaddr", "ipassignment", "cf"] def __init__(self, *args, job=None, **kwargs): """Initialize the Nautobot DiffSync adapter.""" From 25969e123a727fc459bd7e589a82b0c8a011fba5 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 9 Jan 2024 16:08:11 -0600 Subject: [PATCH 09/47] =?UTF-8?q?test:=20=E2=9C=85=20Update=20test=20to=20?= =?UTF-8?q?account=20for=20DiffSync=20model=20changes.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tests/aristacv/test_cloudvision_adapter.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py b/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py index 84bef29e8..0a6675c5d 100644 --- a/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py +++ b/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py @@ 
-1,9 +1,12 @@ """Unit tests for the Cloudvision DiffSync adapter class.""" +import ipaddress from unittest.mock import MagicMock, patch from nautobot.extras.models import JobResult from nautobot.core.testing import TransactionTestCase -from nautobot_ssot.integrations.aristacv.diffsync.adapters.cloudvision import CloudvisionAdapter +from nautobot_ssot.integrations.aristacv.diffsync.adapters.cloudvision import ( + CloudvisionAdapter, +) from nautobot_ssot.integrations.aristacv.jobs import CloudVisionDataSource from nautobot_ssot.tests.aristacv.fixtures import fixtures @@ -50,7 +53,10 @@ def setUp(self): ) def test_load_devices(self): """Test the load_devices() adapter method.""" - with patch("nautobot_ssot.integrations.aristacv.utils.cloudvision.get_devices", self.cloudvision.get_devices): + with patch( + "nautobot_ssot.integrations.aristacv.utils.cloudvision.get_devices", + self.cloudvision.get_devices, + ): with patch( "nautobot_ssot.integrations.aristacv.utils.cloudvision.get_device_type", self.cloudvision.get_device_type, @@ -75,7 +81,8 @@ def test_load_interfaces(self): mock_device.device_model.return_value = "DCS-7280CR2-60" with patch( - "nautobot_ssot.integrations.aristacv.utils.cloudvision.get_device_type", self.cloudvision.get_device_type + "nautobot_ssot.integrations.aristacv.utils.cloudvision.get_device_type", + self.cloudvision.get_device_type, ): with patch( "nautobot_ssot.integrations.aristacv.utils.cloudvision.get_interfaces_fixed", @@ -116,6 +123,9 @@ def test_load_ip_addresses(self): ): self.cvp.load_ip_addresses(dev=mock_device) self.assertEqual( - {f"{ipaddr['address']}__mock_device__{ipaddr['interface']}" for ipaddr in fixtures.IP_INTF_FIXTURE}, + { + f"{ipaddr['address']}__{ipaddress.ip_interface(ipaddr['address']).network.with_prefixlen}" + for ipaddr in fixtures.IP_INTF_FIXTURE + }, {ipaddr.get_unique_id() for ipaddr in self.cvp.get_all("ipaddr")}, ) From 6e80c1483284257826e9092ed127ead77ec921f5 Mon Sep 17 00:00:00 2001 From: Justin Drew 
<2396364+jdrew82@users.noreply.github.com> Date: Tue, 9 Jan 2024 16:21:20 -0600 Subject: [PATCH 10/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Specify=20parent=20?= =?UTF-8?q?prefix=20when=20creating=20IPAddress?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py index d62e37492..590fc20e2 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py @@ -274,6 +274,7 @@ def create(cls, diffsync, ids, attrs): """Create IPAddress in Nautobot.""" new_ip = OrmIPAddress( address=ids["address"], + parent=OrmPrefix.objects.get(prefix=ids["prefix"], namespace=Namespace.objects.get(name="Global")), status=OrmStatus.objects.get(name="Active"), ) if "loopback" in ids["interface"]: From 8f10fd6696fdd5976bb5aac9d245b0015db071be Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 9 Jan 2024 16:36:42 -0600 Subject: [PATCH 11/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20It=20should=20be=20?= =?UTF-8?q?just=20status,=20not=20status=5Fid?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py index 590fc20e2..a8218cfe4 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py @@ -260,7 +260,7 @@ def create(cls, diffsync, ids, attrs): _pf = OrmPrefix( prefix=ids["prefix"], 
namespace=Namespace.objects.get(name="Global"), - status_id=OrmStatus.objects.get(name="Active"), + status=OrmStatus.objects.get(name="Active"), ) _pf.validated_save() return super().create(diffsync=diffsync, ids=ids, attrs=attrs) From 2924e7a9e41f741ab80c15c3e19dc480660da856 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Tue, 9 Jan 2024 20:08:05 -0600 Subject: [PATCH 12/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Move=20loopback=20t?= =?UTF-8?q?ype=20setting=20to=20IPAssignment.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/aristacv/diffsync/models/nautobot.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py index a8218cfe4..d6e44bc95 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py @@ -277,8 +277,6 @@ def create(cls, diffsync, ids, attrs): parent=OrmPrefix.objects.get(prefix=ids["prefix"], namespace=Namespace.objects.get(name="Global")), status=OrmStatus.objects.get(name="Active"), ) - if "loopback" in ids["interface"]: - new_ip.role = "loopback" new_ip.validated_save() return super().create(ids=ids, diffsync=diffsync, attrs=attrs) @@ -293,6 +291,9 @@ def create(cls, diffsync, ids, attrs): ipaddr = OrmIPAddress.objects.get(address=ids["address"]) intf = OrmInterface.objects.get(name=ids["interface"], device__name=ids["device"]) new_map = IPAddressToInterface(ip_address=ipaddr, interface=intf) + if "loopback" in ids["interface"]: + ipaddr.role = "loopback" + ipaddr.validated_save() new_map.validated_save() if attrs.get("primary"): if ":" in ids["address"]: From c9078aa9aed194a43a2133e442db2b112a7afbfa Mon Sep 17 00:00:00 2001 From: Jan Snasel Date: Wed, 10 Jan 2024 10:05:50 +0000 Subject: [PATCH 13/47] feat: Ability 
to store `set()`s diffs in Sync --- nautobot_ssot/models.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/models.py b/nautobot_ssot/models.py index 48a54255d..230951f4d 100644 --- a/nautobot_ssot/models.py +++ b/nautobot_ssot/models.py @@ -37,6 +37,16 @@ from .choices import SyncLogEntryActionChoices, SyncLogEntryStatusChoices +class DiffJSONEncoder(DjangoJSONEncoder): + """Custom JSON encoder for the Sync.diff field.""" + + def default(self, o): + """Custom JSON encoder for the Sync.diff field.""" + if isinstance(o, set): + return self.encode(list(o)) + return super().default(o) + + @extras_features( "custom_links", ) @@ -67,7 +77,7 @@ class Sync(BaseModel): # pylint: disable=nb-string-field-blank-null dry_run = models.BooleanField( default=False, help_text="Report what data would be synced but do not make any changes" ) - diff = models.JSONField(blank=True, encoder=DjangoJSONEncoder) + diff = models.JSONField(blank=True, encoder=DiffJSONEncoder) summary = models.JSONField(blank=True, null=True) job_result = models.ForeignKey(to=JobResult, on_delete=models.PROTECT, blank=True, null=True) @@ -156,7 +166,7 @@ class SyncLogEntry(BaseModel): # pylint: disable=nb-string-field-blank-null action = models.CharField(max_length=32, choices=SyncLogEntryActionChoices) status = models.CharField(max_length=32, choices=SyncLogEntryStatusChoices) - diff = models.JSONField(blank=True, null=True, encoder=DjangoJSONEncoder) + diff = models.JSONField(blank=True, null=True, encoder=DiffJSONEncoder) synced_object_type = models.ForeignKey( to=ContentType, From 51b01294f300e32870c72456b6ced34003a91a27 Mon Sep 17 00:00:00 2001 From: Jan Snasel Date: Wed, 10 Jan 2024 10:19:53 +0000 Subject: [PATCH 14/47] chore: Make migrations --- .../migrations/0008_auto_20240110_1019.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 nautobot_ssot/migrations/0008_auto_20240110_1019.py diff --git 
a/nautobot_ssot/migrations/0008_auto_20240110_1019.py b/nautobot_ssot/migrations/0008_auto_20240110_1019.py new file mode 100644 index 000000000..9a74b2e3d --- /dev/null +++ b/nautobot_ssot/migrations/0008_auto_20240110_1019.py @@ -0,0 +1,24 @@ +# Generated by Django 3.2.21 on 2024-01-10 10:19 + +from django.db import migrations, models +import nautobot_ssot.models + + +class Migration(migrations.Migration): + + dependencies = [ + ('nautobot_ssot', '0007_replace_dashed_custom_fields'), + ] + + operations = [ + migrations.AlterField( + model_name='sync', + name='diff', + field=models.JSONField(blank=True, encoder=nautobot_ssot.models.DiffJSONEncoder), + ), + migrations.AlterField( + model_name='synclogentry', + name='diff', + field=models.JSONField(blank=True, encoder=nautobot_ssot.models.DiffJSONEncoder, null=True), + ), + ] From 07334ec610eaf3b55f2231a87a2c84ec56cf2e06 Mon Sep 17 00:00:00 2001 From: Jan Snasel Date: Wed, 10 Jan 2024 10:22:45 +0000 Subject: [PATCH 15/47] fix: Black --- nautobot_ssot/migrations/0008_auto_20240110_1019.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/nautobot_ssot/migrations/0008_auto_20240110_1019.py b/nautobot_ssot/migrations/0008_auto_20240110_1019.py index 9a74b2e3d..96c6e3663 100644 --- a/nautobot_ssot/migrations/0008_auto_20240110_1019.py +++ b/nautobot_ssot/migrations/0008_auto_20240110_1019.py @@ -5,20 +5,19 @@ class Migration(migrations.Migration): - dependencies = [ - ('nautobot_ssot', '0007_replace_dashed_custom_fields'), + ("nautobot_ssot", "0007_replace_dashed_custom_fields"), ] operations = [ migrations.AlterField( - model_name='sync', - name='diff', + model_name="sync", + name="diff", field=models.JSONField(blank=True, encoder=nautobot_ssot.models.DiffJSONEncoder), ), migrations.AlterField( - model_name='synclogentry', - name='diff', + model_name="synclogentry", + name="diff", field=models.JSONField(blank=True, encoder=nautobot_ssot.models.DiffJSONEncoder, null=True), ), ] From 
af53c81384ff171c222668666fe696fc27227529 Mon Sep 17 00:00:00 2001 From: Leo Kirchner Date: Tue, 7 Nov 2023 17:45:28 +0100 Subject: [PATCH 16/47] introduces custom relationships for the contrib module --- docs/user/modeling.md | 4 +- nautobot_ssot/contrib.py | 448 ++++++++++++++++++++++------ nautobot_ssot/tests/test_contrib.py | 141 ++++++++- 3 files changed, 505 insertions(+), 88 deletions(-) diff --git a/docs/user/modeling.md b/docs/user/modeling.md index e659c48c5..8bcca9347 100644 --- a/docs/user/modeling.md +++ b/docs/user/modeling.md @@ -7,12 +7,12 @@ This page describes how to model various kinds of fields on a `nautobot_ssot.con The following table describes in brief the different types of model fields and how they are handled. | Type of field | Field name | Notes | Applies to | -| -------------------------------------------------- | ------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +|----------------------------------------------------|---------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | [Normal fields](#normal-fields) | Has to match ORM exactly | Make sure that the name matches the name in the ORM model. 
| Fields that are neither custom fields nor relations | | [Custom fields](#custom-fields) | Field name doesn't matter | Use `nautobot_ssot.contrib.CustomFieldAnnotation` | [Nautobot custom fields](https://docs.nautobot.com/projects/core/en/stable/user-guides/custom-fields/?h=custom+fields) | | [*-to-one relationships](#-to-one-relationships) | Django lookup syntax | See [here](https://docs.djangoproject.com/en/3.2/topics/db/queries/#lookups-that-span-relationships) - your model fields need to use this syntax | `django.db.models.OneToOneField`, `django.db.models.ForeignKey`, `django.contrib.contenttypes.fields.GenericForeignKey` | | [*-to-many relationships](#-to-many-relationships) | Has to match ORM exactly | In case of a generic foreign key see [here](#special-case-generic-foreign-key) | `django.db.models.ManyToManyField`, `django.contrib.contenttypes.fields.GenericRelation`, `django.db.models.ForeignKey` [backwards](https://docs.djangoproject.com/en/3.2/topics/db/queries/#backwards-related-objects) | -| Custom Relationships | n/a | Not yet supported | https://docs.nautobot.com/projects/core/en/stable/models/extras/relationship/ | +| Custom Relationships | Field name doesn't matter | Use `nautobot_ssot.contrib.CustomRelationshipAnnotation` | https://docs.nautobot.com/projects/core/en/stable/models/extras/relationship/ | ## Normal Fields diff --git a/nautobot_ssot/contrib.py b/nautobot_ssot/contrib.py index be932b470..5d71a21cf 100644 --- a/nautobot_ssot/contrib.py +++ b/nautobot_ssot/contrib.py @@ -4,15 +4,55 @@ from collections import defaultdict from dataclasses import dataclass +from enum import Enum import pydantic from diffsync import DiffSyncModel, DiffSync from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError, MultipleObjectsReturned from django.db.models import Model +from nautobot.extras.models import Relationship, RelationshipAssociation from typing_extensions import get_type_hints +class 
RelationshipSideEnum(Enum): + """This details which side of a custom relationship the model it's defined on is on.""" + + SOURCE = "SOURCE" + DESTINATION = "DESTINATION" + + +@dataclass +class CustomRelationshipAnnotation: + """Map a model field to an arbitrary custom relationship. + + For usage with `typing.Annotated`. + + This exists to map model fields to their corresponding relationship fields. All different types of relationships + then work exactly the same as they normally do, just that you have to annotate the field(s) that belong(s) to the + relationship. + + Example: + Given a custom relationship called "Circuit provider to tenant": + ```python + class ProviderModel(NautobotModel): + _model: Provider + _identifiers = ("name",) + _attributes = ("tenant__name",) + + tenant__name = Annotated[ + str, + CustomRelationshipAnnotation(name="Circuit provider to tenant", side=RelationshipSideEnum.SOURCE) + ] + + This then identifies the tenant to relate the provider to through its `name` field as well as the relationship + name. + """ + + name: str + side: RelationshipSideEnum + + @dataclass class CustomFieldAnnotation: """Map a model field to an arbitrary custom field name. @@ -57,6 +97,10 @@ def __init__(self, *args, job, sync=None, **kwargs): self.job = job self.sync = sync + # Caches lookups to custom relationships. + # TODO: Once caching is in, replace this cache with it. 
+ self.custom_relationship_cache = {} + @staticmethod def _get_parameter_names(diffsync_model): """Ignore the differences between identifiers and attributes, because at this point they don't matter to us.""" @@ -68,48 +112,66 @@ def _load_objects(self, diffsync_model): for database_object in diffsync_model._get_queryset(): self._load_single_object(database_object, diffsync_model, parameter_names) - def _load_single_object(self, database_object, diffsync_model, parameter_names): - """Load a single diffsync object from a single database object.""" - parameters = {} + def _handle_single_parameter(self, parameters, parameter_name, database_object, diffsync_model): type_hints = get_type_hints(diffsync_model, include_extras=True) - for parameter_name in parameter_names: - # Handling of foreign keys where the local side is the many and the remote side the one. - # Note: This includes the side of a generic foreign key that has the foreign key, i.e. - # the 'many' side. - if "__" in parameter_name: + # Handle custom fields and custom relationships. See CustomFieldAnnotation and CustomRelationshipAnnotation + # docstrings for more details. + is_custom_field = False + custom_relationship_annotation = None + metadata_for_this_field = getattr(type_hints[parameter_name], "__metadata__", []) + for metadata in metadata_for_this_field: + if isinstance(metadata, CustomFieldAnnotation): + if metadata.name in database_object.cf: + parameters[parameter_name] = database_object.cf[metadata.name] + is_custom_field = True + break + if isinstance(metadata, CustomRelationshipAnnotation): + custom_relationship_annotation = metadata + break + if is_custom_field: + return + + # Handling of foreign keys where the local side is the many and the remote side the one. + # Note: This includes the side of a generic foreign key that has the foreign key, i.e. + # the 'many' side. 
+ if "__" in parameter_name: + if custom_relationship_annotation: + parameters[parameter_name] = self._handle_custom_relationship_foreign_key( + database_object, parameter_name, custom_relationship_annotation + ) + else: parameters[parameter_name] = self._handle_foreign_key(database_object, parameter_name) - continue + return - # Handle custom fields. See CustomFieldAnnotation docstring for more details. - is_custom_field = False - metadata_for_this_field = getattr(type_hints[parameter_name], "__metadata__", []) - for metadata in metadata_for_this_field: - if isinstance(metadata, CustomFieldAnnotation): - if metadata.name in database_object.cf: - parameters[parameter_name] = database_object.cf[metadata.name] - is_custom_field = True - break - if is_custom_field: - continue + # Handling of one- and many-to custom relationship fields: + if custom_relationship_annotation: + parameters[parameter_name] = self._handle_custom_relationship_to_many_relationship( + database_object, diffsync_model, parameter_name, custom_relationship_annotation + ) + return - database_field = diffsync_model._model._meta.get_field(parameter_name) + database_field = diffsync_model._model._meta.get_field(parameter_name) - # Handling of one- and many-to-many fields. - # Note: This includes the side of a generic foreign key that constitues the foreign key, - # i.e. the 'one' side. - if database_field.many_to_many or database_field.one_to_many: - parameters[parameter_name] = self._handle_to_many_relationship( - database_object, diffsync_model, parameter_name - ) - continue + # Handling of one- and many-to-many non-custom relationship fields. + # Note: This includes the side of a generic foreign key that constitutes the foreign key, + # i.e. the 'one' side. 
+ if database_field.many_to_many or database_field.one_to_many: + parameters[parameter_name] = self._handle_to_many_relationship( + database_object, diffsync_model, parameter_name + ) + return - # Handling of normal fields - as this is the default case, set the attribute directly. - if hasattr(self, f"load_param_{parameter_name}"): - parameters[parameter_name] = getattr(self, f"load_param_{parameter_name}")( - parameter_name, database_object - ) - else: - parameters[parameter_name] = getattr(database_object, parameter_name) + # Handling of normal fields - as this is the default case, set the attribute directly. + if hasattr(self, f"load_param_{parameter_name}"): + parameters[parameter_name] = getattr(self, f"load_param_{parameter_name}")(parameter_name, database_object) + else: + parameters[parameter_name] = getattr(database_object, parameter_name) + + def _load_single_object(self, database_object, diffsync_model, parameter_names): + """Load a single diffsync object from a single database object.""" + parameters = {} + for parameter_name in parameter_names: + self._handle_single_parameter(parameters, parameter_name, database_object, diffsync_model) try: diffsync_model = diffsync_model(**parameters) except pydantic.ValidationError as error: @@ -151,6 +213,64 @@ def _get_diffsync_class(self, model_name): ) from error return diffsync_model + def _handle_custom_relationship_to_many_relationship( + self, database_object, diffsync_model, parameter_name, annotation + ): + # Introspect type annotations to deduce which fields are of interest + # for this many-to-many relationship. + diffsync_field_type = diffsync_model.__annotations__[parameter_name] + # TODO: Why is this different then in the normal case?? + inner_type = diffsync_field_type.__dict__["__args__"][0].__dict__["__args__"][0] + related_objects_list = [] + # TODO: Allow for filtering, i.e. not taking into account all the objects behind the relationship. 
+ relationship = Relationship.objects.get(label=annotation.name) + relationship_association_parameters = self._construct_relationship_association_parameters( + annotation, database_object + ) + relationship_associations = RelationshipAssociation.objects.filter(**relationship_association_parameters) + + field_name = "" + field_name += "source" if annotation.side == RelationshipSideEnum.DESTINATION else "destination" + field_name += "_" + field_name += ( + relationship.source_type.app_label.lower() + if annotation.side == RelationshipSideEnum.DESTINATION + else relationship.destination_type.app_label.lower() + ) + field_name += "_" + field_name += ( + relationship.source_type.model.lower() + if annotation.side == RelationshipSideEnum.DESTINATION + else relationship.destination_type.model.lower() + ) + + for association in relationship_associations: + related_object = getattr( + association, "source" if annotation.side == RelationshipSideEnum.DESTINATION else "destination" + ) + dictionary_representation = { + field_name: getattr(related_object, field_name) for field_name in inner_type.__annotations__ + } + # Only use those where there is a single field defined, all 'None's will not help us. 
+ if any(dictionary_representation.values()): + related_objects_list.append(dictionary_representation) + return related_objects_list + + def _construct_relationship_association_parameters(self, annotation, database_object): + relationship = self.custom_relationship_cache.get( + annotation.name, Relationship.objects.get(label=annotation.name) + ) + relationship_association_parameters = { + "relationship": relationship, + "source_type": relationship.source_type, + "destination_type": relationship.destination_type, + } + if annotation.side == RelationshipSideEnum.SOURCE: + relationship_association_parameters["source_id"] = database_object.id + else: + relationship_association_parameters["destination_id"] = database_object.id + return relationship_association_parameters + @staticmethod def _handle_to_many_relationship(database_object, diffsync_model, parameter_name): """Handle a single one- or many-to-many relationship field. @@ -216,6 +336,30 @@ class NautobotInterface(NautobotModel): related_objects_list.append(dictionary_representation) return related_objects_list + def _handle_custom_relationship_foreign_key( + self, database_object, parameter_name: str, annotation: CustomRelationshipAnnotation + ): + """Handle a single custom relationship foreign key field.""" + relationship_association_parameters = self._construct_relationship_association_parameters( + annotation, database_object + ) + + relationship_association = RelationshipAssociation.objects.filter(**relationship_association_parameters) + amount_of_relationship_associations = relationship_association.count() + if amount_of_relationship_associations == 0: + return None + if amount_of_relationship_associations == 1: + association = relationship_association.first() + related_object = getattr( + association, "source" if annotation.side == RelationshipSideEnum.DESTINATION else "destination" + ) + # Discard the first part as there is no actual field on the model corresponding to that part. 
+ _, *lookups = parameter_name.split("__") + for lookup in lookups[:-1]: + related_object = getattr(related_object, lookup) + return getattr(related_object, lookups[-1]) + raise ValueError("Foreign key custom relationship matched two associations - this shouldn't happen.") + @staticmethod def _handle_foreign_key(database_object, parameter_name): """Handle a single foreign key field. @@ -266,7 +410,10 @@ class NautobotModel(DiffSyncModel): @classmethod def _get_queryset(cls): """Get the queryset used to load the models data from Nautobot.""" - parameter_names = list(cls._identifiers) + list(cls._attributes) + available_fields = {field.name for field in cls._model._meta.get_fields()} + parameter_names = [ + parameter for parameter in list(cls._identifiers) + list(cls._attributes) if parameter in available_fields + ] # Here we identify any foreign keys (i.e. fields with '__' in them) so that we can load them directly in the # first query if this function hasn't been overridden. prefetch_related_parameters = [parameter.split("__")[0] for parameter in parameter_names if "__" in parameter] @@ -285,13 +432,16 @@ def _check_field(cls, name): raise ValueError(f"Field {name} is not defined on the model.") def get_from_db(self): - """Get the ORM object for this diffsync object from the database using the identifiers.""" + """Get the ORM object for this diffsync object from the database using the identifiers. + + TODO: Currently I don't think this works for custom fields, therefore those can't be identifiers. 
+ """ return self._model.objects.get(**self.get_identifiers()) def update(self, attrs): """Update the ORM object corresponding to this diffsync object.""" obj = self.get_from_db() - self._update_obj_with_parameters(obj, attrs) + self._update_obj_with_parameters(obj, attrs, self.diffsync) return super().update(attrs) def delete(self): @@ -310,65 +460,113 @@ def create(cls, diffsync, ids, attrs): # This is in fact callable, because it is a model obj = cls._model() # pylint: disable=not-callable - cls._update_obj_with_parameters(obj, parameters) + cls._update_obj_with_parameters(obj, parameters, diffsync) return super().create(diffsync, ids, attrs) @classmethod - def _update_obj_with_parameters(cls, obj, parameters): - """Update a given Nautobot ORM object with the given parameters.""" - # Example: {"group": {"name": "Group Name", "_model_class": TenantGroup}} - foreign_keys = defaultdict(dict) - - # Example: {"tags": [Tag-1, Tag-2]} - many_to_many_fields = defaultdict(list) - + def _handle_single_field( + cls, field, obj, value, relationship_fields, diffsync + ): # pylint: disable=too-many-arguments + """Set a single field on a Django object to a given value, or, for relationship fields, prepare setting. + + :param field: The name of the field to set. + :param obj: The Django ORM object to set the field on. + :param value: The value to set the field to. + :param relationship_fields: Helper dictionary containing information on relationship fields. + This is mutated over the course of this function. + :param diffsync: The related diffsync adapter used for looking up things in the cache. + """ # Use type hints at runtime to determine which fields are custom fields type_hints = get_type_hints(cls, include_extras=True) - for field, value in parameters.items(): - cls._check_field(field) - - # Prepare handling of foreign keys. 
- # Example: If field is `tenant__group__name`, then - # `foreign_keys["tenant"]["group__name"] = value` - # Also, the model class will be added to the dictionary, so we can later use it - # for querying: - # `foreign_keys["tenant"]["_model_class"] = nautobot.tenancy.models.Tenant - if "__" in field: - related_model, lookup = field.split("__", maxsplit=1) + cls._check_field(field) + + # Handle custom fields. See CustomFieldAnnotation docstring for more details. + custom_relationship_annotation = None + metadata_for_this_field = getattr(type_hints[field], "__metadata__", []) + for metadata in metadata_for_this_field: + if isinstance(metadata, CustomFieldAnnotation): + obj.cf[metadata.name] = value + return + if isinstance(metadata, CustomRelationshipAnnotation): + custom_relationship_annotation = metadata + break + + # Prepare handling of foreign keys and custom relationship foreign keys. + # Example: If field is `tenant__group__name`, then + # `foreign_keys["tenant"]["group__name"] = value` or + # `custom_relationship_foreign_keys["tenant"]["group__name"] = value` + # Also, the model class will be added to the dictionary for normal foreign keys, so we can later use it + # for querying: + # `foreign_keys["tenant"]["_model_class"] = nautobot.tenancy.models.Tenant + # For custom relationship foreign keys, we add the annotation instead: + # `custom_relationship_foreign_keys["tenant"]["_annotation"] = CustomRelationshipAnnotation(...) 
+ if "__" in field: + related_model, lookup = field.split("__", maxsplit=1) + # Custom relationship foreign keys + if custom_relationship_annotation: + relationship_fields["custom_relationship_foreign_keys"][related_model][lookup] = value + relationship_fields["custom_relationship_foreign_keys"][related_model][ + "_annotation" + ] = custom_relationship_annotation + # Normal foreign keys + else: django_field = cls._model._meta.get_field(related_model) - foreign_keys[related_model][lookup] = value + relationship_fields["foreign_keys"][related_model][lookup] = value # Add a special key to the dictionary to point to the related model's class - foreign_keys[related_model]["_model_class"] = django_field.related_model - continue + relationship_fields["foreign_keys"][related_model]["_model_class"] = django_field.related_model + return + + # Prepare handling of custom relationship many-to-many fields. + if custom_relationship_annotation: + relationship = diffsync.custom_relationship_cache.get( + custom_relationship_annotation.name, + Relationship.objects.get(label=custom_relationship_annotation.name), + ) + if custom_relationship_annotation.side == RelationshipSideEnum.DESTINATION: + related_object_content_type = relationship.source_type + else: + related_object_content_type = relationship.destination_type + relationship_fields["custom_relationship_many_to_many_fields"][field] = { + "objects": [ + related_object_content_type.model_class().objects.get(**parameters) for parameters in value + ], + "annotation": custom_relationship_annotation, + } + return - # Handle custom fields. See CustomFieldAnnotation docstring for more details. 
- is_custom_field = False - metadata_for_this_field = getattr(type_hints[field], "__metadata__", []) - for metadata in metadata_for_this_field: - if isinstance(metadata, CustomFieldAnnotation): - obj.cf[metadata.name] = value - is_custom_field = True - continue - if is_custom_field: - continue + django_field = cls._model._meta.get_field(field) - django_field = cls._model._meta.get_field(field) + # Prepare handling of many-to-many fields. If we are dealing with a many-to-many field, + # we get all the related objects here to later set them once the object has been saved. + if django_field.many_to_many or django_field.one_to_many: + relationship_fields["many_to_many_fields"][field] = [ + django_field.related_model.objects.get(**parameters) for parameters in value + ] + return - # Prepare handling of many-to-many fields. If we are dealing with a many-to-many field, - # we get all the related objects here to later set them once the object has been saved. - if django_field.many_to_many or django_field.one_to_many: - many_to_many_fields[field] = [ - django_field.related_model.objects.get(**parameters) for parameters in value - ] - continue + # As the default case, just set the attribute directly + setattr(obj, field, value) - # As the default case, just set the attribute directly - setattr(obj, field, value) + @classmethod + def _update_obj_with_parameters(cls, obj, parameters, diffsync): + """Update a given Nautobot ORM object with the given parameters.""" + relationship_fields = { + # Example: {"group": {"name": "Group Name", "_model_class": TenantGroup}} + "foreign_keys": defaultdict(dict), + # Example: {"tags": [Tag-1, Tag-2]} + "many_to_many_fields": defaultdict(list), + # Example: TODO + "custom_relationship_foreign_keys": defaultdict(dict), + # Example: TODO + "custom_relationship_many_to_many_fields": defaultdict(dict), + } + for field, value in parameters.items(): + cls._handle_single_field(field, obj, value, relationship_fields, diffsync) # Set foreign keys - 
cls._lookup_and_set_foreign_keys(foreign_keys, obj) + cls._lookup_and_set_foreign_keys(relationship_fields["foreign_keys"], obj) # Save the object to the database try: @@ -376,8 +574,58 @@ def _update_obj_with_parameters(cls, obj, parameters): except ValidationError as error: raise ValidationError(f"Parameters: {parameters}") from error - # Set many-to-many fields after saving - cls._set_many_to_many_fields(many_to_many_fields, obj) + # Handle relationship association creation. This needs to be after object creation, because relationship + # association objects rely on both sides already existing. + cls._lookup_and_set_custom_relationship_foreign_keys( + relationship_fields["custom_relationship_foreign_keys"], obj, diffsync + ) + cls._set_custom_relationship_to_many_fields( + relationship_fields["custom_relationship_many_to_many_fields"], obj, diffsync + ) + + # Set many-to-many fields after saving. + cls._set_many_to_many_fields(relationship_fields["many_to_many_fields"], obj) + + @classmethod + def _set_custom_relationship_to_many_fields(cls, custom_relationship_many_to_many_fields, obj, diffsync): + for _, dictionary in custom_relationship_many_to_many_fields.items(): + annotation = dictionary.pop("annotation") + objects = dictionary.pop("objects") + # TODO: Deduplicate this code + relationship = diffsync.custom_relationship_cache.get( + annotation.name, Relationship.objects.get(label=annotation.name) + ) + parameters = { + "relationship": relationship, + "source_type": relationship.source_type, + "destination_type": relationship.destination_type, + } + associations = [] + if annotation.side == RelationshipSideEnum.SOURCE: + parameters["source_id"] = obj.id + for object_to_relate in objects: + try: + association = RelationshipAssociation.objects.get( + **parameters, destination_id=object_to_relate.id + ) + except RelationshipAssociation.DoesNotExist: + association = RelationshipAssociation(**parameters, destination_id=object_to_relate.id) + 
association.validated_save() + associations.append(association) + else: + parameters["destination_id"] = obj.id + for object_to_relate in objects: + try: + association = RelationshipAssociation.objects.get(**parameters, source_id=object_to_relate.id) + except RelationshipAssociation.DoesNotExist: + association = RelationshipAssociation(**parameters, source_id=object_to_relate.id) + association.validated_save() + associations.append(association) + # Now we need to clean up any associations that we're not `get_or_create`'d in order to achieve + # declarativeness. + for existing_association in RelationshipAssociation.objects.filter(**parameters): + if existing_association not in associations: + existing_association.delete() @classmethod def _set_many_to_many_fields(cls, many_to_many_fields, obj): @@ -396,6 +644,36 @@ def _set_many_to_many_fields(cls, many_to_many_fields, obj): many_to_many_field = getattr(obj, field_name) many_to_many_field.set(related_objects) + @classmethod + def _lookup_and_set_custom_relationship_foreign_keys(cls, custom_relationship_foreign_keys, obj, diffsync): + for _, related_model_dict in custom_relationship_foreign_keys.items(): + annotation = related_model_dict.pop("_annotation") + # TODO: Deduplicate this code + relationship = diffsync.custom_relationship_cache.get( + annotation.name, Relationship.objects.get(label=annotation.name) + ) + parameters = { + "relationship": relationship, + "source_type": relationship.source_type, + "destination_type": relationship.destination_type, + } + if annotation.side == RelationshipSideEnum.SOURCE: + parameters["source_id"] = obj.id + RelationshipAssociation.objects.update_or_create( + **parameters, + defaults={ + "destination_id": relationship.destination_type.model_class() + .objects.get(**related_model_dict) + .id + }, + ) + else: + parameters["destination_id"] = obj.id + RelationshipAssociation.objects.update_or_create( + **parameters, + defaults={"source_id": 
relationship.source_type.model_class().objects.get(**related_model_dict).id}, + ) + @classmethod def _lookup_and_set_foreign_keys(cls, foreign_keys, obj): """ diff --git a/nautobot_ssot/tests/test_contrib.py b/nautobot_ssot/tests/test_contrib.py index 537f4f66a..672034bf9 100644 --- a/nautobot_ssot/tests/test_contrib.py +++ b/nautobot_ssot/tests/test_contrib.py @@ -7,7 +7,8 @@ from nautobot.circuits.models import Provider from nautobot.dcim.choices import InterfaceTypeChoices from nautobot.dcim.models import LocationType, Location, Manufacturer, DeviceType, Device, Interface -from nautobot.extras.models import Tag, Status, CustomField, Role +from nautobot.extras.choices import RelationshipTypeChoices +from nautobot.extras.models import Tag, Status, CustomField, Role, Relationship, RelationshipAssociation from nautobot.ipam.models import Prefix, IPAddress, Namespace from nautobot.tenancy.models import Tenant, TenantGroup from nautobot.core.testing import TestCase @@ -17,6 +18,8 @@ NautobotModel, NautobotAdapter, CustomFieldAnnotation, + CustomRelationshipAnnotation, + RelationshipSideEnum, ) @@ -534,6 +537,142 @@ def test_generic_relation_add_backwards(self): self.assertEqual(self.prefix, nautobot_ip_address.parent) +class TenantModelCustomRelationship(NautobotModel): + """Tenant model for testing custom relationship support.""" + + _model = Tenant + _modelname = "tenant" + _identifiers = ("name",) + _attributes = ("provider__name",) + + name: str + provider__name: Annotated[ + Optional[str], CustomRelationshipAnnotation(name="Test Relationship", side=RelationshipSideEnum.SOURCE) + ] = None + + +class TenantDict(TypedDict): + """Many-to-many relationship typed dict explaining which fields are interesting.""" + + name: str + + +class ProviderModelCustomRelationship(NautobotModel): + """Provider model for testing custom relationship support.""" + + _model = Provider + _modelname = "provider" + _identifiers = ("name",) + _attributes = ("tenants",) + + name: str + 
tenants: Annotated[ + List[TenantDict], CustomRelationshipAnnotation(name="Test Relationship", side=RelationshipSideEnum.DESTINATION) + ] = [] + + +class CustomRelationShipTestAdapterSource(NautobotAdapter): + """Adapter for testing custom relationship support.""" + + top_level = ["tenant"] + tenant = TenantModelCustomRelationship + + +class CustomRelationShipTestAdapterDestination(NautobotAdapter): + """Adapter for testing custom relationship support.""" + + top_level = ["provider"] + provider = ProviderModelCustomRelationship + + +class AdapterCustomRelationshipTest(TestCase): + """Test case for custom relationships.""" + + def setUp(self): + self.relationship = Relationship.objects.create( + label="Test Relationship", + source_type=ContentType.objects.get_for_model(Tenant), + destination_type=ContentType.objects.get_for_model(Provider), + type=RelationshipTypeChoices.TYPE_ONE_TO_MANY, + ) + self.tenant = Tenant.objects.create(name="Test Tenant") + self.provider = Provider.objects.create(name="Test Provider") + RelationshipAssociation.objects.create( + relationship=self.relationship, + source=self.tenant, + destination=self.provider, + ) + + def test_load_source(self): + """Test loading a single custom relationship from the source side.""" + adapter = CustomRelationShipTestAdapterSource(job=MagicMock()) + adapter.load() + self.assertEqual(adapter.get_all("tenant")[0].provider__name, self.provider.name) + + def test_load_destination(self): + """Test loading a single custom relationship from the destination side.""" + adapter = CustomRelationShipTestAdapterDestination(job=MagicMock()) + adapter.load() + message = "Loading custom relationships through the destination side doesn't work." 
+ try: + diffsync_provider = adapter.get_all("provider")[0] + tenant_name = diffsync_provider.tenants[0]["name"] + except IndexError: + self.fail(message) + self.assertEqual(tenant_name, self.tenant.name, msg=message) + + +class BaseModelCustomRelationshipTest(TestCase): + """Tests for manipulating custom relationships through the shared base model code.""" + + @classmethod + def setUpTestData(cls): + cls.relationship = Relationship.objects.create( + label="Test Relationship", + source_type=ContentType.objects.get_for_model(Tenant), + destination_type=ContentType.objects.get_for_model(Provider), + ) + cls.tenant_one = Tenant.objects.create(name="Test Tenant 1") + cls.tenant_two = Tenant.objects.create(name="Test Tenant 2") + cls.provider_one = Provider.objects.create(name="Test Provider 1") + cls.provider_two = Provider.objects.create(name="Test Provider 2") + + def test_custom_relationship_add_foreign_key(self): + diffsync_tenant = TenantModelCustomRelationship( + name=self.tenant_one.name, + ) + diffsync_tenant.diffsync = CustomRelationShipTestAdapterSource(job=MagicMock()) + diffsync_tenant.update({"provider__name": self.provider_one.name}) + self.assertEqual(RelationshipAssociation.objects.count(), 1) + + def test_custom_relationship_update_foreign_key(self): + diffsync_tenant = TenantModelCustomRelationship( + name=self.tenant_one.name, + ) + diffsync_tenant.diffsync = CustomRelationShipTestAdapterSource(job=MagicMock()) + diffsync_tenant.update({"provider__name": self.provider_one.name}) + diffsync_tenant.update({"provider__name": self.provider_two.name}) + self.assertEqual(RelationshipAssociation.objects.first().destination, self.provider_two) + + def test_custom_relationship_add_to_many(self): + diffsync_provider = ProviderModelCustomRelationship( + name=self.provider_one.name, + ) + diffsync_provider.diffsync = CustomRelationShipTestAdapterDestination(job=MagicMock()) + diffsync_provider.update({"tenants": [{"name": self.tenant_one.name}, {"name": 
self.tenant_two.name}]}) + self.assertEqual(RelationshipAssociation.objects.count(), 2) + + def test_custom_relationship_update_to_many(self): + diffsync_provider = ProviderModelCustomRelationship( + name=self.provider_one.name, + ) + diffsync_provider.diffsync = CustomRelationShipTestAdapterDestination(job=MagicMock()) + diffsync_provider.update({"tenants": [{"name": self.tenant_one.name}]}) + diffsync_provider.update({"tenants": [{"name": self.tenant_two.name}]}) + self.assertEqual(RelationshipAssociation.objects.count(), 1) + self.assertEqual(RelationshipAssociation.objects.first().source, self.tenant_two) + + class BaseModelManyToManyTest(TestCase): """Tests for manipulating many-to-many relationships through the shared base model code.""" From 7e33929455d3575abeec511a05e31e26d7a11689 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 10 Jan 2024 16:56:48 -0600 Subject: [PATCH 17/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20APP=5FSET?= =?UTF-8?q?TING=20to=20use=20aristacv=5F=20prepend?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../aristacv/diffsync/adapters/cloudvision.py | 2 +- .../aristacv/diffsync/adapters/nautobot.py | 2 +- .../aristacv/diffsync/models/cloudvision.py | 12 ++-- .../aristacv/diffsync/models/nautobot.py | 2 +- nautobot_ssot/integrations/aristacv/jobs.py | 66 ++++++++++--------- .../integrations/aristacv/signals.py | 4 +- .../aristacv/utils/cloudvision.py | 10 ++- .../integrations/aristacv/utils/nautobot.py | 6 +- .../aristacv/test_cloudvision_adapter.py | 2 +- nautobot_ssot/tests/aristacv/test_jobs.py | 22 +++---- .../tests/aristacv/test_utils_cloudvision.py | 4 +- .../tests/aristacv/test_utils_nautobot.py | 34 +++++----- 12 files changed, 86 insertions(+), 80 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py index 
1ec002ac7..20eb94cf2 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py @@ -39,7 +39,7 @@ def __init__(self, *args, job=None, conn: cloudvision.CloudvisionApi, **kwargs): def load_devices(self): """Load devices from CloudVision.""" - if APP_SETTINGS.get("create_controller"): + if APP_SETTINGS.get("aristacv_create_controller"): cvp_version = cloudvision.get_cvp_version() cvp_ver_cf = self.cf(name="arista_eos", value=cvp_version, device_name="CloudVision") try: diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index 794c6ea2c..8e0c995d4 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -135,7 +135,7 @@ def sync_complete(self, source: DiffSync, *args, **kwargs): source (DiffSync): Source DiffSync DataSource adapter. """ # if Controller is created we need to ensure all imported Devices have RelationshipAssociation to it. 
- if APP_SETTINGS.get("create_controller"): + if APP_SETTINGS.get("aristacv_create_controller"): self.job.logger.info("Creating Relationships between CloudVision and connected Devices.") controller_relation = OrmRelationship.objects.get(name="Controller -> Device") device_ct = ContentType.objects.get_for_model(OrmDevice) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py index 5edc89e98..7edd8600c 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py @@ -112,12 +112,12 @@ class CloudvisionCustomField(CustomField): def connect_cvp(): """Connect to Cloudvision gRPC endpoint.""" return CloudvisionApi( - cvp_host=APP_SETTINGS["cvp_host"], - cvp_port=APP_SETTINGS.get("cvp_port", "8443"), - verify=APP_SETTINGS["verify"], - username=APP_SETTINGS["cvp_user"], - password=APP_SETTINGS["cvp_password"], - cvp_token=APP_SETTINGS["cvp_token"], + cvp_host=APP_SETTINGS["aristacv_cvp_host"], + cvp_port=APP_SETTINGS.get("aristacv_cvp_port", "8443"), + verify=APP_SETTINGS["aristacv_verify"], + username=APP_SETTINGS["aristacv_cvp_user"], + password=APP_SETTINGS["aristacv_cvp_password"], + cvp_token=APP_SETTINGS["aristacv_cvp_token"], ) @classmethod diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py index d6e44bc95..21270f601 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py @@ -240,7 +240,7 @@ def update(self, attrs): def delete(self): """Delete Interface in Nautobot.""" - if APP_SETTINGS.get("delete_devices_on_sync"): + if APP_SETTINGS.get("aristacv_delete_devices_on_sync"): super().delete() if self.diffsync.job.debug: self.diffsync.job.logger.warning(f"Interface {self.name} for {self.device} will be 
deleted.") diff --git a/nautobot_ssot/integrations/aristacv/jobs.py b/nautobot_ssot/integrations/aristacv/jobs.py index 507ba2741..b83ef1fa9 100644 --- a/nautobot_ssot/integrations/aristacv/jobs.py +++ b/nautobot_ssot/integrations/aristacv/jobs.py @@ -48,29 +48,31 @@ class Meta: @classmethod def config_information(cls): """Dictionary describing the configuration of this DataSource.""" - if APP_SETTINGS.get("cvp_host"): + if APP_SETTINGS.get("aristacv_cvp_host"): server_type = "On prem" - host = APP_SETTINGS.get("cvp_host") + host = APP_SETTINGS.get("aristacv_cvp_host") else: server_type = "CVaaS" - host = APP_SETTINGS.get("cvaas_url") + host = APP_SETTINGS.get("aristacv_cvaas_url") return { "Server type": server_type, "CloudVision host": host, - "Username": APP_SETTINGS.get("cvp_user"), - "Verify": str(APP_SETTINGS.get("verify")), + "Username": APP_SETTINGS.get("aristacv_cvp_user"), + "Verify": str(APP_SETTINGS.get("aristacv_verify")), "Delete devices on sync": APP_SETTINGS.get( - "delete_devices_on_sync", str(nautobot.DEFAULT_DELETE_DEVICES_ON_SYNC) + "aristacv_delete_devices_on_sync", str(nautobot.DEFAULT_DELETE_DEVICES_ON_SYNC) + ), + "New device default site": APP_SETTINGS.get( + "aristacv_from_cloudvision_default_site", nautobot.DEFAULT_SITE ), - "New device default site": APP_SETTINGS.get("from_cloudvision_default_site", nautobot.DEFAULT_SITE), "New device default role": APP_SETTINGS.get( - "from_cloudvision_default_device_role", nautobot.DEFAULT_DEVICE_ROLE + "aristacv_from_cloudvision_default_device_role", nautobot.DEFAULT_DEVICE_ROLE ), "New device default role color": APP_SETTINGS.get( - "from_cloudvision_default_device_role_color", nautobot.DEFAULT_DEVICE_ROLE_COLOR + "aristacv_from_cloudvision_default_device_role_color", nautobot.DEFAULT_DEVICE_ROLE_COLOR ), - "Apply import tag": str(APP_SETTINGS.get("apply_import_tag", nautobot.APPLY_IMPORT_TAG)), - "Import Active": str(APP_SETTINGS.get("import_active", "True")) + "Apply import tag": 
str(APP_SETTINGS.get("aristacv_apply_import_tag", nautobot.APPLY_IMPORT_TAG)), + "Import Active": str(APP_SETTINGS.get("aristacv_import_active", "True")) # Password and Token are intentionally omitted! } @@ -98,18 +100,18 @@ def data_mappings(cls): def load_source_adapter(self): """Load data from CloudVision into DiffSync models.""" - if not APP_SETTINGS.get("from_cloudvision_default_site"): + if not APP_SETTINGS.get("aristacv_from_cloudvision_default_site"): self.logger.error( "App setting `aristacv_from_cloudvision_default_site` is not defined. This setting is required for the App to function." ) raise MissingConfigSetting(setting="aristacv_from_cloudvision_default_site") - if not APP_SETTINGS.get("from_cloudvision_default_device_role"): + if not APP_SETTINGS.get("aristacv_from_cloudvision_default_device_role"): self.logger.error( "App setting `aristacv_from_cloudvision_default_device_role` is not defined. This setting is required for the App to function." ) raise MissingConfigSetting(setting="aristacv_from_cloudvision_default_device_role") if self.debug: - if APP_SETTINGS.get("delete_devices_on_sync"): + if APP_SETTINGS.get("aristacv_delete_devices_on_sync"): self.logger.warning( "Devices not present in Cloudvision but present in Nautobot will be deleted from Nautobot." 
) @@ -119,12 +121,12 @@ def load_source_adapter(self): ) self.logger.info("Connecting to CloudVision") with CloudvisionApi( - cvp_host=APP_SETTINGS["cvp_host"], - cvp_port=APP_SETTINGS.get("cvp_port", "8443"), - verify=APP_SETTINGS["verify"], - username=APP_SETTINGS["cvp_user"], - password=APP_SETTINGS["cvp_password"], - cvp_token=APP_SETTINGS["cvp_token"], + cvp_host=APP_SETTINGS["aristacv_cvp_host"], + cvp_port=APP_SETTINGS.get("aristacv_cvp_port", "8443"), + verify=APP_SETTINGS["aristacv_verify"], + username=APP_SETTINGS["aristacv_cvp_user"], + password=APP_SETTINGS["aristacv_cvp_password"], + cvp_token=APP_SETTINGS["aristacv_cvp_token"], ) as client: self.logger.info("Loading data from CloudVision") self.source_adapter = CloudvisionAdapter(job=self, conn=client) @@ -162,17 +164,17 @@ class Meta: @classmethod def config_information(cls): """Dictionary describing the configuration of this DataTarget.""" - if APP_SETTINGS.get("cvp_host"): + if APP_SETTINGS.get("aristacv_cvp_host"): return { "Server type": "On prem", - "CloudVision host": APP_SETTINGS.get("cvp_host"), - "Username": APP_SETTINGS.get("cvp_user"), - "Verify": str(APP_SETTINGS.get("verify")) + "CloudVision host": APP_SETTINGS.get("aristacv_cvp_host"), + "Username": APP_SETTINGS.get("aristacv_cvp_user"), + "Verify": str(APP_SETTINGS.get("aristacv_verify")) # Password is intentionally omitted! } return { "Server type": "CVaaS", - "CloudVision host": APP_SETTINGS.get("cvaas_url"), + "CloudVision host": APP_SETTINGS.get("aristacv_cvaas_url"), # Token is intentionally omitted! } @@ -190,7 +192,7 @@ def load_source_adapter(self): def load_target_adapter(self): """Load data from CloudVision into DiffSync models.""" if self.debug: - if APP_SETTINGS.get("delete_devices_on_sync"): + if APP_SETTINGS.get("aristacv_delete_devices_on_sync"): self.logger.warning( "Devices not present in Cloudvision but present in Nautobot will be deleted from Nautobot." 
) @@ -200,12 +202,12 @@ def load_target_adapter(self): ) self.logger.info("Connecting to CloudVision") with CloudvisionApi( - cvp_host=APP_SETTINGS["cvp_host"], - cvp_port=APP_SETTINGS.get("cvp_port", "8443"), - verify=APP_SETTINGS["verify"], - username=APP_SETTINGS["cvp_user"], - password=APP_SETTINGS["cvp_password"], - cvp_token=APP_SETTINGS["cvp_token"], + cvp_host=APP_SETTINGS["aristacv_cvp_host"], + cvp_port=APP_SETTINGS.get("aristacv_cvp_port", "8443"), + verify=APP_SETTINGS["aristacv_verify"], + username=APP_SETTINGS["aristacv_cvp_user"], + password=APP_SETTINGS["aristacv_cvp_password"], + cvp_token=APP_SETTINGS["aristacv_cvp_token"], ) as client: self.logger.info("Loading data from CloudVision") self.target_adapter = CloudvisionAdapter(job=self, conn=client) diff --git a/nautobot_ssot/integrations/aristacv/signals.py b/nautobot_ssot/integrations/aristacv/signals.py index e343251e0..7ba3c9b73 100644 --- a/nautobot_ssot/integrations/aristacv/signals.py +++ b/nautobot_ssot/integrations/aristacv/signals.py @@ -15,7 +15,7 @@ def register_signals(sender): post_migrate.connect(post_migrate_create_manufacturer) post_migrate.connect(post_migrate_create_platform) - if APP_SETTINGS.get("create_controller"): + if APP_SETTINGS.get("aristacv_create_controller"): post_migrate.connect(post_migrate_create_controller_relationship) @@ -129,7 +129,7 @@ def post_migrate_create_platform(apps=global_apps, **kwargs): }, ) - if APP_SETTINGS.get("create_controller"): + if APP_SETTINGS.get("aristacv_create_controller"): Platform.objects.get_or_create( name="Arista EOS-CloudVision", manufacturer=Manufacturer.objects.get(name="Arista"), diff --git a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py index 2e0361895..8ca23317b 100644 --- a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py @@ -97,7 +97,7 @@ def __init__( self.metadata = 
((self.AUTH_KEY_PATH, self.cvp_token),) # Set up credentials for CVaaS using supplied token. else: - self.cvp_url = APP_SETTINGS.get("cvaas_url", "www.arista.io:443") + self.cvp_url = APP_SETTINGS.get("aristacv_cvaas_url", "www.arista.io:443") call_creds = grpc.access_token_call_credentials(self.cvp_token) channel_creds = grpc.ssl_channel_credentials() conn_creds = grpc.composite_channel_credentials(channel_creds, call_creds) @@ -269,7 +269,7 @@ def search( # pylint:disable=dangerous-default-value, too-many-locals def get_devices(client): """Get devices from CloudVision inventory.""" device_stub = services.DeviceServiceStub(client) - if APP_SETTINGS.get("import_active"): + if APP_SETTINGS.get("aristacv_import_active"): req = services.DeviceStreamRequest( partial_eq_filter=[models.Device(streaming_status=models.STREAMING_STATUS_ACTIVE)] ) @@ -675,7 +675,11 @@ def get_cvp_version(): """ client = CvpClient() try: - client.connect([APP_SETTINGS["cvp_host"]], APP_SETTINGS["cvp_user"], APP_SETTINGS["cvp_password"]) + client.connect( + [APP_SETTINGS["aristacv_cvp_host"]], + APP_SETTINGS["aristacv_cvp_user"], + APP_SETTINGS["aristacv_cvp_password"], + ) version = client.api.get_cvp_info() if "version" in version: return version["version"] diff --git a/nautobot_ssot/integrations/aristacv/utils/nautobot.py b/nautobot_ssot/integrations/aristacv/utils/nautobot.py index 4de33f6a6..45430e026 100644 --- a/nautobot_ssot/integrations/aristacv/utils/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/utils/nautobot.py @@ -107,7 +107,7 @@ def parse_hostname(hostname: str): Args: hostname (str): Device hostname to be parsed for site and role. """ - hostname_patterns = APP_SETTINGS.get("hostname_patterns") + hostname_patterns = APP_SETTINGS.get("aristacv_hostname_patterns") site, role = None, None for pattern in hostname_patterns: @@ -129,7 +129,7 @@ def get_site_from_map(site_code: str): Returns: str|None: Name of Site if site code found else None. 
""" - site_map = APP_SETTINGS.get("site_mappings") + site_map = APP_SETTINGS.get("aristacv_site_mappings") site_name = None if site_code in site_map: site_name = site_map[site_code] @@ -145,7 +145,7 @@ def get_role_from_map(role_code: str): Returns: str|None: Name of Device Role if role code found else None. """ - role_map = APP_SETTINGS.get("role_mappings") + role_map = APP_SETTINGS.get("aristacv_role_mappings") role_name = None if role_code in role_map: role_name = role_map[role_code] diff --git a/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py b/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py index 0a6675c5d..0ed9b1fc3 100644 --- a/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py +++ b/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py @@ -49,7 +49,7 @@ def setUp(self): @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - {"create_controller": False}, + {"aristacv_create_controller": False}, ) def test_load_devices(self): """Test the load_devices() adapter method.""" diff --git a/nautobot_ssot/tests/aristacv/test_jobs.py b/nautobot_ssot/tests/aristacv/test_jobs.py index c54373261..ab89b4f48 100644 --- a/nautobot_ssot/tests/aristacv/test_jobs.py +++ b/nautobot_ssot/tests/aristacv/test_jobs.py @@ -104,15 +104,15 @@ def test_data_mapping(self): # pylint: disable=too-many-statements @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", { - "cvp_host": "https://localhost", - "cvp_user": "admin", - "verify": True, - "delete_devices_on_sync": True, - "from_cloudvision_default_site": "HQ", - "from_cloudvision_default_device_role": "Router", - "from_cloudvision_default_device_role_color": "ff0000", - "apply_import_tag": True, - "import_active": True, + "aristacv_cvp_host": "https://localhost", + "aristacv_cvp_user": "admin", + "aristacv_verify": True, + "aristacv_delete_devices_on_sync": True, + "aristacv_from_cloudvision_default_site": "HQ", + "aristacv_from_cloudvision_default_device_role": 
"Router", + "aristacv_from_cloudvision_default_device_role_color": "ff0000", + "aristacv_apply_import_tag": True, + "aristacv_import_active": True, }, ) def test_config_information_on_prem(self): @@ -133,8 +133,8 @@ def test_config_information_on_prem(self): @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", { - "cvaas_url": "https://www.arista.io", - "cvp_user": "admin", + "aristacv_cvaas_url": "https://www.arista.io", + "aristacv_cvp_user": "admin", }, ) def test_config_information_cvaas(self): diff --git a/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py b/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py index 5f6dadfbf..9c40cd4df 100644 --- a/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py +++ b/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py @@ -37,7 +37,7 @@ def setUp(self): @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - {"import_active": False}, + {"aristacv_import_active": False}, ) def test_get_all_devices(self): """Test get_devices function for active and inactive devices.""" @@ -71,7 +71,7 @@ def test_get_all_devices(self): @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", - {"import_active": True}, + {"aristacv_import_active": True}, ) def test_get_active_devices(self): """Test get_devices function for active devices.""" diff --git a/nautobot_ssot/tests/aristacv/test_utils_nautobot.py b/nautobot_ssot/tests/aristacv/test_utils_nautobot.py index ebf3c948c..070169d05 100644 --- a/nautobot_ssot/tests/aristacv/test_utils_nautobot.py +++ b/nautobot_ssot/tests/aristacv/test_utils_nautobot.py @@ -105,9 +105,9 @@ def test_get_device_version_dlc_exception(self): @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", { - "hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], - "site_mappings": {"ams01": "Amsterdam"}, - "role_mappings": {"leaf": "leaf"}, + "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], + "aristacv_site_mappings": 
{"ams01": "Amsterdam"}, + "aristacv_role_mappings": {"leaf": "leaf"}, }, ) def test_parse_hostname(self): @@ -120,9 +120,9 @@ def test_parse_hostname(self): @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", { - "hostname_patterns": [r"(?P\w{2,3}\d+)-.+-\d+"], - "site_mappings": {"ams01": "Amsterdam"}, - "role_mappings": {}, + "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-.+-\d+"], + "aristacv_site_mappings": {"ams01": "Amsterdam"}, + "aristacv_role_mappings": {}, }, ) def test_parse_hostname_only_site(self): @@ -135,9 +135,9 @@ def test_parse_hostname_only_site(self): @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", { - "hostname_patterns": [r".+-(?P\w+)-\d+"], - "site_mappings": {}, - "role_mappings": {"leaf": "leaf"}, + "aristacv_hostname_patterns": [r".+-(?P\w+)-\d+"], + "aristacv_site_mappings": {}, + "aristacv_role_mappings": {"leaf": "leaf"}, }, ) def test_parse_hostname_only_role(self): @@ -150,8 +150,8 @@ def test_parse_hostname_only_role(self): @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", { - "hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], - "site_mappings": {"ams01": "Amsterdam"}, + "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], + "aristacv_site_mappings": {"ams01": "Amsterdam"}, }, ) def test_get_site_from_map_success(self): @@ -163,8 +163,8 @@ def test_get_site_from_map_success(self): @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", { - "hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], - "site_mappings": {}, + "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], + "aristacv_site_mappings": {}, }, ) def test_get_site_from_map_fail(self): @@ -176,8 +176,8 @@ def test_get_site_from_map_fail(self): @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", { - "hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], - "role_mappings": {"edge": "Edge Router"}, + "aristacv_hostname_patterns": 
[r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], + "aristacv_role_mappings": {"edge": "Edge Router"}, }, ) def test_get_role_from_map_success(self): @@ -189,8 +189,8 @@ def test_get_role_from_map_success(self): @patch.dict( "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", { - "hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], - "role_mappings": {}, + "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], + "aristacv_role_mappings": {}, }, ) def test_get_role_from_map_fail(self): From 85196d3c021f17fab2ef988e0ea49c2a3e7245dc Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 10 Jan 2024 16:57:17 -0600 Subject: [PATCH 18/47] =?UTF-8?q?refactor:=20=E2=99=BB=EF=B8=8F=20Update?= =?UTF-8?q?=20verify=5Fdevice=5Frole()=20to=20ensure=20Device=20ContentTyp?= =?UTF-8?q?e=20always=20applied.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/integrations/aristacv/utils/nautobot.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/utils/nautobot.py b/nautobot_ssot/integrations/aristacv/utils/nautobot.py index 45430e026..f6378ea57 100644 --- a/nautobot_ssot/integrations/aristacv/utils/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/utils/nautobot.py @@ -60,8 +60,8 @@ def verify_device_role_object(role_name, role_color): role_obj = Role.objects.get(name=role_name) except Role.DoesNotExist: role_obj = Role.objects.create(name=role_name, color=role_color) - role_obj.content_types.add(ContentType.objects.get_for_model(Device)) - role_obj.validated_save() + role_obj.content_types.add(ContentType.objects.get_for_model(Device)) + role_obj.validated_save() return role_obj From 194b4dea2e9630c835be49777648380476a90c81 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 11:43:37 -0600 Subject: [PATCH 19/47] =?UTF-8?q?build:=20=F0=9F=90=9B=20Correct=20setting?= 
=?UTF-8?q?=20example=20to=20not=20be=20list=20in=20list,=20it=20should=20?= =?UTF-8?q?be=20string=20in=20list.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- development/nautobot_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 07f784400..39d5c52d7 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -178,7 +178,7 @@ "aristacv_from_cloudvision_default_device_role": "network", "aristacv_from_cloudvision_default_device_role_color": "ff0000", "aristacv_from_cloudvision_default_site": "cloudvision_imported", - "aristacv_hostname_patterns": [[r"(?P\w{2,3}\d+)-(?P\w+)-\d+"]], + "aristacv_hostname_patterns": [r"(?P\w{2,3}\d+)-(?P\w+)-\d+"], "aristacv_import_active": is_truthy(os.getenv("NAUTOBOT_ARISTACV_IMPORT_ACTIVE", False)), "aristacv_role_mappings": { "bb": "backbone", From 4e7db91443cbb09dd0b5361108b6d579e818f08c Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 11:46:24 -0600 Subject: [PATCH 20/47] =?UTF-8?q?feat:=20=E2=9C=A8=20Add=20Namespace=20Dif?= =?UTF-8?q?fSync=20model=20and=20add=20to=20IPAddress=20and=20IPAssignment?= =?UTF-8?q?.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is to allow for duplicate Prefixes/IPAddresses as long as they are in their own Namespace. This pairs with pulling the VRF from CVP. 
--- .../aristacv/diffsync/adapters/cloudvision.py | 9 +++-- .../aristacv/diffsync/adapters/nautobot.py | 15 +++++++- .../aristacv/diffsync/models/base.py | 21 +++++++++-- .../aristacv/diffsync/models/cloudvision.py | 21 +++++++++++ .../aristacv/diffsync/models/nautobot.py | 36 ++++++++++++++++--- 5 files changed, 93 insertions(+), 9 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py index 20eb94cf2..0fd98f374 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py @@ -11,6 +11,7 @@ from nautobot_ssot.integrations.aristacv.diffsync.models.cloudvision import ( CloudvisionCustomField, CloudvisionDevice, + CloudvisionNamespace, CloudvisionPort, CloudvisionPrefix, CloudvisionIPAddress, @@ -24,12 +25,13 @@ class CloudvisionAdapter(DiffSync): device = CloudvisionDevice port = CloudvisionPort + namespace = CloudvisionNamespace prefix = CloudvisionPrefix ipaddr = CloudvisionIPAddress ipassignment = CloudvisionIPAssignment cf = CloudvisionCustomField - top_level = ["device", "prefix", "ipaddr", "ipassignment", "cf"] + top_level = ["device", "namespace", "prefix", "ipaddr", "ipassignment", "cf"] def __init__(self, *args, job=None, conn: cloudvision.CloudvisionApi, **kwargs): """Initialize the CloudVision DiffSync adapter.""" @@ -188,13 +190,15 @@ def load_ip_addresses(self, dev: device): ) if intf["address"] and intf["address"] != "none": prefix = ipaddress.ip_interface(intf["address"]).network.with_prefixlen + self.get_or_instantiate(self.namespace, ids={"name": intf_vrf}) self.get_or_instantiate( self.prefix, - ids={"prefix": prefix}, + ids={"prefix": prefix, "namespace": intf_vrf}, ) new_ip = self.ipaddr( address=intf["address"], prefix=prefix, + namespace=intf_vrf, uuid=None, ) try: @@ -208,6 +212,7 @@ def load_ip_addresses(self, dev: device): 
self.ipassignment, ids={ "address": intf["address"], + "namespace": intf_vrf, "device": dev.name, "interface": intf["interface"], }, diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index 8e0c995d4..2f5db669e 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -13,6 +13,7 @@ from nautobot_ssot.integrations.aristacv.diffsync.models.nautobot import ( NautobotDevice, NautobotCustomField, + NautobotNamespace, NautobotPrefix, NautobotIPAddress, NautobotIPAssignment, @@ -26,12 +27,13 @@ class NautobotAdapter(DiffSync): device = NautobotDevice port = NautobotPort + namespace = NautobotNamespace prefix = NautobotPrefix ipaddr = NautobotIPAddress ipassignment = NautobotIPAssignment cf = NautobotCustomField - top_level = ["device", "prefix", "ipaddr", "ipassignment", "cf"] + top_level = ["device", "namespace", "prefix", "ipaddr", "ipassignment", "cf"] def __init__(self, *args, job=None, **kwargs): """Initialize the Nautobot DiffSync adapter.""" @@ -99,17 +101,27 @@ def load_interfaces(self): def load_ip_addresses(self): """Add Nautobot IPAddress objects as DiffSync IPAddress models.""" for ipaddr in OrmIPAddress.objects.filter(interfaces__device__device_type__manufacturer__name__in=["Arista"]): + try: + self.get(self.namespace, ipaddr.parent.namespace.name) + except ObjectNotFound: + new_ns = self.namespace( + name=ipaddr.parent.namespace.name, + uuid=ipaddr.parent.namespace.id, + ) + self.add(new_ns) try: self.get(self.prefix, ipaddr.parent.prefix.with_prefixlen) except ObjectNotFound: new_pf = self.prefix( prefix=ipaddr.parent.prefix.with_prefixlen, + namespace=ipaddr.parent.namespace.name, uuid=ipaddr.parent.prefix.id, ) self.add(new_pf) new_ip = self.ipaddr( address=str(ipaddr.address), prefix=ipaddr.parent.prefix.with_prefixlen, + namespace=ipaddr.parent.namespace.name, 
uuid=ipaddr.id, ) try: @@ -120,6 +132,7 @@ def load_ip_addresses(self): for mapping in ip_to_intfs: new_map = self.ipassignment( address=str(ipaddr.address), + namespace=mapping.ip_address.namespace.name, device=mapping.device.name, interface=mapping.interface.name, primary=len(mapping.ip_address.primary_ip4_for.all()) > 0 diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/base.py b/nautobot_ssot/integrations/aristacv/diffsync/models/base.py index f51f65484..c0a0a0971 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/base.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/base.py @@ -58,17 +58,30 @@ class Port(DiffSyncModel): uuid: Optional[UUID] +class Namespace(DiffSyncModel): + """Namespace Model.""" + + _modelname = "namespace" + _identifiers = ("name",) + _attributes = () + _children = {} + + name: str + uuid: Optional[UUID] + + class Prefix(DiffSyncModel): - """DiffSync Model for Ringhealth nodes management network.""" + """Prefix Model.""" model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST _modelname = "prefix" - _identifiers = ("prefix",) + _identifiers = ("prefix", "namespace") _attributes = () _children = {} prefix: str + namespace: str uuid: Optional[UUID] @@ -81,12 +94,14 @@ class IPAddress(DiffSyncModel): _identifiers = ( "address", "prefix", + "namespace", ) _attributes = () _children = {} address: str prefix: str + namespace: str uuid: Optional[UUID] @@ -96,6 +111,7 @@ class IPAssignment(DiffSyncModel): _modelname = "ipassignment" _identifiers = ( "address", + "namespace", "device", "interface", ) @@ -103,6 +119,7 @@ class IPAssignment(DiffSyncModel): _children = {} address: str + namespace: str device: str interface: str primary: bool diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py index 7edd8600c..015e8f5c0 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py +++ 
b/nautobot_ssot/integrations/aristacv/diffsync/models/cloudvision.py @@ -3,6 +3,7 @@ from nautobot_ssot.integrations.aristacv.diffsync.models.base import ( Device, CustomField, + Namespace, Prefix, IPAddress, IPAssignment, @@ -45,6 +46,26 @@ def delete(self): return self +class CloudvisionNamespace(Namespace): + """Cloudvision Namespace model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Namespace in AristaCV from Namespace object.""" + ... + return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + + def update(self, attrs): + """Update Namespace in AristaCV from Namespace object.""" + ... + return super().update(attrs) + + def delete(self): + """Delete Namespace in AristaCV from Namespace object.""" + ... + return self + + class CloudvisionPrefix(Prefix): """Cloudvision IPAdress model.""" diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py index 21270f601..d02398f7f 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py @@ -10,7 +10,8 @@ from nautobot.extras.models import Status as OrmStatus from nautobot.ipam.models import IPAddress as OrmIPAddress from nautobot.ipam.models import Prefix as OrmPrefix -from nautobot.ipam.models import Namespace, IPAddressToInterface +from nautobot.ipam.models import Namespace as OrmNamespace +from nautobot.ipam.models import IPAddressToInterface import distutils from nautobot_ssot.integrations.aristacv.constant import ( @@ -23,6 +24,7 @@ CustomField, IPAddress, IPAssignment, + Namespace, Port, Prefix, ) @@ -249,6 +251,28 @@ def delete(self): return self +class NautobotNamespace(Namespace): + """Nautobot Prefix model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Prefix in Nautobot from NautobotPrefix objects.""" + if diffsync.job.debug: + diffsync.job.logger.info(f"Creating Namespace 
{ids['name']}.") + _ns = OrmNamespace( + name=ids["name"], + ) + _ns.validated_save() + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def delete(self): + """Delete Namespace in Nautobot.""" + super().delete() + _ns = OrmNamespace.objects.get(id=self.uuid) + _ns.delete() + return self + + class NautobotPrefix(Prefix): """Nautobot Prefix model.""" @@ -259,7 +283,7 @@ def create(cls, diffsync, ids, attrs): diffsync.job.logger.info(f"Creating Prefix {ids['prefix']}.") _pf = OrmPrefix( prefix=ids["prefix"], - namespace=Namespace.objects.get(name="Global"), + namespace=OrmNamespace.objects.get(name=ids["namespace"]), status=OrmStatus.objects.get(name="Active"), ) _pf.validated_save() @@ -274,7 +298,9 @@ def create(cls, diffsync, ids, attrs): """Create IPAddress in Nautobot.""" new_ip = OrmIPAddress( address=ids["address"], - parent=OrmPrefix.objects.get(prefix=ids["prefix"], namespace=Namespace.objects.get(name="Global")), + parent=OrmPrefix.objects.get( + prefix=ids["prefix"], namespace=OrmNamespace.objects.get(name=ids["namespace"]) + ), status=OrmStatus.objects.get(name="Active"), ) new_ip.validated_save() @@ -288,7 +314,9 @@ class NautobotIPAssignment(IPAssignment): def create(cls, diffsync, ids, attrs): """Create IPAddressToInterface in Nautobot.""" try: - ipaddr = OrmIPAddress.objects.get(address=ids["address"]) + ipaddr = OrmIPAddress.objects.get( + address=ids["address"], parent__namespace=OrmNamespace.objects.get(name=ids["namespace"]) + ) intf = OrmInterface.objects.get(name=ids["interface"], device__name=ids["device"]) new_map = IPAddressToInterface(ip_address=ipaddr, interface=intf) if "loopback" in ids["interface"]: From 35ad881601885063c5e1cddca9f1925e6fcef2c7 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 11:48:21 -0600 Subject: [PATCH 21/47] =?UTF-8?q?feat:=20=E2=9C=A8=20Add=20method=20to=20g?= =?UTF-8?q?et=20an=20Interface's=20VRF.?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is being done to enable Namespace functionality to allow for duplicate prefixes and IPAddresses. --- .../aristacv/diffsync/adapters/cloudvision.py | 1 + .../aristacv/utils/cloudvision.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py index 0fd98f374..4431bae82 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py @@ -188,6 +188,7 @@ def load_ip_addresses(self, dev: device): self.job.logger.info( f"Attempting to load IP Address {intf['address']} for {intf['interface']} on {dev.name}." ) + intf_vrf = cloudvision.get_interface_vrf(client=self.conn, dId=dev.serial, interface=intf["interface"]) if intf["address"] and intf["address"] != "none": prefix = ipaddress.ip_interface(intf["address"]).network.with_prefixlen self.get_or_instantiate(self.namespace, ids={"name": intf_vrf}) diff --git a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py index 8ca23317b..4dbe8f464 100644 --- a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py @@ -640,6 +640,25 @@ def get_interface_description(client: CloudvisionApi, dId: str, interface: str): return "" +def get_interface_vrf(client: CloudvisionApi, dId: str, interface: str) -> str: + """Gets interface VRF. + + Args: + client (CloudvisionApi): Cloudvision connection. + dId (str): Device ID to determine type for. + interface (str): Name of interface to get mode information for. 
+ """ + pathElts = ["Sysdb", "l3", "intf", "config", "intfConfig", interface] + query = [create_query([(pathElts, [])], dId)] + query = unfreeze_frozen_dict(query) + + for batch in client.get(query): + for notif in batch["notifications"]: + if notif["updates"].get("vrf"): + return notif["updates"]["vrf"]["value"] + return "Global" + + def get_ip_interfaces(client: CloudvisionApi, dId: str): """Gets interfaces with IP Addresses configured from specified device. From 1d80b272363c93a0643190f52836075f2241061c Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 11:48:41 -0600 Subject: [PATCH 22/47] =?UTF-8?q?refactor:=20=E2=99=BB=EF=B8=8F=20Move=20d?= =?UTF-8?q?evice=20index=20logging=20behind=20debug?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/aristacv/diffsync/adapters/cloudvision.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py index 4431bae82..871444d83 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py @@ -64,7 +64,8 @@ def load_devices(self): self.job.logger.warning(f"Error attempting to add CloudVision device. 
{err}") for index, dev in enumerate(cloudvision.get_devices(client=self.conn.comm_channel), start=1): - self.job.logger.info(f"Loading {index}° device") + if self.job.debug: + self.job.logger.info(f"Loading {index}° device") if dev["hostname"] != "": new_device = self.device( name=dev["hostname"], From 8b1e23e9a243b1d70b5c658cf32b768f3e3e409e Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 11:49:48 -0600 Subject: [PATCH 23/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Change=20read=5Fset?= =?UTF-8?q?tings=20to=20return=20config=20without=20altering=20key.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Having the keys used for PLUGINS_CFG not matching what's in nautobot_config was confusing and not obvious unless you saw this. I've removed this tweak as it's just adding complexity unnecessarily. All settings have already been updated to use form from nautobot_config.py. --- nautobot_ssot/integrations/aristacv/constant.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/aristacv/constant.py b/nautobot_ssot/integrations/aristacv/constant.py index 44adcbd0d..5a30f014a 100644 --- a/nautobot_ssot/integrations/aristacv/constant.py +++ b/nautobot_ssot/integrations/aristacv/constant.py @@ -5,7 +5,7 @@ def _read_settings() -> dict: config = settings.PLUGINS_CONFIG["nautobot_ssot"] - return {key[9:]: value for key, value in config.items() if key.startswith("aristacv_")} + return config APP_SETTINGS = _read_settings() From 1dec3fa96787b0d6354a143025e05df6d3d92fb8 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 11:50:14 -0600 Subject: [PATCH 24/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Ensure=20that=20Loc?= =?UTF-8?q?ationType=20is=20saved=20after=20applying=20ContentTypes.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
nautobot_ssot/integrations/device42/signals.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_ssot/integrations/device42/signals.py b/nautobot_ssot/integrations/device42/signals.py index 07042e693..2b19162b6 100644 --- a/nautobot_ssot/integrations/device42/signals.py +++ b/nautobot_ssot/integrations/device42/signals.py @@ -26,3 +26,4 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): loc_type = LocationType.objects.update_or_create(name="Site")[0] for obj_type in [Site, RackGroup, Rack, Device, VirtualChassis, Prefix, VLAN]: loc_type.content_types.add(ContentType.objects.get_for_model(obj_type)) + loc_type.save() From 97e814a2704a64247dc1ffb466221270d9d3b720 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 11:50:39 -0600 Subject: [PATCH 25/47] =?UTF-8?q?refactor:=20=E2=99=BB=EF=B8=8F=20Change?= =?UTF-8?q?=20LocationType=20to=20get=20as=20it=20should=20exist=20from=20?= =?UTF-8?q?signals.=20No=20need=20to=20do=20get=5For=5Fcreate.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/tests/device42/unit/test_utils_nautobot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/tests/device42/unit/test_utils_nautobot.py b/nautobot_ssot/tests/device42/unit/test_utils_nautobot.py index 41984f8fe..d91cdb1cd 100644 --- a/nautobot_ssot/tests/device42/unit/test_utils_nautobot.py +++ b/nautobot_ssot/tests/device42/unit/test_utils_nautobot.py @@ -30,7 +30,7 @@ def setUp(self): self.site = Location.objects.create( name="Test Site", status=self.status_active, - location_type=LocationType.objects.get_or_create(name="Site")[0], + location_type=LocationType.objects.get(name="Site"), ) self.site.validated_save() _dt = DeviceType.objects.create(model="CSR1000v", manufacturer=self.cisco_manu) From 313f56deed0166b73b73b9ddc799109d6338c397 Mon Sep 17 00:00:00 2001 From: Justin Drew 
<2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 12:12:57 -0600 Subject: [PATCH 26/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20prefix=20?= =?UTF-8?q?definition=20to=20just=20use=20str=20on=20prefix=20object.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/aristacv/diffsync/adapters/nautobot.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index 2f5db669e..63293b8a2 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -110,17 +110,17 @@ def load_ip_addresses(self): ) self.add(new_ns) try: - self.get(self.prefix, ipaddr.parent.prefix.with_prefixlen) + self.get(self.prefix, str(ipaddr.parent.prefix)) except ObjectNotFound: new_pf = self.prefix( - prefix=ipaddr.parent.prefix.with_prefixlen, + prefix=str(ipaddr.parent.prefix), namespace=ipaddr.parent.namespace.name, uuid=ipaddr.parent.prefix.id, ) self.add(new_pf) new_ip = self.ipaddr( address=str(ipaddr.address), - prefix=ipaddr.parent.prefix.with_prefixlen, + prefix=str(ipaddr.parent.prefix), namespace=ipaddr.parent.namespace.name, uuid=ipaddr.id, ) From 1155d06f3ccd43d66e87b31168a52e32499e4169 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 12:13:24 -0600 Subject: [PATCH 27/47] =?UTF-8?q?test:=20=E2=9C=85=20Update=20test=20to=20?= =?UTF-8?q?account=20for=20DiffSync=20model=20changes.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tests/aristacv/test_cloudvision_adapter.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py b/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py index 
0ed9b1fc3..e2229e418 100644 --- a/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py +++ b/nautobot_ssot/tests/aristacv/test_cloudvision_adapter.py @@ -40,6 +40,8 @@ def setUp(self): self.cloudvision.get_interface_description.return_value = "Uplink to DC1" self.cloudvision.get_ip_interfaces = MagicMock() self.cloudvision.get_ip_interfaces.return_value = fixtures.IP_INTF_FIXTURE + self.cloudvision.get_interface_vrf = MagicMock() + self.cloudvision.get_interface_vrf.return_value = "Global" self.job = self.job_class() self.job.job_result = JobResult.objects.create( @@ -121,10 +123,14 @@ def test_load_ip_addresses(self): "nautobot_ssot.integrations.aristacv.utils.cloudvision.get_interface_description", self.cloudvision.get_interface_description, ): - self.cvp.load_ip_addresses(dev=mock_device) + with patch( + "nautobot_ssot.integrations.aristacv.utils.cloudvision.get_interface_vrf", + self.cloudvision.get_interface_vrf, + ): + self.cvp.load_ip_addresses(dev=mock_device) self.assertEqual( { - f"{ipaddr['address']}__{ipaddress.ip_interface(ipaddr['address']).network.with_prefixlen}" + f"{ipaddr['address']}__{ipaddress.ip_interface(ipaddr['address']).network.with_prefixlen}__Global" for ipaddr in fixtures.IP_INTF_FIXTURE }, {ipaddr.get_unique_id() for ipaddr in self.cvp.get_all("ipaddr")}, From dc59c6207abe19224d348d6e483c3b11d512aaac Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 12:48:51 -0600 Subject: [PATCH 28/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20id=20to?= =?UTF-8?q?=20be=20on=20parent,=20not=20prefix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/aristacv/diffsync/adapters/nautobot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index 63293b8a2..4a76a7ab5 100644 --- 
a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -115,7 +115,7 @@ def load_ip_addresses(self): new_pf = self.prefix( prefix=str(ipaddr.parent.prefix), namespace=ipaddr.parent.namespace.name, - uuid=ipaddr.parent.prefix.id, + uuid=ipaddr.parent.id, ) self.add(new_pf) new_ip = self.ipaddr( From 649909289805c21555bb255d3f975f6d3b422da7 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 14:26:26 -0600 Subject: [PATCH 29/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Namespace=20is=20on?= =?UTF-8?q?=20parent,=20not=20IPAddress?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/aristacv/diffsync/adapters/nautobot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index 4a76a7ab5..f4747b17f 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -132,7 +132,7 @@ def load_ip_addresses(self): for mapping in ip_to_intfs: new_map = self.ipassignment( address=str(ipaddr.address), - namespace=mapping.ip_address.namespace.name, + namespace=mapping.ip_address.parent.namespace.name, device=mapping.device.name, interface=mapping.interface.name, primary=len(mapping.ip_address.primary_ip4_for.all()) > 0 From 7e1e1d27db28e3dac2eaf51237c13e15d3e1701e Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Fri, 12 Jan 2024 14:44:19 -0600 Subject: [PATCH 30/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Device=20is=20off?= =?UTF-8?q?=20Interface?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/aristacv/diffsync/adapters/nautobot.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index f4747b17f..0bd6f1db0 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -133,7 +133,7 @@ def load_ip_addresses(self): new_map = self.ipassignment( address=str(ipaddr.address), namespace=mapping.ip_address.parent.namespace.name, - device=mapping.device.name, + device=mapping.interface.device.name, interface=mapping.interface.name, primary=len(mapping.ip_address.primary_ip4_for.all()) > 0 or len(mapping.ip_address.primary_ip6_for.all()) > 0, From 21c6dfda249066a698f4c3f6bbd9fa442e891acd Mon Sep 17 00:00:00 2001 From: Adam Byczkowski <38091261+qduk@users.noreply.github.com> Date: Mon, 15 Jan 2024 02:10:34 -0600 Subject: [PATCH 31/47] Added tests --- development/development.env | 1 + development/nautobot_config.py | 8 +- .../infoblox/diffsync/adapters/infoblox.py | 4 +- .../infoblox/fixtures/get_all_subnets.json | 26 ++--- .../fixtures/get_network_containers.json | 70 ++++++------ .../fixtures/get_network_containers_ipv6.json | 92 ++++++++------- .../tests/infoblox/test_infoblox_adapter.py | 106 +++++++----------- 7 files changed, 136 insertions(+), 171 deletions(-) diff --git a/development/development.env b/development/development.env index f6fca705c..3eb37964b 100644 --- a/development/development.env +++ b/development/development.env @@ -79,6 +79,7 @@ NAUTOBOT_SSOT_INFOBLOX_DEFAULT_STATUS="Active" NAUTOBOT_SSOT_INFOBLOX_ENABLE_SYNC_TO_INFOBLOX="True" NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_IP_ADDRESSES="True" NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS="True" +NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS_IPV6="False" NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLANS="True" NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLAN_VIEWS="True" 
NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS="10.46.128.0/18,192.168.1.0/24" diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 2816e1cc4..e4ebbc141 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -222,11 +222,13 @@ os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_IP_ADDRESSES") ), "infoblox_import_objects_subnets": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS")), - "infoblox_import_objects_subnets_ipv6": os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS_IPV^", "").split(","), + "infoblox_import_objects_subnets_ipv6": is_truthy( + os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_SUBNETS_IPV6") + ), "infoblox_import_objects_vlan_views": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLAN_VIEWS")), "infoblox_import_objects_vlans": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLANS")), - # "infoblox_import_subnets": os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS", "").split(","), - "infoblox_import_subnets": False, + "infoblox_import_subnets": os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS", "").split(","), + # "infoblox_import_subnets": False, "infoblox_password": os.getenv("NAUTOBOT_SSOT_INFOBLOX_PASSWORD"), "infoblox_url": os.getenv("NAUTOBOT_SSOT_INFOBLOX_URL"), "infoblox_username": os.getenv("NAUTOBOT_SSOT_INFOBLOX_USERNAME"), diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index 251210d24..873a4e9f2 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -72,12 +72,10 @@ def load_prefixes(self): containers = self.conn.get_network_containers() # raise Exception("Hello") subnets = self.conn.get_all_subnets() - if PLUGIN_CFG["infoblox_import_objects"].get("subnets_ipv6"): + if PLUGIN_CFG.get("infoblox_import_objects_subnets_ipv6"): containers += 
self.conn.get_network_containers(ipv6=True) subnets += self.conn.get_all_subnets(ipv6=True) all_networks = containers + subnets - raise Exception(f"{all_networks}") - # raise Exception(f"{containers}") self.subnets = [(x["network"], x["network_view"]) for x in subnets] default_ext_attrs = get_default_ext_attrs(review_list=all_networks) for _pf in all_networks: diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json b/nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json index 6feccdd01..d910b8ee1 100644 --- a/nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json +++ b/nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json @@ -1,14 +1,12 @@ -{ - "result": [ - { - "_ref": "network/ZG5zLm5ldHdvcmskMTAuMjIzLjAuMC8yMS8w:10.223.0.0/21/default", - "network": "10.223.0.0/21", - "network_view": "default" - }, - { - "_ref": "network/ZG5zLm5ldHdvcmskMTAuMjIwLjY0LjAvMjEvMA:10.220.64.0/21/default", - "network": "10.220.64.0/21", - "network_view": "default" - } - ] -} \ No newline at end of file +[ + { + "_ref": "network/ZG5zLm5ldHdvcmskMTAuMjIzLjAuMC8yMS8w:10.223.0.0/21/default", + "network": "10.223.0.0/21", + "network_view": "default" + }, + { + "_ref": "network/ZG5zLm5ldHdvcmskMTAuMjIwLjY0LjAvMjEvMA:10.220.64.0/21/default", + "network": "10.220.64.0/21", + "network_view": "default" + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json index ef019768d..6e4fa8b1a 100644 --- a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json +++ b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json @@ -1,38 +1,36 @@ -{ - "result": [ - { - "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDEwLjYxLjE1LjAvMjQvMA:10.61.15.0/24/NAT", - "comment": "NAT", - "extattrs": { - "IPPlan Last Modified": { - "value": "2011-09-28T11:01:00Z" - }, - "IPPlan UserID": { - "value": "admin" - } +[ + { + "_ref": 
"networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDEwLjYxLjE1LjAvMjQvMA:10.61.15.0/24/NAT", + "comment": "NAT", + "extattrs": { + "IPPlan Last Modified": { + "value": "2011-09-28T11:01:00Z" }, - "network": "10.61.15.0/24", - "network_view": "default", - "rir": "NONE", - "status": "container" + "IPPlan UserID": { + "value": "admin" + } }, - { - "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4zMC4zMi4wLzIyLzA:172.30.32.0/22/2%20NAT", - "comment": "NAT", - "extattrs": {}, - "network": "172.30.32.0/22", - "network_view": "default", - "rir": "NONE", - "status": "container" - }, - { - "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4xOS42NS4wLzI0LzA:172.19.65.0/24/3%20NAT", - "comment": "NAT", - "extattrs": {}, - "network": "172.19.65.0/24", - "network_view": "default", - "rir": "NONE", - "status": "container" - } - ] -} \ No newline at end of file + "network": "10.61.15.0/24", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4zMC4zMi4wLzIyLzA:172.30.32.0/22/2%20NAT", + "comment": "NAT", + "extattrs": {}, + "network": "172.30.32.0/22", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4xOS42NS4wLzI0LzA:172.19.65.0/24/3%20NAT", + "comment": "NAT", + "extattrs": {}, + "network": "172.19.65.0/24", + "network_view": "default", + "rir": "NONE", + "status": "container" + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json index f5cf8b97b..cf501eb4a 100644 --- a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json +++ b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json @@ -1,52 +1,50 @@ -{ - "result": [ - { - "_ref": 
"ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQxMDA6Oi80MC8w:2001%3A5b0%3A4100%3A%3A/40/Gateway%201", - "comment": "Gateway 1", - "extattrs": { - "GWID": { - "value": "ABC" - }, - "GWType": { - "value": "Test" - } +[ + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQxMDA6Oi80MC8w:2001%3A5b0%3A4100%3A%3A/40/Gateway%201", + "comment": "Gateway 1", + "extattrs": { + "GWID": { + "value": "ABC" }, - "network": "2001:5b0:4100::/40", - "network_view": "default", - "rir": "NONE", - "status": "container" + "GWType": { + "value": "Test" + } }, - { - "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQyMDA6Oi80MC8w:2001%3A5b0%3A4200%3A%3A/40/Gateway%202", - "comment": "Gateway 2", - "extattrs": { - "GWID": { - "value": "ABC" - }, - "GWType": { - "value": "Test" - } + "network": "2001:5b0:4100::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQyMDA6Oi80MC8w:2001%3A5b0%3A4200%3A%3A/40/Gateway%202", + "comment": "Gateway 2", + "extattrs": { + "GWID": { + "value": "ABC" }, - "network": "2001:5b0:4200::/40", - "network_view": "default", - "rir": "NONE", - "status": "container" + "GWType": { + "value": "Test" + } }, - { - "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQzMDA6Oi80MC8w:2001%3A5b0%3A4300%3A%3A/40/Gateway%203", - "comment": "Gateway 3", - "extattrs": { - "GWID": { - "value": "XYZ" - }, - "GWType": { - "value": "Test" - } + "network": "2001:5b0:4200::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQzMDA6Oi80MC8w:2001%3A5b0%3A4300%3A%3A/40/Gateway%203", + "comment": "Gateway 3", + "extattrs": { + "GWID": { + "value": "XYZ" }, - "network": "2001:5b0:4300::/40", - "network_view": "default", - "rir": "NONE", - "status": "container" - } - ] -} \ No newline at end of 
file + "GWType": { + "value": "Test" + } + }, + "network": "2001:5b0:4300::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py index 144f5ea62..3fdf5ac20 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py @@ -10,6 +10,8 @@ from nautobot_ssot.integrations.infoblox.jobs import InfobloxDataSource from nautobot_ssot.integrations.infoblox.constant import PLUGIN_CFG +from diffsync.exceptions import ObjectNotFound + def load_json(path): """Load a json file.""" @@ -19,28 +21,15 @@ def load_json(path): CONTAINER_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json") SUBNET_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json") -# DEVICE_INVENTORY_FIXTURE = load_json("./nautobot_ssot/tests/ipfabric/fixtures/get_device_inventory.json") -# VLAN_FIXTURE = load_json("./nautobot_ssot/tests/ipfabric/fixtures/get_vlans.json") -# INTERFACE_FIXTURE = load_json("./nautobot_ssot/tests/ipfabric/fixtures/get_interface_inventory.json") +IPV6_CONTAINER_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json") +IPV6_SUBNET_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json") class InfobloxDiffSyncTestCase(TestCase): """Test the InfobloxDiffSync adapter class.""" - # def setUp(self) -> None: - # # Create a mock client - # self.conn = MagicMock() - - # self.job = InfobloxDataSource() - # self.job.job_result = JobResult.objects.create( - # name=self.job.class_path, task_name="fake task", worker="default" - # ) - # self.infoblox = InfobloxAdapter(job=self.job, sync=None, conn=self.conn) - # return super().setUp() - - @patch("PLUGIN_CFG", {"infoblox_import_subnets": False}) - def test_load_prefixes(self): - 
"""Test the load_prefixes function.""" + def setUp(self) -> None: + # Create a mock client self.conn = MagicMock() self.job = InfobloxDataSource() @@ -48,57 +37,38 @@ def test_load_prefixes(self): name=self.job.class_path, task_name="fake task", worker="default" ) self.infoblox = InfobloxAdapter(job=self.job, sync=None, conn=self.conn) + return super().setUp() + + @patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.PLUGIN_CFG", + {"infoblox_import_subnets": False, "infoblox_import_objects_subnets_ipv6": False}, + ) + def test_load_prefixes(self): + """Test the load_prefixes function.""" self.conn.get_all_subnets.return_value = SUBNET_FIXTURE self.conn.get_network_containers.return_value = CONTAINER_FIXTURE - # print(self.conn.get_network_containers()) - # with patch.object(InfobloxApi, "get_all_subnets", self.conn.get_all_subnets): - # with patch.object(InfobloxApi, "get_network_containers", self.conn.get_network_containers): - # with patch.object(PLUGIN_CFG, {"infoblox_import_subnets": False}): self.infoblox.load_prefixes() - # # print(self.infoblox.get(InfobloxNetwork, {"network": "10.61.15.0/24"})) - # self.assertEqual(True, False) - - # self.ipfabric.load() - # self.assertEqual( - # {site["siteName"] for site in SITE_FIXTURE}, - # {site.get_unique_id() for site in ipfabric.get_all("location")}, - # ) - # self.assertEqual( - # {dev["hostname"] for dev in DEVICE_INVENTORY_FIXTURE}, - # {dev.get_unique_id() for dev in ipfabric.get_all("device")}, - # ) - # self.assertEqual( - # {f"{vlan['vlanName']}__{vlan['siteName']}" for vlan in VLAN_FIXTURE}, - # {vlan.get_unique_id() for vlan in ipfabric.get_all("vlan")}, - # ) - - # # Assert each site has a device tied to it. 
- # for site in ipfabric.get_all("location"): - # self.assertEqual(len(site.devices), 1, f"{site} does not have the expected single device tied to it.") - # self.assertTrue(hasattr(site, "vlans")) - - # # Assert each device has the necessary attributes - # for device in ipfabric.get_all("device"): - # self.assertTrue(hasattr(device, "location_name")) - # self.assertTrue(hasattr(device, "model")) - # self.assertTrue(hasattr(device, "vendor")) - # self.assertTrue(hasattr(device, "serial_number")) - # self.assertTrue(hasattr(device, "interfaces")) - - # # Assert each vlan has the necessary attributes - # for vlan in ipfabric.get_all("vlan"): - # self.assertTrue(hasattr(vlan, "name")) - # self.assertTrue(hasattr(vlan, "vid")) - # self.assertTrue(hasattr(vlan, "status")) - # self.assertTrue(hasattr(vlan, "location")) - # self.assertTrue(hasattr(vlan, "description")) - - # # Assert each interface has the necessary attributes - # for interface in ipfabric.get_all("interface"): - # self.assertTrue(hasattr(interface, "name")) - # self.assertTrue(hasattr(interface, "device_name")) - # self.assertTrue(hasattr(interface, "mac_address")) - # self.assertTrue(hasattr(interface, "mtu")) - # self.assertTrue(hasattr(interface, "ip_address")) - # self.assertTrue(hasattr(interface, "subnet_mask")) - # self.assertTrue(hasattr(interface, "type")) + self.assertEqual(str(self.infoblox.get(InfobloxNetwork, {"network": "10.61.15.0/24"})), "10.61.15.0/24") + with self.assertRaises(ObjectNotFound): + self.infoblox.get(InfobloxNetwork, {"network": "2001:5b0:4100::/40"}) + + @patch( + "nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox.PLUGIN_CFG", + {"infoblox_import_subnets": False, "infoblox_import_objects_subnets_ipv6": True}, + ) + def test_load_prefixes_ipv6(self): + """Test the load_prefixes function with IPv6 import set in nautobot_config.""" + + def mock_get_network_containers(ipv6=False): + if ipv6: + return IPV6_CONTAINER_FIXTURE + else: + return CONTAINER_FIXTURE + + 
self.conn.get_network_containers.side_effect = mock_get_network_containers + self.conn.get_all_subnets.return_value = SUBNET_FIXTURE + self.infoblox.load_prefixes() + self.assertEqual(str(self.infoblox.get(InfobloxNetwork, {"network": "10.61.15.0/24"})), "10.61.15.0/24") + self.assertEqual( + str(self.infoblox.get(InfobloxNetwork, {"network": "2001:5b0:4100::/40"})), "2001:5b0:4100::/40" + ) From bb3b8e29e2746f644796adffcf26c17c5cbef5c0 Mon Sep 17 00:00:00 2001 From: Adam Byczkowski <38091261+qduk@users.noreply.github.com> Date: Mon, 15 Jan 2024 11:57:34 -0600 Subject: [PATCH 32/47] Updated tests --- .../infoblox/fixtures/get_all_subnets.json | 26 +++--- .../fixtures/get_all_subnets_list.json | 12 +++ .../fixtures/get_network_containers.json | 70 +++++++------- .../fixtures/get_network_containers_ipv6.json | 92 ++++++++++--------- .../get_network_containers_ipv6_list.json | 50 ++++++++++ .../fixtures/get_network_containers_list.json | 36 ++++++++ .../tests/infoblox/test_infoblox_adapter.py | 16 ++-- 7 files changed, 201 insertions(+), 101 deletions(-) create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_all_subnets_list.json create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6_list.json create mode 100644 nautobot_ssot/tests/infoblox/fixtures/get_network_containers_list.json diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json b/nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json index d910b8ee1..ff9bc5c58 100644 --- a/nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json +++ b/nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json @@ -1,12 +1,14 @@ -[ - { - "_ref": "network/ZG5zLm5ldHdvcmskMTAuMjIzLjAuMC8yMS8w:10.223.0.0/21/default", - "network": "10.223.0.0/21", - "network_view": "default" - }, - { - "_ref": "network/ZG5zLm5ldHdvcmskMTAuMjIwLjY0LjAvMjEvMA:10.220.64.0/21/default", - "network": "10.220.64.0/21", - "network_view": "default" - } -] \ No newline at end of file +{ + "result": [ 
+ { + "_ref": "network/ZG5zLm5ldHdvcmskMTAuMjIzLjAuMC8yMS8w:10.223.0.0/21/default", + "network": "10.223.0.0/21", + "network_view": "default" + }, + { + "_ref": "network/ZG5zLm5ldHdvcmskMTAuMjIwLjY0LjAvMjEvMA:10.220.64.0/21/default", + "network": "10.220.64.0/21", + "network_view": "default" + } + ] +} \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_all_subnets_list.json b/nautobot_ssot/tests/infoblox/fixtures/get_all_subnets_list.json new file mode 100644 index 000000000..d910b8ee1 --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_all_subnets_list.json @@ -0,0 +1,12 @@ +[ + { + "_ref": "network/ZG5zLm5ldHdvcmskMTAuMjIzLjAuMC8yMS8w:10.223.0.0/21/default", + "network": "10.223.0.0/21", + "network_view": "default" + }, + { + "_ref": "network/ZG5zLm5ldHdvcmskMTAuMjIwLjY0LjAvMjEvMA:10.220.64.0/21/default", + "network": "10.220.64.0/21", + "network_view": "default" + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json index 6e4fa8b1a..ef019768d 100644 --- a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json +++ b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json @@ -1,36 +1,38 @@ -[ - { - "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDEwLjYxLjE1LjAvMjQvMA:10.61.15.0/24/NAT", - "comment": "NAT", - "extattrs": { - "IPPlan Last Modified": { - "value": "2011-09-28T11:01:00Z" +{ + "result": [ + { + "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDEwLjYxLjE1LjAvMjQvMA:10.61.15.0/24/NAT", + "comment": "NAT", + "extattrs": { + "IPPlan Last Modified": { + "value": "2011-09-28T11:01:00Z" + }, + "IPPlan UserID": { + "value": "admin" + } }, - "IPPlan UserID": { - "value": "admin" - } + "network": "10.61.15.0/24", + "network_view": "default", + "rir": "NONE", + "status": "container" }, - "network": "10.61.15.0/24", - "network_view": "default", - "rir": "NONE", - 
"status": "container" - }, - { - "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4zMC4zMi4wLzIyLzA:172.30.32.0/22/2%20NAT", - "comment": "NAT", - "extattrs": {}, - "network": "172.30.32.0/22", - "network_view": "default", - "rir": "NONE", - "status": "container" - }, - { - "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4xOS42NS4wLzI0LzA:172.19.65.0/24/3%20NAT", - "comment": "NAT", - "extattrs": {}, - "network": "172.19.65.0/24", - "network_view": "default", - "rir": "NONE", - "status": "container" - } -] \ No newline at end of file + { + "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4zMC4zMi4wLzIyLzA:172.30.32.0/22/2%20NAT", + "comment": "NAT", + "extattrs": {}, + "network": "172.30.32.0/22", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4xOS42NS4wLzI0LzA:172.19.65.0/24/3%20NAT", + "comment": "NAT", + "extattrs": {}, + "network": "172.19.65.0/24", + "network_view": "default", + "rir": "NONE", + "status": "container" + } + ] +} \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json index cf501eb4a..f5cf8b97b 100644 --- a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json +++ b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json @@ -1,50 +1,52 @@ -[ - { - "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQxMDA6Oi80MC8w:2001%3A5b0%3A4100%3A%3A/40/Gateway%201", - "comment": "Gateway 1", - "extattrs": { - "GWID": { - "value": "ABC" +{ + "result": [ + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQxMDA6Oi80MC8w:2001%3A5b0%3A4100%3A%3A/40/Gateway%201", + "comment": "Gateway 1", + "extattrs": { + "GWID": { + "value": "ABC" + }, + "GWType": { + "value": "Test" + } }, - "GWType": { - "value": "Test" - } + "network": 
"2001:5b0:4100::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" }, - "network": "2001:5b0:4100::/40", - "network_view": "default", - "rir": "NONE", - "status": "container" - }, - { - "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQyMDA6Oi80MC8w:2001%3A5b0%3A4200%3A%3A/40/Gateway%202", - "comment": "Gateway 2", - "extattrs": { - "GWID": { - "value": "ABC" + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQyMDA6Oi80MC8w:2001%3A5b0%3A4200%3A%3A/40/Gateway%202", + "comment": "Gateway 2", + "extattrs": { + "GWID": { + "value": "ABC" + }, + "GWType": { + "value": "Test" + } }, - "GWType": { - "value": "Test" - } + "network": "2001:5b0:4200::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" }, - "network": "2001:5b0:4200::/40", - "network_view": "default", - "rir": "NONE", - "status": "container" - }, - { - "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQzMDA6Oi80MC8w:2001%3A5b0%3A4300%3A%3A/40/Gateway%203", - "comment": "Gateway 3", - "extattrs": { - "GWID": { - "value": "XYZ" + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQzMDA6Oi80MC8w:2001%3A5b0%3A4300%3A%3A/40/Gateway%203", + "comment": "Gateway 3", + "extattrs": { + "GWID": { + "value": "XYZ" + }, + "GWType": { + "value": "Test" + } }, - "GWType": { - "value": "Test" - } - }, - "network": "2001:5b0:4300::/40", - "network_view": "default", - "rir": "NONE", - "status": "container" - } -] \ No newline at end of file + "network": "2001:5b0:4300::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" + } + ] +} \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6_list.json b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6_list.json new file mode 100644 index 000000000..cf501eb4a --- /dev/null +++ 
b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6_list.json @@ -0,0 +1,50 @@ +[ + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQxMDA6Oi80MC8w:2001%3A5b0%3A4100%3A%3A/40/Gateway%201", + "comment": "Gateway 1", + "extattrs": { + "GWID": { + "value": "ABC" + }, + "GWType": { + "value": "Test" + } + }, + "network": "2001:5b0:4100::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQyMDA6Oi80MC8w:2001%3A5b0%3A4200%3A%3A/40/Gateway%202", + "comment": "Gateway 2", + "extattrs": { + "GWID": { + "value": "ABC" + }, + "GWType": { + "value": "Test" + } + }, + "network": "2001:5b0:4200::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "ipv6networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDIwMDE6NWIwOjQzMDA6Oi80MC8w:2001%3A5b0%3A4300%3A%3A/40/Gateway%203", + "comment": "Gateway 3", + "extattrs": { + "GWID": { + "value": "XYZ" + }, + "GWType": { + "value": "Test" + } + }, + "network": "2001:5b0:4300::/40", + "network_view": "default", + "rir": "NONE", + "status": "container" + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_list.json b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_list.json new file mode 100644 index 000000000..6e4fa8b1a --- /dev/null +++ b/nautobot_ssot/tests/infoblox/fixtures/get_network_containers_list.json @@ -0,0 +1,36 @@ +[ + { + "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDEwLjYxLjE1LjAvMjQvMA:10.61.15.0/24/NAT", + "comment": "NAT", + "extattrs": { + "IPPlan Last Modified": { + "value": "2011-09-28T11:01:00Z" + }, + "IPPlan UserID": { + "value": "admin" + } + }, + "network": "10.61.15.0/24", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4zMC4zMi4wLzIyLzA:172.30.32.0/22/2%20NAT", + 
"comment": "NAT", + "extattrs": {}, + "network": "172.30.32.0/22", + "network_view": "default", + "rir": "NONE", + "status": "container" + }, + { + "_ref": "networkcontainer/ZG5zLm5ldHdvcmtfY29udGFpbmVyJDE3Mi4xOS42NS4wLzI0LzA:172.19.65.0/24/3%20NAT", + "comment": "NAT", + "extattrs": {}, + "network": "172.19.65.0/24", + "network_view": "default", + "rir": "NONE", + "status": "container" + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py index 3fdf5ac20..a24c4f953 100644 --- a/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py +++ b/nautobot_ssot/tests/infoblox/test_infoblox_adapter.py @@ -2,15 +2,13 @@ import json from unittest.mock import MagicMock, patch -from django.test import TestCase, override_settings +from django.test import TestCase from nautobot.extras.models import JobResult +from diffsync.exceptions import ObjectNotFound from nautobot_ssot.integrations.infoblox.diffsync.adapters.infoblox import InfobloxAdapter from nautobot_ssot.integrations.infoblox.diffsync.models.infoblox import InfobloxNetwork from nautobot_ssot.integrations.infoblox.jobs import InfobloxDataSource -from nautobot_ssot.integrations.infoblox.constant import PLUGIN_CFG - -from diffsync.exceptions import ObjectNotFound def load_json(path): @@ -19,10 +17,9 @@ def load_json(path): return json.loads(file.read()) -CONTAINER_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_network_containers.json") -SUBNET_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json") -IPV6_CONTAINER_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6.json") -IPV6_SUBNET_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_all_subnets.json") +CONTAINER_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_network_containers_list.json") +SUBNET_FIXTURE = 
load_json("./nautobot_ssot/tests/infoblox/fixtures/get_all_subnets_list.json") +IPV6_CONTAINER_FIXTURE = load_json("./nautobot_ssot/tests/infoblox/fixtures/get_network_containers_ipv6_list.json") class InfobloxDiffSyncTestCase(TestCase): @@ -62,8 +59,7 @@ def test_load_prefixes_ipv6(self): def mock_get_network_containers(ipv6=False): if ipv6: return IPV6_CONTAINER_FIXTURE - else: - return CONTAINER_FIXTURE + return CONTAINER_FIXTURE self.conn.get_network_containers.side_effect = mock_get_network_containers self.conn.get_all_subnets.return_value = SUBNET_FIXTURE From e45e2ec8bd343f80d611f778eb3e39df751f510c Mon Sep 17 00:00:00 2001 From: Adam Byczkowski <38091261+qduk@users.noreply.github.com> Date: Tue, 16 Jan 2024 11:56:59 -0600 Subject: [PATCH 33/47] Updated per personal review --- development/nautobot_config.py | 1 - .../infoblox/diffsync/adapters/infoblox.py | 1 - nautobot_ssot/integrations/infoblox/utils/client.py | 11 ++++++----- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/development/nautobot_config.py b/development/nautobot_config.py index e4ebbc141..6a9504d52 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -228,7 +228,6 @@ "infoblox_import_objects_vlan_views": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLAN_VIEWS")), "infoblox_import_objects_vlans": is_truthy(os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_OBJECTS_VLANS")), "infoblox_import_subnets": os.getenv("NAUTOBOT_SSOT_INFOBLOX_IMPORT_SUBNETS", "").split(","), - # "infoblox_import_subnets": False, "infoblox_password": os.getenv("NAUTOBOT_SSOT_INFOBLOX_PASSWORD"), "infoblox_url": os.getenv("NAUTOBOT_SSOT_INFOBLOX_URL"), "infoblox_username": os.getenv("NAUTOBOT_SSOT_INFOBLOX_USERNAME"), diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index 873a4e9f2..493246ac6 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ 
b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -70,7 +70,6 @@ def load_prefixes(self): else: # Need to load containers here to prevent duplicates when syncing back to Infoblox containers = self.conn.get_network_containers() - # raise Exception("Hello") subnets = self.conn.get_all_subnets() if PLUGIN_CFG.get("infoblox_import_objects_subnets_ipv6"): containers += self.conn.get_network_containers(ipv6=True) subnets += self.conn.get_all_subnets(ipv6=True) diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index 219606dc5..c62d5cc8d 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -20,11 +20,11 @@ def parse_url(address): """Handle outside case where protocol isn't included in URL address. - Args: - address (str): URL set by end user for Infoblox instance. + Args: + address (str): URL set by end user for Infoblox instance. - Returns: - ParseResult: The parsed results from urllib. + Returns: + ParseResult: The parsed results from urllib. """ if not re.search(r"^[A-Za-z0-9+.\-]+://", address): address = f"https://{address}" @@ -1270,7 +1270,8 @@ def get_network_containers(self, prefix: str = "", ipv6: bool = False): """Get all Network Containers. Args: - prefix (Str): Specific prefix (192.168.0.1/24) + prefix (str): Specific prefix (192.168.0.1/24) + ipv6 (bool): Whether the call should be made for IPv6 network containers. 
Returns: (list) of record dicts From 0a6dd55b89bf564f90a4668a350f64552af0e205 Mon Sep 17 00:00:00 2001 From: Jacob McGill Date: Mon, 8 Jan 2024 18:42:11 -0500 Subject: [PATCH 34/47] BugFix: Use correct attr name for vlan_group --- .../infoblox/diffsync/adapters/nautobot.py | 2 +- .../infoblox/diffsync/models/nautobot.py | 8 +-- .../integrations/infoblox/utils/nautobot.py | 2 +- .../tests/infoblox/test_nautobot_adapter.py | 57 ++++++++++++++++++ nautobot_ssot/tests/infoblox/test_utils.py | 59 +++++++++++++++++++ 5 files changed, 122 insertions(+), 6 deletions(-) create mode 100644 nautobot_ssot/tests/infoblox/test_nautobot_adapter.py diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py index 7aa21f238..97c3c9b8d 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/nautobot.py @@ -208,7 +208,7 @@ def load_vlans(self): # To ensure we are only dealing with VLANs imported from Infoblox we need to filter to those with a # VLAN Group assigned to match how Infoblox requires a VLAN View to be associated to VLANs. 
for vlan in VLAN.objects.filter(vlan_group__isnull=False): - if vlan.group.name not in self.vlan_map: + if vlan.vlan_group.name not in self.vlan_map: self.vlan_map[vlan.vlan_group.name] = {} self.vlan_map[vlan.vlan_group.name][vlan.vid] = vlan.id if "ssot_synced_to_infoblox" in vlan.custom_field_data: diff --git a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py index de5bd8ed5..0689626f2 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/models/nautobot.py @@ -149,7 +149,7 @@ def update(self, attrs): # pylint: disable=too-many-branches if len(current_vlans) < len(attrs["vlans"]): for _, item in attrs["vlans"].items(): try: - vlan = OrmVlan.objects.get(vid=item["vid"], name=item["name"], group__name=item["group"]) + vlan = OrmVlan.objects.get(vid=item["vid"], name=item["name"], vlan_group__name=item["group"]) if vlan not in current_vlans: if self.diffsync.job.get("debug"): self.diffsync.job.logger.debug(f"Adding VLAN {vlan.vid} to {_pf.prefix}.") @@ -339,9 +339,9 @@ def update(self, attrs): _vlan.description = attrs["description"] if "ext_attrs" in attrs: process_ext_attrs(diffsync=self.diffsync, obj=_vlan, extattrs=attrs["ext_attrs"]) - if not _vlan.group.location and _vlan.location: - _vlan.group.location = _vlan.location - _vlan.group.validated_save() + if not _vlan.vlan_group.location and _vlan.location: + _vlan.vlan_group.location = _vlan.location + _vlan.vlan_group.validated_save() try: _vlan.validated_save() except ValidationError as err: diff --git a/nautobot_ssot/integrations/infoblox/utils/nautobot.py b/nautobot_ssot/integrations/infoblox/utils/nautobot.py index e1632084c..3f148b3ee 100644 --- a/nautobot_ssot/integrations/infoblox/utils/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/utils/nautobot.py @@ -13,7 +13,7 @@ def build_vlan_map_from_relations(vlans: list): """ vlan_map = {} for vlan 
in vlans: - vlan_map[vlan.vid] = {"vid": vlan.vid, "name": vlan.name, "group": vlan.group.name} + vlan_map[vlan.vid] = {"vid": vlan.vid, "name": vlan.name, "group": vlan.vlan_group.name} return vlan_map diff --git a/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py new file mode 100644 index 000000000..350d210a7 --- /dev/null +++ b/nautobot_ssot/tests/infoblox/test_nautobot_adapter.py @@ -0,0 +1,57 @@ +"""Nautobot Adapter tests.""" +from django.test import TestCase + +from nautobot.extras.models import Status +from nautobot.ipam.models import VLAN, VLANGroup + +from nautobot_ssot.integrations.infoblox.diffsync.adapters.nautobot import NautobotAdapter + + +class TestNautobotAdapter(TestCase): + """Test cases for InfoBlox Nautobot adapter.""" + + def setUp(self): + active_status = Status.objects.get(name="Active") + vlan_group1 = VLANGroup.objects.create(name="one") + vlan_group2 = VLANGroup.objects.create(name="two") + VLAN.objects.create( + vid=10, + name="ten", + status=active_status, + ) + VLAN.objects.create( + vid=20, + name="twenty", + status=active_status, + vlan_group=vlan_group1, + ) + VLAN.objects.create( + vid=30, + name="thirty", + status=active_status, + vlan_group=vlan_group1, + ) + VLAN.objects.create( + vid=40, + name="forty", + status=active_status, + vlan_group=vlan_group2, + ) + VLAN.objects.create( + vid=50, + name="fifty", + status=active_status, + vlan_group=vlan_group2, + ) + self.nb_adapter = NautobotAdapter() + + def test_load_vlans_loads_expected_vlans(self): + self.nb_adapter.load_vlans() + expected_vlans = {"20__twenty__one", "30__thirty__one", "40__forty__two", "50__fifty__two"} + actual_vlans = {vlan.get_unique_id() for vlan in self.nb_adapter.get_all("vlan")} + self.assertEqual(expected_vlans, actual_vlans) + + def test_load_vlans_does_not_load_ungrouped_vlans(self): + self.nb_adapter.load_vlans() + actual_vlan_ids = {vlan.get_identifiers()["vid"] for vlan in 
self.nb_adapter.get_all("vlan")} + self.assertFalse(10 in actual_vlan_ids) diff --git a/nautobot_ssot/tests/infoblox/test_utils.py b/nautobot_ssot/tests/infoblox/test_utils.py index d0069be05..187fbf141 100644 --- a/nautobot_ssot/tests/infoblox/test_utils.py +++ b/nautobot_ssot/tests/infoblox/test_utils.py @@ -1,11 +1,15 @@ """Util tests that do not require Django.""" import unittest +from nautobot.extras.models import Status +from nautobot.ipam.models import VLAN, VLANGroup + from nautobot_ssot.integrations.infoblox.utils.diffsync import ( get_vlan_view_name, nautobot_vlan_status, get_ext_attr_dict, ) +from nautobot_ssot.integrations.infoblox.utils.nautobot import build_vlan_map_from_relations class TestUtils(unittest.TestCase): @@ -29,3 +33,58 @@ def test_get_ext_attr_dict(self): expected = {"site": "HQ", "region": "Central"} standardized_dict = get_ext_attr_dict(test_dict) self.assertEqual(standardized_dict, expected) + + +class TestNautobotUtils(unittest.TestCase): + """Test infoblox.utils.nautobot.py.""" + + def setUp(self): + """Setup Test Cases.""" + active_status = Status.objects.get(name="Active") + self.vlan_group_1 = VLANGroup.objects.create(name="one") + self.vlan_group_2 = VLANGroup.objects.create(name="two") + self.vlan_10 = VLAN.objects.create( + vid=10, + name="ten", + status=active_status, + vlan_group=self.vlan_group_1, + ) + self.vlan_20 = VLAN.objects.create( + vid=20, + name="twenty", + status=active_status, + vlan_group=self.vlan_group_1, + ) + self.vlan_30 = VLAN.objects.create( + vid=30, + name="thirty", + status=active_status, + vlan_group=self.vlan_group_2, + ) + + def tearDown(self): + for obj in [self.vlan_10, self.vlan_20, self.vlan_30, self.vlan_group_1, self.vlan_group_2]: + obj.delete() + + def test_build_vlan_map_from_relations(self): + """Test VLAN map is built correctly.""" + + actual = build_vlan_map_from_relations([self.vlan_10, self.vlan_20, self.vlan_30]) + expected = { + 10: { + "vid": 10, + "name": "ten", + "group": "one", + 
}, + 20: { + "vid": 20, + "name": "twenty", + "group": "one", + }, + 30: { + "vid": 30, + "name": "thirty", + "group": "two", + }, + } + self.assertEqual(actual, expected) From 4952233158edee402fd242ed2b7a635d6c8befdd Mon Sep 17 00:00:00 2001 From: Adam Byczkowski <38091261+qduk@users.noreply.github.com> Date: Tue, 16 Jan 2024 12:13:57 -0600 Subject: [PATCH 35/47] Linted --- nautobot_ssot/integrations/infoblox/utils/client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index c62d5cc8d..cf4c066dc 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -20,11 +20,11 @@ def parse_url(address): """Handle outside case where protocol isn't included in URL address. - Args: - address (str): URL set by end user for Infoblox instance. + Args: + address (str): URL set by end user for Infoblox instance. - get Returns: - ParseResult: The parsed results from urllib. + Returns: + ParseResult: The parsed results from urllib. 
""" if not re.search(r"^[A-Za-z0-9+.\-]+://", address): address = f"https://{address}" From 70eccf2a0f8e14801a8c989d0cd1c94c4687381c Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 17 Jan 2024 10:05:57 -0600 Subject: [PATCH 36/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20check=20f?= =?UTF-8?q?or=20existing=20Prefix=20to=20use=20both=20identifiers.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/aristacv/diffsync/adapters/nautobot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index 0bd6f1db0..165f98efe 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -110,7 +110,7 @@ def load_ip_addresses(self): ) self.add(new_ns) try: - self.get(self.prefix, str(ipaddr.parent.prefix)) + self.get(self.prefix, {"prefix": str(ipaddr.parent.prefix), "namespace": ipaddr.parent.namespace.name}) except ObjectNotFound: new_pf = self.prefix( prefix=str(ipaddr.parent.prefix), From 347f375d5bd912d2bdd010cbb8399c4a3d2bce1a Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 17 Jan 2024 10:06:39 -0600 Subject: [PATCH 37/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20get=20for?= =?UTF-8?q?=20Controller=20Relationship=20to=20use=20label=20and=20not=20n?= =?UTF-8?q?ame.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/aristacv/diffsync/adapters/nautobot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index 165f98efe..a44f38a41 100644 --- 
a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -150,7 +150,7 @@ def sync_complete(self, source: DiffSync, *args, **kwargs): # if Controller is created we need to ensure all imported Devices have RelationshipAssociation to it. if APP_SETTINGS.get("aristacv_create_controller"): self.job.logger.info("Creating Relationships between CloudVision and connected Devices.") - controller_relation = OrmRelationship.objects.get(name="Controller -> Device") + controller_relation = OrmRelationship.objects.get(label="Controller -> Device") device_ct = ContentType.objects.get_for_model(OrmDevice) cvp = OrmDevice.objects.get(name="CloudVision") loaded_devices = source.dict()["device"] From fecda8d31169820058b4971bcf8e7fa16230b145 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 17 Jan 2024 10:09:00 -0600 Subject: [PATCH 38/47] =?UTF-8?q?refactor:=20=E2=99=BB=EF=B8=8F=20Use=20bl?= =?UTF-8?q?ank=20string=20instead=20of=20None=20for=20defaults=20in=20init?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/integrations/aristacv/utils/cloudvision.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py index 4dbe8f464..eec5ed13b 100644 --- a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py @@ -49,11 +49,11 @@ class CloudvisionApi: # pylint: disable=too-many-instance-attributes, too-many- def __init__( self, cvp_host: str, - cvp_port: str = None, + cvp_port: str = "", verify: bool = True, - username: str = None, - password: str = None, - cvp_token: str = None, + username: str = "", + password: str = "", + cvp_token: str = "", ): """Create Cloudvision API connection.""" self.metadata = 
None From 4c1ee1ee9d27f573ae70cbd241239dddbf81eba8 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 17 Jan 2024 10:09:48 -0600 Subject: [PATCH 39/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Refactor=20get=5Fcv?= =?UTF-8?q?p=5Fversion()=20to=20work=20with=20CVaaS.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../aristacv/utils/cloudvision.py | 26 +++++++++++++------ 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py index eec5ed13b..fa51f9580 100644 --- a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py @@ -694,14 +694,24 @@ def get_cvp_version(): """ client = CvpClient() try: - client.connect( - [APP_SETTINGS["aristacv_cvp_host"]], - APP_SETTINGS["aristacv_cvp_user"], - APP_SETTINGS["aristacv_cvp_password"], - ) - version = client.api.get_cvp_info() - if "version" in version: - return version["version"] + if APP_SETTINGS.get("aristacv_cvp_token") and not APP_SETTINGS.get("aristacv_cvp_host"): + client.connect( + nodes=[APP_SETTINGS["aristacv_cvaas_url"]], + username="", + password="", + is_cvaas=True, + api_token=APP_SETTINGS.get("aristacv_cvp_token"), + ) + else: + client.connect( + nodes=[APP_SETTINGS["aristacv_cvp_host"]], + username=APP_SETTINGS.get("aristacv_cvp_user"), + password=APP_SETTINGS.get("aristacv_cvp_password"), + is_cvaas=False, + ) except CvpLoginError as err: raise AuthFailure(error_code="Failed Login", message=f"Unable to login to CloudVision Portal. 
{err}") from err + version = client.api.get_cvp_info() + if "version" in version: + return version["version"] return "" From bb9df2277c7ad665001d36a20d36b5ea7d79cb76 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 17 Jan 2024 10:10:23 -0600 Subject: [PATCH 40/47] =?UTF-8?q?feat:=20=E2=9C=A8=20Add=20VXLAN=20Configu?= =?UTF-8?q?red=20CustomField=20so=20displayed=20in=20UI=20correctly.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nautobot_ssot/integrations/aristacv/signals.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/nautobot_ssot/integrations/aristacv/signals.py b/nautobot_ssot/integrations/aristacv/signals.py index 7ba3c9b73..46748e6e1 100644 --- a/nautobot_ssot/integrations/aristacv/signals.py +++ b/nautobot_ssot/integrations/aristacv/signals.py @@ -102,6 +102,11 @@ def post_migrate_create_custom_fields(apps=global_apps, **kwargs): "type": CustomFieldTypeChoices.TYPE_TEXT, "label": "Topology Pod", }, + { + "key": "arista_vxlanConfigured", + "type": CustomFieldTypeChoices.TYPE_BOOLEAN, + "label": "VXLAN Configured", + }, ]: field, _ = CustomField.objects.update_or_create( key=device_cf_dict["key"], From bd4dd7059493593c0ac8b264efeab06f71afe55b Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 17 Jan 2024 10:10:55 -0600 Subject: [PATCH 41/47] =?UTF-8?q?fix:=20=F0=9F=90=9B=20Correct=20tag=20val?= =?UTF-8?q?ue=20when=20a=20Boolean=20so=20diff=20lines=20up.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../integrations/aristacv/diffsync/adapters/cloudvision.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py index 871444d83..40c460f2e 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py +++ 
b/nautobot_ssot/integrations/aristacv/diffsync/adapters/cloudvision.py @@ -243,6 +243,9 @@ def load_device_tags(self, device): if tag["label"] == "mpls" or tag["label"] == "ztp": tag["value"] = bool(distutils.util.strtobool(tag["value"])) + if tag["value"] in ["true", "false"]: + tag["value"] = bool(distutils.util.strtobool(tag["value"])) + new_cf = self.cf( name=f"arista_{tag['label']}", value=tag["value"], From b5559e2fc187a7614329c0caaf352996ca07a66d Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 17 Jan 2024 10:11:58 -0600 Subject: [PATCH 42/47] =?UTF-8?q?fix:=20=E2=99=BB=EF=B8=8F=20Update=20Pref?= =?UTF-8?q?ixes/IPAddress=20model=20to=20allow=20delete,=20use=20objects?= =?UTF-8?q?=5Fto=5Fdelete=20to=20control=20order=20of=20object=20deletion.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../aristacv/diffsync/adapters/nautobot.py | 19 ++++++++++++++++++ .../aristacv/diffsync/models/base.py | 4 ---- .../aristacv/diffsync/models/nautobot.py | 20 ++++++++++++++++--- 3 files changed, 36 insertions(+), 7 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py index a44f38a41..8cc12e337 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/adapters/nautobot.py @@ -1,5 +1,7 @@ """DiffSync adapter for Nautobot.""" +from collections import defaultdict from django.contrib.contenttypes.models import ContentType +from django.db.models import ProtectedError from nautobot.dcim.models import Device as OrmDevice from nautobot.dcim.models import Interface as OrmInterface from nautobot.extras.models import Relationship as OrmRelationship @@ -39,6 +41,7 @@ def __init__(self, *args, job=None, **kwargs): """Initialize the Nautobot DiffSync adapter.""" super().__init__(*args, **kwargs) self.job = job + 
self.objects_to_delete = defaultdict(list) def load_devices(self): """Add Nautobot Device objects as DiffSync Device models.""" @@ -147,6 +150,22 @@ def sync_complete(self, source: DiffSync, *args, **kwargs): Args: source (DiffSync): Source DiffSync DataSource adapter. """ + for grouping in ( + "ipaddresses", + "prefixes", + "namespaces", + "interfaces", + "devices", + ): + for nautobot_object in self.objects_to_delete[grouping]: + try: + if self.job.debug: + self.job.logger.info(f"Deleting {nautobot_object}.") + nautobot_object.delete() + except ProtectedError as err: + self.job.logger.warning(f"Deletion failed for protected object: {nautobot_object}. {err}") + self.objects_to_delete[grouping] = [] + # if Controller is created we need to ensure all imported Devices have RelationshipAssociation to it. if APP_SETTINGS.get("aristacv_create_controller"): self.job.logger.info("Creating Relationships between CloudVision and connected Devices.") diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/base.py b/nautobot_ssot/integrations/aristacv/diffsync/models/base.py index c0a0a0971..109aa2770 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/base.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/base.py @@ -73,8 +73,6 @@ class Namespace(DiffSyncModel): class Prefix(DiffSyncModel): """Prefix Model.""" - model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST - _modelname = "prefix" _identifiers = ("prefix", "namespace") _attributes = () @@ -88,8 +86,6 @@ class Prefix(DiffSyncModel): class IPAddress(DiffSyncModel): """IPAddress Model.""" - model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST - _modelname = "ipaddr" _identifiers = ( "address", diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py index d02398f7f..a67708862 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py +++ 
b/nautobot_ssot/integrations/aristacv/diffsync/models/nautobot.py @@ -146,7 +146,7 @@ def delete(self): if APP_SETTINGS.get("aristacv_delete_devices_on_sync", DEFAULT_DELETE_DEVICES_ON_SYNC): self.diffsync.job.logger.warning(f"Device {self.name} will be deleted per app settings.") device = OrmDevice.objects.get(id=self.uuid) - device.delete() + self.diffsync.objects_to_delete["devices"].append(device) super().delete() return self @@ -247,7 +247,7 @@ def delete(self): if self.diffsync.job.debug: self.diffsync.job.logger.warning(f"Interface {self.name} for {self.device} will be deleted.") _port = OrmInterface.objects.get(id=self.uuid) - _port.delete() + self.diffsync.objects_to_delete["interfaces"].append(_port) return self @@ -269,7 +269,7 @@ def delete(self): """Delete Namespace in Nautobot.""" super().delete() _ns = OrmNamespace.objects.get(id=self.uuid) - _ns.delete() + self.diffsync.objects_to_delete["namespaces"].append(_ns) return self @@ -289,6 +289,13 @@ def create(cls, diffsync, ids, attrs): _pf.validated_save() return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + def delete(self): + """Delete Prefix in Nautobot.""" + super().delete() + _pf = OrmPrefix.objects.get(id=self.uuid) + self.diffsync.objects_to_delete["prefixes"].append(_pf) + return self + class NautobotIPAddress(IPAddress): """Nautobot IPAddress model.""" @@ -306,6 +313,13 @@ def create(cls, diffsync, ids, attrs): new_ip.validated_save() return super().create(ids=ids, diffsync=diffsync, attrs=attrs) + def delete(self): + """Delete IPAddress in Nautobot.""" + super().delete() + ipaddr = OrmIPAddress.objects.get(id=self.uuid) + self.diffsync.objects_to_delete["ipaddresses"].append(ipaddr) + return self + class NautobotIPAssignment(IPAssignment): """Nautobot IPAssignment model.""" From 5075d6e46ef3aea6ee3c2579f34f219dbad92624 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Wed, 17 Jan 2024 10:58:28 -0600 Subject: [PATCH 43/47] 
=?UTF-8?q?test:=20=E2=9C=85=20Fix=20issues=20with?= =?UTF-8?q?=20tests.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Removed unused import, ignore password issue from bandit, and patch CVaaS setting so env file changes don't affect the test. --- nautobot_ssot/integrations/aristacv/diffsync/models/base.py | 1 - nautobot_ssot/integrations/aristacv/utils/cloudvision.py | 2 +- nautobot_ssot/tests/aristacv/test_utils_cloudvision.py | 4 ++++ 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/nautobot_ssot/integrations/aristacv/diffsync/models/base.py b/nautobot_ssot/integrations/aristacv/diffsync/models/base.py index 109aa2770..89028522b 100644 --- a/nautobot_ssot/integrations/aristacv/diffsync/models/base.py +++ b/nautobot_ssot/integrations/aristacv/diffsync/models/base.py @@ -1,7 +1,6 @@ """DiffSyncModel subclasses for Nautobot-to-AristaCV data sync.""" from uuid import UUID from diffsync import DiffSyncModel -from diffsync.enum import DiffSyncModelFlags from typing import List, Optional diff --git a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py index fa51f9580..5757ca4a3 100644 --- a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py @@ -698,7 +698,7 @@ def get_cvp_version(): client.connect( nodes=[APP_SETTINGS["aristacv_cvaas_url"]], username="", - password="", + password="", # nosec: B106 is_cvaas=True, api_token=APP_SETTINGS.get("aristacv_cvp_token"), ) diff --git a/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py b/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py index 9c40cd4df..64dddc022 100644 --- a/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py +++ b/nautobot_ssot/tests/aristacv/test_utils_cloudvision.py @@ -19,6 +19,10 @@ def test_auth_failure_exception(self): with self.assertRaises(cloudvision.AuthFailure): 
cloudvision.CloudvisionApi(cvp_host="https://localhost", username="", password="", verify=True) # nosec + @patch.dict( + "nautobot_ssot.integrations.aristacv.constant.APP_SETTINGS", + {"aristacv_cvaas_url": "www.arista.io:443"}, + ) def test_auth_cvass_with_token(self): """Test that authentication against CVaaS with token works.""" client = cloudvision.CloudvisionApi(cvp_host=None, cvp_token="1234567890abcdef") # nosec From 346d3125a381a1bdaefbe517a22505adffbcf74e Mon Sep 17 00:00:00 2001 From: Jacob McGill Date: Tue, 16 Jan 2024 16:14:24 -0500 Subject: [PATCH 44/47] Fix ipfabric create_location to use correct arg name --- .../ipfabric/utilities/nbutils.py | 2 +- nautobot_ssot/tests/ipfabric/test_nbutils.py | 34 +++++++++++++++---- 2 files changed, 28 insertions(+), 8 deletions(-) diff --git a/nautobot_ssot/integrations/ipfabric/utilities/nbutils.py b/nautobot_ssot/integrations/ipfabric/utilities/nbutils.py index 2c0f3127c..d24b3d944 100644 --- a/nautobot_ssot/integrations/ipfabric/utilities/nbutils.py +++ b/nautobot_ssot/integrations/ipfabric/utilities/nbutils.py @@ -40,7 +40,7 @@ def create_location(location_name, location_id=None): # Ensure custom field is available custom_field_obj, _ = CustomField.objects.get_or_create( type=CustomFieldTypeChoices.TYPE_TEXT, - label="ipfabric_site_id", + key="ipfabric_site_id", defaults={"label": "IPFabric Location ID"}, ) custom_field_obj.content_types.add(ContentType.objects.get_for_model(Location)) diff --git a/nautobot_ssot/tests/ipfabric/test_nbutils.py b/nautobot_ssot/tests/ipfabric/test_nbutils.py index 3ac2ce7e5..c3a146e0b 100644 --- a/nautobot_ssot/tests/ipfabric/test_nbutils.py +++ b/nautobot_ssot/tests/ipfabric/test_nbutils.py @@ -11,6 +11,7 @@ from nautobot_ssot.integrations.ipfabric.utilities import ( # create_ip,; create_interface,; create_location, get_or_create_device_role_object, create_device_type_object, + create_location, create_manufacturer, create_status, create_vlan, @@ -23,12 +24,12 @@ class 
TestNautobotUtils(TestCase): def setUp(self): """Setup.""" - reg_loctype = LocationType.objects.update_or_create(name="Region")[0] - reg_loctype.content_types.set([ContentType.objects.get_for_model(VLAN)]) + site_location_type = LocationType.objects.update_or_create(name="Site")[0] + site_location_type.content_types.set([ContentType.objects.get_for_model(VLAN)]) self.location = Location.objects.create( name="Test-Location", status=Status.objects.get(name="Active"), - location_type=reg_loctype, + location_type=site_location_type, ) status_active = Status.objects.get(name="Active") @@ -79,10 +80,29 @@ def test_create_vlan(self): ) self.assertEqual(VLAN.objects.get(name="Test-Vlan").pk, vlan.pk) - # def test_create_location(self): - # """Test `create_location` Utility.""" - # test_location = create_location(location_name="Test-Location") - # self.assertEqual(test_location.id, self.location.id) + def test_create_location_existing_location_no_location_id(self): + """Test `create_location` Utility.""" + test_location = create_location(location_name="Test-Location") + self.assertEqual(test_location.id, self.location.id) + + def test_create_location_existing_location_with_location_id(self): + """Test `create_location` Utility.""" + self.assertFalse(self.location.cf.get("ipfabric_site_id")) + test_location = create_location(location_name="Test-Location", location_id="Test-Location") + self.assertEqual(test_location.id, self.location.id) + self.assertEqual(test_location.cf["ipfabric_site_id"], "Test-Location") + + def test_create_location_no_location_id(self): + """Test `create_location` Utility.""" + test_location = create_location(location_name="Test-Location-new") + self.assertEqual(test_location.name, "Test-Location-new") + + def test_create_location_with_location_id(self): + """Test `create_location` Utility.""" + self.assertFalse(Location.objects.filter(name="Test-Location-new")) + test_location = create_location(location_name="Test-Location-new", 
location_id="Test-Location-new") + self.assertEqual(test_location.name, "Test-Location-new") + self.assertEqual(test_location.cf["ipfabric_site_id"], "Test-Location-new") # def test_create_location_exception(self): # """Test `create_location` Utility exception.""" From 5599e72fa55776dbef088af484656a0833c6968f Mon Sep 17 00:00:00 2001 From: Jacob McGill Date: Mon, 8 Jan 2024 18:42:11 -0500 Subject: [PATCH 45/47] BugFix: account for VLAN not having VLANGroup --- .../integrations/infoblox/utils/nautobot.py | 6 +++++- nautobot_ssot/tests/infoblox/test_utils.py | 20 +++++++++++++------ 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/nautobot_ssot/integrations/infoblox/utils/nautobot.py b/nautobot_ssot/integrations/infoblox/utils/nautobot.py index 3f148b3ee..add31c8db 100644 --- a/nautobot_ssot/integrations/infoblox/utils/nautobot.py +++ b/nautobot_ssot/integrations/infoblox/utils/nautobot.py @@ -13,7 +13,11 @@ def build_vlan_map_from_relations(vlans: list): """ vlan_map = {} for vlan in vlans: - vlan_map[vlan.vid] = {"vid": vlan.vid, "name": vlan.name, "group": vlan.vlan_group.name} + vlan_map[vlan.vid] = {"vid": vlan.vid, "name": vlan.name} + if vlan.vlan_group: + vlan_map[vlan.vid]["group"] = vlan.vlan_group.name + else: + vlan_map[vlan.vid]["group"] = None return vlan_map diff --git a/nautobot_ssot/tests/infoblox/test_utils.py b/nautobot_ssot/tests/infoblox/test_utils.py index 187fbf141..2cd86abda 100644 --- a/nautobot_ssot/tests/infoblox/test_utils.py +++ b/nautobot_ssot/tests/infoblox/test_utils.py @@ -1,6 +1,8 @@ """Util tests that do not require Django.""" import unittest +from django.test import TestCase + from nautobot.extras.models import Status from nautobot.ipam.models import VLAN, VLANGroup @@ -35,7 +37,7 @@ def test_get_ext_attr_dict(self): self.assertEqual(standardized_dict, expected) -class TestNautobotUtils(unittest.TestCase): +class TestNautobotUtils(TestCase): """Test infoblox.utils.nautobot.py.""" def setUp(self): @@ -61,15 +63,16 @@ 
def setUp(self): status=active_status, vlan_group=self.vlan_group_2, ) - - def tearDown(self): - for obj in [self.vlan_10, self.vlan_20, self.vlan_30, self.vlan_group_1, self.vlan_group_2]: - obj.delete() + self.vlan_40 = VLAN.objects.create( + vid=40, + name="forty", + status=active_status, + ) def test_build_vlan_map_from_relations(self): """Test VLAN map is built correctly.""" - actual = build_vlan_map_from_relations([self.vlan_10, self.vlan_20, self.vlan_30]) + actual = build_vlan_map_from_relations([self.vlan_10, self.vlan_20, self.vlan_30, self.vlan_40]) expected = { 10: { "vid": 10, @@ -86,5 +89,10 @@ def test_build_vlan_map_from_relations(self): "name": "thirty", "group": "two", }, + 40: { + "vid": 40, + "name": "forty", + "group": None, + }, } self.assertEqual(actual, expected) From 690a8b6fcccd8c94a62895ad9f3da9e2a45ddb85 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 18 Jan 2024 11:56:31 -0600 Subject: [PATCH 46/47] =?UTF-8?q?build:=20=F0=9F=94=96=20Bump=20version=20?= =?UTF-8?q?to=202.2.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d4d32381e..432d8441c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-ssot" -version = "2.1.0" +version = "2.2.0" description = "Nautobot Single Source of Truth" authors = ["Network to Code, LLC "] license = "Apache-2.0" From b41571c9416a468580da8b7af6fd8c75dc2acab1 Mon Sep 17 00:00:00 2001 From: Justin Drew <2396364+jdrew82@users.noreply.github.com> Date: Thu, 18 Jan 2024 11:56:42 -0600 Subject: [PATCH 47/47] =?UTF-8?q?docs:=20=F0=9F=93=9D=20Update=20release?= =?UTF-8?q?=20notes=20for=202.2.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/admin/release_notes/version_2.2.md | 17 +++++++++++++++++ mkdocs.yml | 
1 + 2 files changed, 18 insertions(+) create mode 100644 docs/admin/release_notes/version_2.2.md diff --git a/docs/admin/release_notes/version_2.2.md b/docs/admin/release_notes/version_2.2.md new file mode 100644 index 000000000..490576296 --- /dev/null +++ b/docs/admin/release_notes/version_2.2.md @@ -0,0 +1,17 @@ + +# v2.2 Release Notes + +## v2.2.0 - 2024-01-18 + +## Added + +- [271](https://github.com/nautobot/nautobot-app-ssot/pull/271) - Add custom relationship capabilities to the contrib module by @Kircheneer +- [320](https://github.com/nautobot/nautobot-app-ssot/pull/320) - Store sets in diffsync by @snaselj +- [325](https://github.com/nautobot/nautobot-app-ssot/pull/325) - Update Infoblox SSoT to allow for gathering of IPv6 Prefixes by @qduk + +## Fixed + +- [318](https://github.com/nautobot/nautobot-app-ssot/pull/318) - BugFix: Use correct attr name for vlan_group by @jmcgill298 +- [319](https://github.com/nautobot/nautobot-app-ssot/pull/319) - Arista CVP Integration Fixes by @jdrew82 +- [326](https://github.com/nautobot/nautobot-app-ssot/pull/326) - Fix ipfabric create_location to use correct arg name by @jmcgill298 +- [327](https://github.com/nautobot/nautobot-app-ssot/pull/327) - BugFix: account for VLAN not having VLANGroup by @jmcgill298 diff --git a/mkdocs.yml b/mkdocs.yml index 7faaf8a16..95854aaa5 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -128,6 +128,7 @@ nav: - Compatibility Matrix: "admin/compatibility_matrix.md" - Release Notes: - "admin/release_notes/index.md" + - v2.2: "admin/release_notes/version_2.2.md" - v2.1: "admin/release_notes/version_2.1.md" - v2.0: "admin/release_notes/version_2.0.md" - v1.6: "admin/release_notes/version_1.6.md"