Squashed commit of v1.1.2 changes from develop
- Continued revision of prefix queries to use fewer larger queries
- Improved handling of soft timeout errors
- Fixed bug where getting prefixes by ID could get the wrong IP version prefix
- Fixed bug where objects only in Nautobot could cause traceback during diff
- Fixed issue where getting prefixes by ID could fail if filter was already
converted to a netaddr rather than being a string
Mathias Wegner committed Feb 14, 2024
1 parent 08f538c commit a15d394
Showing 6 changed files with 88 additions and 87 deletions.
11 changes: 11 additions & 0 deletions Changelog.txt
@@ -1,3 +1,14 @@
### Release 1.1.2
- Continued revision of prefix queries to use fewer larger queries
- Improved handling of soft timeout errors
- Fixed bug where getting prefixes by ID could get the wrong IP version prefix
- Fixed bug where objects only in Nautobot could cause traceback during diff
- Fixed issue where getting prefixes by ID could fail if filter was already
converted to a netaddr rather than being a string

### Release 1.1.1
- Minor Jenkinsfile tweak

### Release 1.1.0
- Improved performance of SolidSERVER prefix queries by switching list querying approach
- Improved SolidSERVER adapter logging
2 changes: 1 addition & 1 deletion README.md
@@ -44,7 +44,7 @@ The following should be added to your nautobot_config.py and updated for your en

## Notes/tips on usage

The default timeout of 120 seconds is enough for many queries, but larger queries will take more time. An IPv4 /18 network containing ~1700 records takes approximately 90 seconds. An IPv4 /18 network containing ~4500 records takes approximately 270 seconds. An IPv4 /19 network containing ~5200 records takes approximately 300 seconds. Exactly when to increase the timeout depends on how broad the query is and how many records are in the query results.
The default timeout of 120 seconds is enough for most queries, but larger queries may exceed the timeout. Jobs that exceed the default timeout will be killed by Nautobot and show up as failed with a "Query exceeded timeout!" error in the job log. Re-running the job with a narrower filter or a larger timeout should help, but be aware that exceeding the hard timeout limit from the nautobot_config will cause the job to fail no matter what.
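
If jobs keep hitting the soft limit even with a narrower filter, the timeouts can be raised in nautobot_config.py. A minimal sketch, assuming the stock Nautobot Celery time-limit settings are what enforce the soft and hard limits for this job; the values shown are illustrative, not recommendations:

```python
# nautobot_config.py -- illustrative values; assumes the standard Nautobot Celery
# settings govern the SSoT job timeouts (check your deployment before changing them).
CELERY_TASK_SOFT_TIME_LIMIT = 600  # soft limit: produces the "Query exceeded timeout!" failure
CELERY_TASK_TIME_LIMIT = 900       # hard limit: the worker is killed outright once exceeded
```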

### BIG CAVEAT ABOUT THE NAME FILTER!

@@ -295,7 +295,10 @@ def _load_prefixes(self, address_filter=None, subnet_list=None):
subnet_list=subnet_list, address_filter=address_filter
)
self.job.log_debug(
message=f"Filter name prefixes has {len(filter_name_prefixes)} items"
message=(
f"Filter name prefixes has {len(filter_name_prefixes)} items with"
)
+ f" filters {address_filter} and subnet_list {subnet_list}"
)
self.job.log_debug(message=f"Subnet list has {len(subnet_list)} items")
if filter_name_prefixes:
7 changes: 7 additions & 0 deletions nautobot_plugin_ssot_eip_solidserver/jobs.py
@@ -4,6 +4,7 @@

import diffsync # type: ignore # pylint: disable=unused-import # noqa: F401
import netaddr # type: ignore
from billiard.exceptions import SoftTimeLimitExceeded
from diffsync.enum import DiffSyncFlags
from diffsync.exceptions import ObjectNotCreated
from django.conf import settings # type: ignore
@@ -255,6 +256,12 @@ def sync_data(self) -> None:
)
except ObjectNotCreated as create_err:
self.log_failure(f"Unable to create object {create_err}")
except SoftTimeLimitExceeded as timeout_err:
self.log_failure(
f"Query exceeded timeout! {timeout_err}"
" Consider re-running the job with a larger timeout"
" or a smaller address filter."
)


jobs = [SolidserverDataSource]
76 changes: 38 additions & 38 deletions nautobot_plugin_ssot_eip_solidserver/utils/ssapi.py
@@ -227,43 +227,45 @@ def generic_api_action(
return r_text

def get_prefixes_by_id(
self, subnet_list: list[str], address_filter: str | list[str] | None = None
self,
subnet_list: list[str],
address_filter: str | netaddr.IPNetwork,
) -> list[Any]:
"""take a list of unique ids, fetch them from solidserver
Args:
subnet_list (list): a list of subnet IDs
address_filter (str, netaddr.IPNetwork): a CIDR (or string representation of a CIDR)
Returns:
list: a list of prefix resources
"""
prefixes = []
parent4 = netaddr.IPNetwork("0.0.0.0/0")
parent6 = netaddr.IPNetwork("::/0")
if address_filter and isinstance(address_filter, str):
parent = netaddr.IPNetwork("0.0.0.0/0")
subnet_name = "subnet_id"
api_action = "ip_block_subnet_info"
if isinstance(address_filter, str):
parent = netaddr.IPNetwork(address_filter)
if parent.version == 4:
parent4 = parent
elif parent.version == 6:
parent6 = parent
elif isinstance(address_filter, netaddr.IPNetwork):
parent = address_filter
else:
self.job.log_warning(
f"address filter {address_filter} is not a string or netaddr object"
)
return prefixes
if parent.version == 6:
subnet_name = "subnet6_id"
api_action = "ip6_block6_subnet6_info"
self.job.log_debug(f"parent is {parent} (ipv{parent.version})")
params: dict[str, int | str] = {"LIMIT": LIMIT}
for each_id in subnet_list:
self.job.log_debug(f"fetching Solidserver prefix id {each_id}")
params["subnet_id"] = each_id
params[subnet_name] = each_id
this_prefix = self.generic_api_action(
api_action="ip_block_subnet_info", http_action="get", params=params
api_action=api_action, http_action="get", params=params
)
if this_prefix:
if ssutils.prefix_to_net(this_prefix[0]) in parent4:
prefixes.append(this_prefix)
else:
params["subnet6_id"] = each_id
this_prefix = self.generic_api_action(
api_action="ip6_block6_subnet6_info",
http_action="get",
params=params,
)
if ssutils.prefix_to_net(this_prefix[0]) in parent6:
if ssutils.prefix_to_net(this_prefix[0]) in parent:
prefixes.append(this_prefix)
return prefixes
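
For clarity, the filter normalization at the top of get_prefixes_by_id boils down to the standalone sketch below; resolve_parent is a hypothetical name, while the parameter and API action strings are the ones used in the diff above:

```python
import netaddr


def resolve_parent(address_filter):
    """Hypothetical helper mirroring the logic above: accept a CIDR string or an
    already-converted netaddr.IPNetwork and pick the version-specific
    SOLIDserver id parameter and API action."""
    if isinstance(address_filter, str):
        parent = netaddr.IPNetwork(address_filter)
    elif isinstance(address_filter, netaddr.IPNetwork):
        parent = address_filter
    else:
        raise TypeError("address_filter must be a CIDR string or netaddr.IPNetwork")
    if parent.version == 6:
        return parent, "subnet6_id", "ip6_block6_subnet6_info"
    return parent, "subnet_id", "ip_block_subnet_info"


# resolve_parent("192.0.2.0/24")
#   -> (IPNetwork('192.0.2.0/24'), 'subnet_id', 'ip_block_subnet_info')
# resolve_parent(netaddr.IPNetwork("2001:db8::/64"))
#   -> (IPNetwork('2001:db8::/64'), 'subnet6_id', 'ip6_block6_subnet6_info')
```

Checking each returned prefix against the single parent network is what keeps an IPv4 filter from admitting an IPv6 prefix with the same id, and vice versa.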

@@ -408,31 +410,29 @@ def get_addresses_by_network(self, cidr: netaddr.IPNetwork) -> list[Any]:
list: a list of address models
"""
ss_addrs: list[Any] = []
sub_cidrs: list[str] = []
query_str = ""
self.job.log_debug("Starting get addresses by network")
action = "unset"
if cidr.version == 4:
action = "ip_address_list"
sub_cidrs = ssutils.iter_ip4_subnet_values_for_like_clause(cidr)
query_str = ssutils.generate_ip4_where_clause(cidr)
elif cidr.version == 6:
action = "ip6_address6_list"
sub_cidrs = ssutils.iter_ip6_subnet_values_for_like_clause(cidr)
query_str = ssutils.generate_ip6_where_clause(cidr)
params: dict[str, str | int] = {"LIMIT": LIMIT}
self.job.log_debug(f"sub_cidrs is {sub_cidrs}")
for each_cidr in sub_cidrs:
self.job.log_debug(f"fetching Solidserver address for {each_cidr}")
params["WHERE"] = each_cidr
this_address = self.generic_api_action(
api_action=action, http_action="get", params=params
)
if this_address:
if isinstance(this_address, list):
for each_addr in this_address:
if each_addr.get("hostaddr") in cidr:
ss_addrs.append(each_addr)
else:
if this_address.get("hostaddr") in cidr:
ss_addrs.append(this_address)
self.job.log_debug(f"fetching Solidserver address for {query_str}")
params["WHERE"] = query_str
addresses = self.generic_api_action(
api_action=action, http_action="get", params=params
)
if addresses:
if isinstance(addresses, list):
for each_addr in addresses:
if each_addr.get("hostaddr") in cidr:
ss_addrs.append(each_addr)
else:
if addresses.get("hostaddr") in cidr:
ss_addrs.append(addresses)
return ss_addrs
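
In other words, the rewritten method issues one ranged query per network instead of one query per /24 or /112 slice. A minimal sketch of that flow, assuming a client object exposing generic_api_action as above and run inside the plugin's environment; the LIMIT value is illustrative:

```python
import netaddr

from nautobot_plugin_ssot_eip_solidserver.utils import ssutils

cidr = netaddr.IPNetwork("10.0.32.0/19")
params = {"LIMIT": 1000, "WHERE": ssutils.generate_ip4_where_clause(cidr)}
# One API round trip instead of thirty-two per-/24 queries:
# rows = client.generic_api_action(api_action="ip_address_list", http_action="get", params=params)
# ss_addrs = [row for row in rows if row.get("hostaddr") in cidr]
```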

def get_prefixes_by_network(self, cidr: str) -> list[Any]:
74 changes: 27 additions & 47 deletions nautobot_plugin_ssot_eip_solidserver/utils/ssutils.py
@@ -12,6 +12,7 @@
import netaddr # type: ignore
import validators # type: ignore
from diffsync import Diff, DiffSync # , DiffElement
from diffsync.exceptions import ObjectNotFound
from validators import ValidationError

from nautobot_plugin_ssot_eip_solidserver.diffsync.models.base import (
@@ -32,32 +33,34 @@ def unpack_class_params(params):
return dict(urllib.parse.parse_qsl(params, keep_blank_values=True))


def iter_ip4_subnet_values_for_like_clause(cidr: netaddr.IPNetwork) -> list[str]:
"""Iterate through a CIDR, returning a list of CIDRs that are
one bit shorter than the original CIDR
def generate_ip4_where_clause(cidr: netaddr.IPNetwork) -> str:
"""Take an IPv4 CIDR and return a where statements to find all addresses within a
given CIDR (where >= first and <= last)
Args:
cidr (netaddr.IPNetwork): a CIDR
Returns:
list: a list of CIDRs
str: a where statement matching all addresses within the CIDR
"""
search_list = []
if cidr.prefixlen >= 24:
# if the cidr is /24 or smaller, return a list with a single where statement
first_addr = str(hex(cidr.first)).lstrip("0x").rjust(8, "0")
last_addr = str(hex(cidr.last)).lstrip("0x").rjust(8, "0")
return [f"ip_addr >= '{first_addr}' and ip_addr <= '{last_addr}'"]
else:
# if the cidr is longer than /24, iterate through the various combinations of
# first three octets and append them to cidr_list
for each_cidr in cidr.subnet(24):
first_addr = str(hex(each_cidr.first)).lstrip("0x").rjust(8, "0")
last_addr = str(hex(each_cidr.last)).lstrip("0x").rjust(8, "0")
search_list.append(
f"ip_addr >= '{first_addr}' and ip_addr <= '{last_addr}'"
)
return search_list
first_addr = str(hex(cidr.first)).lstrip("0x").rjust(8, "0")
last_addr = str(hex(cidr.last)).lstrip("0x").rjust(8, "0")
return f"ip_addr >= '{first_addr}' and ip_addr <= '{last_addr}'"


def generate_ip6_where_clause(cidr: netaddr.IPNetwork) -> str:
"""Take an IPv6 CIDR and return a where statements to find all addresses within a
given CIDR (where >= first and <= last)
Args:
cidr (netaddr.IPNetwork): a CIDR
Returns:
str: a where statement matching all addresses within the CIDR
"""
first_addr = str(hex(cidr.first)).lstrip("0x").rjust(32, "0")
last_addr = str(hex(cidr.last)).lstrip("0x").rjust(32, "0")
return f"ip6_addr >= '{first_addr}' and ip6_addr <= '{last_addr}'"


def get_ip4_subnet_start_and_end_hexes_query(cidr: netaddr.IPNetwork) -> str:
@@ -90,32 +93,6 @@ def get_ip6_subnet_start_and_end_hexes_query(cidr: netaddr.IPNetwork) -> str:
return f"start_ip6_addr >= '{first_addr}' and end_ip6_addr <= '{last_addr}'"


def iter_ip6_subnet_values_for_like_clause(cidr: netaddr.IPNetwork) -> list[str]:
"""Iterate through a CIDR, returning a list of where statements to find all
addresses within a given /112
If the cidr is smaller than /112, return a list with a single where statement
Args:
cidr (netaddr.IPNetwork): a CIDR
Returns:
list: a list of CIDRs
"""
search_list = []
if cidr.prefixlen >= 112:
first_addr = str(hex(cidr.first)).lstrip("0x").rjust(32, "0")
last_addr = str(hex(cidr.last)).lstrip("0x").rjust(32, "0")
return [f"ip6_addr >= '{first_addr}' and ip6_addr <= '{last_addr}'"]
else:
for each_cidr in cidr.subnet(112):
first_addr = str(hex(each_cidr.first)).lstrip("0x").rjust(32, "0")
last_addr = f"{first_addr[:-4]}ffff".rjust(32, "0")
search_list.append(
f"ip6_addr >= '{first_addr}' and ip6_addr <= '{last_addr}'"
)
return search_list


def domain_name_prep(domain_filter: str) -> tuple[list, list]:
"""ensure correct formatting in domain name filter(s)
@@ -229,7 +206,10 @@ def filter_diff_for_status(
for resource_type in ("ipaddress", "prefix"):
if resource_type in diff.dict().keys():
for key, value in diff.dict()[resource_type].items():
this_obj = source_adapter.get(obj=resource_type, identifier=key)
try:
this_obj = source_adapter.get(obj=resource_type, identifier=key)
except ObjectNotFound:
continue
if "status__name" in value["+"].keys():
if len(value["+"].keys()) == 1:
source_adapter.remove(this_obj)
