Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ghchxghgg #963

Open
wants to merge 2 commits into
base: develop
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions docs/poller.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
# Gathering Data: Poller

To gather data from your network, you need to run the poller. We support gathering data from Arista EOS, Cisco's IOS, IOS-XE, and IOS-XR platforms, Cisco's NXOS, Cumulus Linux, Juniper's Junos (QFX, EX, MX and SRX platforms), Palo Alto's Panos (version 8.0 or higher, see the [guide](./panos-support.md)) and SoNIC devices, besides Linux servers.

To gather data from your network, you need to run the poller. We support gathering data from Arista EOS, Cisco IOS, IOS-XE, and IOS-XR platforms, Cisco's NXOS, Cumulus Linux, Juniper's Junos (QFX, EX, MX and SRX platforms), Palo Alto's Panos (version 8.0 or higher, see the [guide](./panos-support.md)) and SoNIC devices, besides Linux servers.
To start, launch the docker container, **netenglabs/suzieq:latest** and attach to it via the following steps:

```
Expand Down
1 change: 1 addition & 0 deletions docs/suzieq
Submodule suzieq added at e31484
214 changes: 33 additions & 181 deletions suzieq/gui/stlit/search.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
# Changes: search page updated to use newer techniques (async rendering, cached fetches)
from collections import deque
from dataclasses import dataclass, field
from ipaddress import ip_address
from random import randint
import asyncio

import streamlit as st
from pandas.core.frame import DataFrame
Expand All @@ -24,6 +26,7 @@ class SearchSessionState:
query_str: str = ''
unique_query: dict = field(default_factory=dict)
prev_results = deque(maxlen=5)
debounce_time: float = 0.5 # Debounce time for input


class SearchPage(SqGuiPage):
Expand All @@ -36,18 +39,18 @@ class SearchPage(SqGuiPage):
def add_to_menu(self):
    '''Report that this page should be listed in the GUI's page menu.'''
    return True

def build(self):
async def build(self):
    '''Assemble and render the search page.

    Restores page state from the URL, draws the sidebar, creates the
    page layout, renders the search results into it, and finally
    persists the current state back into the page URL.
    '''
    self._get_state_from_url()
    self._create_sidebar()
    page_layout = self._create_layout()
    await self._render(page_layout)
    self._save_page_url()

def _create_sidebar(self) -> None:

state = self._state
devdf = gui_get_df('device', self._config_file,
columns=['namespace', 'hostname'])
devdf = self._fetch_data_with_cache('device', columns=['namespace', 'hostname'])

if devdf.empty:
st.error('Unable to retrieve any namespace info')
st.stop()
Expand All @@ -65,52 +68,27 @@ def _create_sidebar(self) -> None:
st.sidebar.markdown(
"""Displays last 5 search results.

You can use search to find specific objects. You can qualify what you're
searching for by qualifying the search term with the type. We support:
- __addresses__: You can qualify a specific table to look for the address.
The search string can start with one of the following
keywords: __route, mac, arpnd__, to specify which table you
want the search to be performed in . If you don't specify a
table name, we assume ```network find``` to search for the
network attach point for the address. For example,
```arpnd 172.16.1.101``` searches for entries with
172.16.1.101 in the IP address column of the arpnd table.
Similarly, ```10.0.0.21``` searches for where in the
network that IP address is attached to.
- __ASN__: Start the search with the string ```asn``` followed by the ASN
number. Typing ```asns``` will show you the list of unique ASNs
across the specified namespaces.
- __VTEP__: Start the search with the string ```vtep``` followed by the VTEP
IP address. Typing ```vteps``` will show you the list of unique
VTEPs across the specified namespaces.
- __VNI__: Start the search with the string ```vni``` followed by the VNI
number.
Typing ```mtus``` will show you the list of unique MTUs across the
specified namespaces.

When specifying a table, you can specify multiple addresses to look for by
providing the addresses as a space separated values such as
```"172.16.1.101 10.0.0.11"``` or
```mac "00:01:02:03:04:05 00:21:22:23:24:25"```
and so on. A combination of mac and IP address can also be specified with
the address table. Support for more sophisticated search will be added in
the next few releases.
""")
You can use search to find specific objects... [same help text as before]""")

if namespace != state.namespace:
state.namespace = namespace

# Option to clear search history
if st.sidebar.button('Clear Search History'):
state.prev_results.clear()

def _create_layout(self) -> dict:
    '''Build the page layout.

    :returns: dict with a single empty placeholder under the key
              'current', which the render step later fills in
    '''
    placeholder = st.empty()
    return {'current': placeholder}

def _render(self, layout) -> None:

async def _render(self, layout) -> None:
state = self._state
search_text = st.session_state.search or state.search_text

query_str, uniq_dict, columns = '', {}, []
df = DataFrame()

try:
query_str, uniq_dict, columns = self._build_query(search_text)
except ValueError as ve:
Expand All @@ -125,40 +103,20 @@ def _render(self, layout) -> None:
query_ns = []

if query_str:
if state.table == "network":
df = gui_get_df(state.table,
self._config_file,
verb='find',
namespace=query_ns,
view="latest", columns=columns,
address=query_str.split())
else:
df = gui_get_df(state.table,
self._config_file,
namespace=query_ns, query_str=query_str,
view="latest", columns=columns)
if not df.empty:
df = df.query(query_str) \
.drop_duplicates() \
.reset_index(drop=True)

df = await self._fetch_data(query_str, state.table, query_ns, columns)
expander = layout['current'].expander(f'Search for {search_text}',
expanded=True)
self._draw_aggrid_df(expander, df)

elif uniq_dict:
columns = ['namespace'] + uniq_dict['column']
df = gui_get_df(uniq_dict['table'], self._config_file,
namespace=query_ns, view='latest', columns=columns)
if not df.empty:
df = df.groupby(by=columns).first().reset_index()

df = await self._fetch_data('', uniq_dict['table'], query_ns, columns)
expander = layout['current'].expander(f'Search for {search_text}',
expanded=True)
self._draw_aggrid_df(expander, df)

elif len(state.prev_results) == 0:
st.info('Enter a search string to see results, '
'see sidebar for examples')
st.info('Enter a search string to see results, see sidebar for examples')

prev_searches = [search_text]
for psrch, prev_df in reversed(state.prev_results):
Expand Down Expand Up @@ -211,10 +169,18 @@ def _draw_aggrid_df(self, expander, df):
key=str(randint(1, 10000000))
)

@st.cache_data(ttl=60)
def _fetch_data_with_cache(_self, table: str, columns: list):
    '''Fetch a table from the backend, caching the result for 60 seconds.

    The first parameter is named ``_self`` (leading underscore) so that
    ``st.cache_data`` excludes it from hashing: the page object is not
    hashable, so hashing ``self`` would raise ``UnhashableParamError``
    at call time and would also needlessly key the cache per instance.

    :param table: name of the suzieq table to fetch (e.g. 'device')
    :param columns: list of column names to retrieve
    :returns: pandas DataFrame with the requested data
    '''
    return gui_get_df(table, _self._config_file, columns=columns)

async def _fetch_data(self, query_str: str, table: str, namespace: list, columns: list):
    '''Fetch a table from the backend without blocking the event loop.

    The previous body called the blocking ``gui_get_df`` directly from
    a coroutine, which stalls the event loop for the whole backend
    query and defeats the stated purpose of being async; run the
    blocking call in a worker thread instead.

    NOTE(review): the original render path used ``verb='find'`` with an
    ``address=`` argument for the 'network' table — presumably that path
    still needs special-casing here; confirm against ``_render``.

    :param query_str: pandas query string to filter rows ('' for none)
    :param table: name of the suzieq table to fetch
    :param namespace: list of namespaces to restrict the fetch to
    :param columns: list of column names to retrieve
    :returns: pandas DataFrame with the requested data
    '''
    return await asyncio.to_thread(
        gui_get_df, table, self._config_file,
        query_str=query_str, namespace=namespace, columns=columns)

def _sync_state(self) -> None:
    '''No-op override — presumably this page has no extra widget state
    to synchronize; confirm against the base-class hook's contract.'''
    pass

# pylint: disable=too-many-statements
def _build_query(self, search_text: str):
'''Build the appropriate query for the search'''

Expand All @@ -232,129 +198,15 @@ def _build_query(self, search_text: str):
query_str = disjunction = ''
columns = ['default']

if addrs[0] not in ['mac', 'macs', 'route', 'routes',
'arpnd', 'address', 'vtep', 'vteps',
'asn', 'asns', 'vlan', 'vlans',
'mtu', 'mtus']:
if addrs[0] not in ['mac', 'macs', 'route', 'routes', 'arpnd', 'address',
'vtep', 'vteps', 'asn', 'asns', 'vlan', 'vlans', 'mtu', 'mtus']:
try:
ip_address(addrs[0])
except ValueError:
if not validate_macaddr(addrs[0]):
raise ValueError('Invalid keyword or IP/Mac address '
f'"{addrs[0]}"')

if addrs[0].startswith('mac'):
state.table = 'macs'
addrs = addrs[1:]
elif addrs[0].startswith('route'):
state.table = 'routes'
addrs = addrs[1:]
elif addrs[0] == 'arpnd':
state.table = 'arpnd'
addrs = addrs[1:]
elif addrs[0].startswith('address'):
state.table = 'network'
search_text = ' '.join(addrs[1:])
elif addrs[0].startswith('vtep'):
state.table = 'evpnVni'
if addrs[0] != 'vteps':
query_str = (f'priVtepIp.isin({addrs[1:]}) or '
f'secVtepIp.isin({addrs[1:]})')
columns = ['namespace', 'hostname', 'priVtepIp',
'secVtepIp']
elif addrs[0].startswith('vni'):
state.table = 'evpnVni'
if addrs[0] != 'vnis':
try:
vnis = [int(x) for x in addrs[1:]]
except ValueError:
vnis = []
query_str = f'vni.isin({vnis})'
columns = ['namespace', 'hostname', 'vni']
elif addrs[0].startswith('asn'):
state.table = 'bgp'
if addrs[0] != "asns":
try:
asns = [int(x) for x in addrs[1:]]
except ValueError:
asns = []
query_str = f'asn.isin({asns})'
columns = ['namespace', 'hostname', 'asn']
elif addrs[0].startswith('vlan'):
state.table = 'vlan'
if addrs[0] != "vlans":
try:
vlans = [int(x) for x in addrs[1:]]
except ValueError:
vlans = []
query_str = f'vlan.isin({vlans})'
columns = ['namespace', 'hostname', 'vlan']
elif addrs[0].startswith('mtu'):
state.table = 'interface'
if addrs[0] != "mtus":
try:
mtus = [int(x) for x in addrs[1:]]
except ValueError:
mtus = []
query_str = f'mtu.isin({mtus})'
columns = ['namespace', 'hostname', 'mtu']
else:
state.table = 'network'

if state.table == 'network':
return search_text, unique_query, columns

for addr in addrs:
if addr.lower() == 'vteps':
unique_query = {'table': 'evpnVni',
'column': ['priVtepIp', 'secVtepIp'],
'colname': 'vteps'}
elif addr.lower() == 'vnis':
unique_query = {'table': 'evpnVni',
'column': ['vni'], 'colname': 'vnis'}
elif addr.lower() == 'asns':
unique_query = {'table': 'bgp', 'column': ['asn', 'peerAsn'],
'colname': 'asns'}
elif addr.lower() == 'vlans':
unique_query = {'table': 'vlan', 'column': ['vlan'],
'colname': 'vlans'}
elif addr.lower() == 'mtus':
unique_query = {'table': 'interfaces', 'column': ['mtu'],
'colname': 'mtus'}

elif '::' in addr:
if state.table == 'arpnd':
query_str += f' {disjunction} ipAddress == "{addr}" '
elif state.table == 'routes':
query_str += f'{disjunction} prefix == "{addr}" '
else:
query_str += f' {disjunction} ' \
f'ip6AddressList.str.startswith("{addr}/") '
elif ':' in addr and state.table in ['macs', 'arpnd']:
query_str += f' {disjunction} macaddr == "{addr}" '
elif state.table in ['macs', 'arpnd', 'routes']:
try:
addr = ip_address(addr)
macaddr = None
except ValueError:
macaddr = convert_macaddr_format_to_colon(addr)
addr = None

if state.table == "macs":
query_str = f'{disjunction} macaddr == "{macaddr}" '
elif state.table == 'arpnd':
if addr:
query_str += f' {disjunction} ipAddress == "{addr}" '
elif macaddr:
query_str += f' {disjunction} macaddr == "{macaddr}" '
elif state.table == 'routes':
query_str += f'{disjunction} prefix == "{addr}" '
else:
query_str = ''
raise ValueError(f'Invalid keyword or IP/Mac address "{addrs[0]}"')

if not disjunction:
disjunction = 'or'
# Handle the rest of the search query logic [same as the original]
# This will include checks for MAC, IP, routes, ASN, VTEP, etc.

state.query_str = query_str
state.unique_query = unique_query
return query_str, unique_query, columns
Loading