Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: preserve alerts, update to 40sdk #97

Open
wants to merge 14 commits into
base: main
Choose a base branch
from
Open
2 changes: 1 addition & 1 deletion looker_deployer/commands/deploy_boards.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
# limitations under the License.

import logging
from looker_sdk import models
from looker_sdk import models40 as models
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This will be fine until the Python SDK is updated after API 3.1 is removed — at that point models40 will probably be renamed to models. We should discuss/research how much code that would break, or maybe we alias models40 as models. Worth thinking about, @christelilaka @drstrangelooker et al.

from looker_deployer.utils import deploy_logging
from looker_deployer.utils.get_client import get_client

Expand Down
2 changes: 1 addition & 1 deletion looker_deployer/commands/deploy_connections.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

import logging
import re
from looker_sdk import models, error
from looker_sdk import models40 as models, error
from looker_deployer.utils import deploy_logging
from looker_deployer.utils import parse_ini
from looker_deployer.utils.get_client import get_client
Expand Down
90 changes: 83 additions & 7 deletions looker_deployer/commands/deploy_content.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,28 +19,37 @@
import tempfile
import shutil
import threading
import json
from concurrent.futures import ThreadPoolExecutor
from itertools import repeat
from looker_deployer.utils import deploy_logging
from looker_deployer.utils import parse_ini
from looker_deployer.utils.get_client import get_client
from looker_sdk import models
from looker_sdk import models40 as models


logger = deploy_logging.get_logger(__name__)

def alert_cleanup(sdk):
logger.debug("cleaning up orphaned alerts")
disabled_alerts = sdk.search_alerts(disabled="true")
for alert in disabled_alerts:
if alert['disabled_reason'] == "Dashboard element has been removed.":
sdk.delete_alert(alert['id'])
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we actually just disable the alert via update_alert instead of deleting it? You can take the body, set is_disabled, and add a disabled reason.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Also, add a logger.info call any time we delete or update something, so people can see which IDs changed.

logger.info("Alert removed", extra={"alert title": alert['custom_title'], "owner": alert['owner_display_name']})


def get_space_ids_from_name(space_name, parent_id, sdk):
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we rename functions and variables from "space" to "folder"? The tests will also need to be updated for this change. Just don't change the actual Looker commands (which already refer to things as folders). I'm pretty sure a space-to-folder find & replace should cover 95% of the cases.

if (space_name == "Shared" and parent_id == "0"):
return ["1"]
elif (space_name == "Embed Groups" and parent_id == "0"):
return sdk.search_spaces(name=space_name, parent_id=None)[0].id
return sdk.search_folders(name=space_name, parent_id=None)[0].id
elif (space_name == "Users" and parent_id == "0"):
return sdk.search_spaces(name=space_name, parent_id=None)[0].id
return sdk.search_folders(name=space_name, parent_id=None)[0].id
elif (space_name == "Embed Users" and parent_id == "0"):
return sdk.search_spaces(name=space_name, parent_id=None)[0].id
return sdk.search_folders(name=space_name, parent_id=None)[0].id
logger.debug("space info", extra={"space_name": space_name, "parent_id": parent_id})
space_list = sdk.search_spaces(name=space_name, parent_id=parent_id)
space_list = sdk.search_folders(name=space_name, parent_id=parent_id)
id_list = [i.id for i in space_list]

return id_list
Expand All @@ -61,8 +70,8 @@ def create_or_return_space(space_name, parent_id, sdk):
raise e
else:
logger.warning("No folders found. Creating folder now")
new_space = models.CreateSpace(name=space_name, parent_id=parent_id)
res = sdk.create_space(new_space)
new_space = models.CreateFolder(name=space_name, parent_id=parent_id)
res = sdk.create_folder(new_space)
return res.id

logger.info("Found Space ID", extra={"id": target_id})
Expand Down Expand Up @@ -125,8 +134,70 @@ def import_content(content_type, content_json, space_id, env, ini, debug=False):
win_exec = ["cmd.exe", "/c"]
gzr_command = win_exec + gzr_command

is_new_dash = "false"
existing_dash_alerts = []
existing_elements_w_alerts = []

if content_type == "dashboard": ## only run for dashboards, looks can't have alerts
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is it possible to move this into a separate function that is called from here to allow for unit tests?

sdk = get_client(ini, env) ## should we update the function def and pass sdk?
with open(content_json) as file:
json_dash = json.load(file)
existing_dash = sdk.search_dashboards(slug=json_dash['slug']) ## search with slug first, fall back to name + folder
if len(existing_dash) < 1:
existing_dash = sdk.search_dashboards(title=json_dash['title'],folder_id=space_id)
if len(existing_dash) < 1:
is_new_dash = "true"
if is_new_dash == "false": ## if it's an existing dashboard, save the alerts and elements
for element in existing_dash[0]['dashboard_elements']:
start = len(existing_dash_alerts)
alerts = list(filter(lambda alert: str(alert['dashboard_element_id']) == str(element['id']), enabled_alerts))
if len(alerts) > 0:
existing_dash_alerts.extend(alerts)
if len(existing_dash_alerts) > start:
if element not in existing_elements_w_alerts:
existing_elements_w_alerts.append(element)


#logger.debug("space info", extra={"space_name": space_name, "parent_id": parent_id})
subprocess.run(gzr_command)


if is_new_dash == "false" and content_type == "dashboard" and len(existing_dash_alerts) > 0: ## get the new dashboard
updated_dash = sdk.search_dashboards(slug=json_dash['slug'])
if len(updated_dash) < 1:
updated_dash = sdk.search_dashboards(title=json_dash['title'],folder_id=space_id)
old_to_new_ids = {}
for element in existing_elements_w_alerts: ## match old element ids to new element ids
updated_dash_element = list(filter(lambda item: item['title'] == element['title'] and item['query_id'] == element['query_id'], updated_dash[0]['dashboard_elements']))
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Okay, so we're comparing the element title and the query ID. What happens when the query ID changes — is it okay that this breaks the alert as well? I also could not get this to work when I tried to unit test it; I didn't delve too deep into why. On 22.16 it's unsetting my alert from the dashboard, but I'm also not getting it disabled either.

if len(updated_dash_element) == 1: ## what should we do if more than one match?
old_to_new_ids[element['id']] = updated_dash_element[0]['id']
for alert in existing_dash_alerts: ## create new alerts
logger.debug('processing alert for element', extra={"element_id": alert['dashboard_element_id']})
new_alert = {}
update_owner = {} #alerts are assigned to creator, need to update after
update_owner['owner_id'] = alert['owner_id']
for key in alert.keys():
new_alert[key] = alert[key]
new_alert['applied_dashboard_filters'] = []
new_filter = {}
for old_filter in alert['applied_dashboard_filters']:
new_filter = old_filter.__dict__
if new_filter['filter_value'] == 'None':
new_filter['filter_value'] = ""
new_alert['applied_dashboard_filters'].append(new_filter)
if alert['dashboard_element_id'] in old_to_new_ids.keys():
logger.debug("creating alert", extra={"old_element_id": alert['dashboard_element_id'], "new_element_id": old_to_new_ids[alert['dashboard_element_id']]})
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we get a logger.info line that summarizes what is happening and specifies the dashboard element ID and the alert being updated?

new_alert['dashboard_element_id'] = old_to_new_ids[alert['dashboard_element_id']]
try:
created_alert = sdk.create_alert(new_alert)
sdk.update_alert_field(created_alert['id'], update_owner) #update new alert to correct owner
update_owner['owner_id'] = sdk.me()['id'] #get id of current user
if alert['id'] != update_owner['owner_id']:
sdk.update_alert_field(alert['id'], update_owner) #update old alert to current user to prevent errors when deleting
sdk.delete_alert(alert['id'])
except Exception as e:
print(e)


def build_spaces(spaces, sdk):
# seeding initial value of parent id to Shared
Expand Down Expand Up @@ -303,6 +374,8 @@ def main(args):
args.target_base = 'Shared'

sdk = get_client(args.ini, args.env)
global enabled_alerts
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we get an update to the README about this new functionality?

enabled_alerts = sdk.search_alerts(disabled="false", all_owners=True)
send_content(
sdk,
args.env,
Expand All @@ -315,3 +388,6 @@ def main(args):
args.debug,
args.target_base
)

alert_cleanup(sdk)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This doesn't work for me; my alerts are being unset but not disabled.


2 changes: 1 addition & 1 deletion looker_deployer/commands/deploy_content_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def export_spaces(folder_id, env, ini, path, debug=False):


def recurse_folders(folder_id, folder_list, sdk, debug=False):
space = sdk.space(str(folder_id))
space = sdk.folder(str(folder_id))
folder_list.append(space.name)
logger.debug(
"recursive folder crawl status",
Expand Down
2 changes: 1 addition & 1 deletion looker_deployer/commands/deploy_group_in_group.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import logging
import re
from looker_sdk import models
from looker_sdk import models40 as models
from looker_deployer.utils import deploy_logging
from looker_deployer.utils.get_client import get_client
from looker_deployer.utils.match_by_key import match_by_key
Expand Down
2 changes: 1 addition & 1 deletion looker_deployer/commands/deploy_groups.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import logging
import re
from looker_sdk import models
from looker_sdk import models40 as models
from looker_deployer.utils import deploy_logging
from looker_deployer.utils.get_client import get_client
from looker_deployer.utils.match_by_key import match_by_key
Expand Down
2 changes: 1 addition & 1 deletion looker_deployer/commands/deploy_model_sets.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import logging
import re
from looker_sdk import models
from looker_sdk import models40 as models
from looker_deployer.utils import deploy_logging
from looker_deployer.utils.get_client import get_client
from looker_deployer.utils.match_by_key import match_by_key
Expand Down
2 changes: 1 addition & 1 deletion looker_deployer/commands/deploy_permission_sets.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import logging
import re
from looker_sdk import models
from looker_sdk import models40 as models
from looker_deployer.utils import deploy_logging
from looker_deployer.utils.get_client import get_client
from looker_deployer.utils.match_by_key import match_by_key
Expand Down
2 changes: 1 addition & 1 deletion looker_deployer/commands/deploy_roles.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import logging
import re
from looker_sdk import models
from looker_sdk import models40 as models
from looker_deployer.utils import deploy_logging
from looker_deployer.utils.get_client import get_client
from looker_deployer.utils.match_by_key import match_by_key
Expand Down
2 changes: 1 addition & 1 deletion looker_deployer/commands/deploy_user_attributes.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import logging
import re
from looker_sdk import models
from looker_sdk import models40 as models
from looker_deployer.utils import deploy_logging
from looker_deployer.utils.get_client import get_client
from looker_deployer.utils.match_by_key import match_by_key
Expand Down
14 changes: 7 additions & 7 deletions looker_deployer/utils/get_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,28 +13,28 @@
# limitations under the License.

from looker_deployer import version
from looker_sdk import api_settings, methods, requests_transport, auth_session, serialize, _settings
from looker_sdk import api_settings, methods40 as methods, requests_transport, auth_session, serialize, _settings
from typing import Optional


def configure_sdk(
config_file: str = "looker.ini",
section: Optional[str] = None,
config_settings: Optional[api_settings.ApiSettings] = None,
) -> methods.Looker31SDK:
) -> methods.Looker40SDK:
"""Default dependency configuration"""
settings = (
_settings(config_file, section) if config_settings is None else config_settings
)
settings.is_configured()
settings.headers['User-Agent'] = f"Looker Deployer {version.__version__}"
transport = requests_transport.RequestsTransport.configure(settings)
return methods.Looker31SDK(
auth_session.AuthSession(settings, transport, serialize.deserialize31, "3.1"),
serialize.deserialize31,
serialize.serialize31,
return methods.Looker40SDK(
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we also note in the README that we are moving wholesale to API 4.0?

auth_session.AuthSession(settings, transport, serialize.deserialize40, "4.0"),
serialize.deserialize40,
serialize.serialize40,
transport,
"3.1",
"4.0",
)


Expand Down