diff --git a/Solutions/Commvault Security IQ/Analytic Rules/CommvaultSecurityIQ_Alert.yaml b/Solutions/Commvault Security IQ/Analytic Rules/CommvaultSecurityIQ_Alert.yaml index 1dcdb3f5139..16b2ad42544 100644 --- a/Solutions/Commvault Security IQ/Analytic Rules/CommvaultSecurityIQ_Alert.yaml +++ b/Solutions/Commvault Security IQ/Analytic Rules/CommvaultSecurityIQ_Alert.yaml @@ -1,7 +1,7 @@ id: 317e757e-c320-448e-8837-fc61a70fe609 -name: CommvaultSecurityIQ Alert +name: Commvault Cloud Alert description: | - 'This query identifies CommvaultSecurityIQ Alerts.' + 'This query identifies Alerts from Commvault Cloud.' severity: Medium status: Available requiredDataConnectors: [] @@ -25,5 +25,5 @@ query: | CommvaultSecurityIQ_CL | take 1000 entityMappings: null -version: 1.0.0 +version: 1.0.1 kind: Scheduled diff --git a/Solutions/Commvault Security IQ/Data Connectors/AzureFunctionCommvaultSecurityIQ/main.py b/Solutions/Commvault Security IQ/Data Connectors/AzureFunctionCommvaultSecurityIQ/main.py index 1a82b242c66..5cce0b211c6 100644 --- a/Solutions/Commvault Security IQ/Data Connectors/AzureFunctionCommvaultSecurityIQ/main.py +++ b/Solutions/Commvault Security IQ/Data Connectors/AzureFunctionCommvaultSecurityIQ/main.py @@ -20,10 +20,11 @@ blob_name = "timestamp" cs = os.environ.get('AzureWebJobsStorage') +backfill_days = int(os.environ.get('NumberOfDaysToBackfill', "2")) # number of days of events to backfill when no checkpoint exists customer_id = os.environ.get('AzureSentinelWorkspaceId','') shared_key = os.environ.get('AzureSentinelSharedKey') -verify = False + logAnalyticsUri = 'https://' + customer_id + '.ods.opinsights.azure.com' key_vault_name = os.environ.get("KeyVaultName","Commvault-Integration-KV") @@ -114,19 +115,36 @@ def main(mytimer: func.TimerRequest) -> None: secret_name = "access-token" qsdk_token = client.get_secret(secret_name).value headers["authtoken"] = "QSDK " + qsdk_token + + companyId_url = f"{url}/v2/WhoAmI" + company_response = requests.get(companyId_url, headers=headers) + if 
company_response.status_code == 200: + company_data_json = company_response.json() + logging.info(f"Company Response : {company_data_json}") + company_data = company_data_json.get("company", {}) + companyId = company_data.get("id") + audit_url = f"{url}/V4/Company/{companyId}/SecurityPartners/Register/6" + logging.info(f"Company Id : {companyId}") + audit_response = requests.put(audit_url, headers=headers) + if audit_response.status_code == 200: + logging.info(f"Audit Log request sent Successfully. Audit Response : {audit_response.json()}" ) + else: + logging.error(f"Failed to send Audit Log request with status code : {audit_response.status_code}") + else: + logging.error(f"Failed to get Company Id with status code : {company_response.status_code}") ustring = "/events?level=10&showInfo=false&showMinor=false&showMajor=true&showCritical=true&showAnomalous=true" f_url = url + ustring current_date = datetime.now(timezone.utc) to_time = int(current_date.timestamp()) fromtime = read_blob(cs, container_name, blob_name) if fromtime is None: - fromtime = int((current_date - timedelta(days=2)).timestamp()) + fromtime = int((current_date - timedelta(days=backfill_days)).timestamp()) logging.info("From Time : [{}] , since the time read from blob is None".format(fromtime)) else: fromtime_dt = datetime.fromtimestamp(fromtime, tz=timezone.utc) time_diff = current_date - fromtime_dt - if time_diff > timedelta(days=2): - updatedfromtime = int((current_date - timedelta(days=2)).timestamp()) + if time_diff > timedelta(days=backfill_days): + updatedfromtime = int((current_date - timedelta(days=backfill_days)).timestamp()) - logging.info("From Time : [{}] , since the time read from blob : [{}] is older than 2 days".format(updatedfromtime,fromtime)) + logging.info("From Time : [{}] , since the time read from blob : [{}] is older than {} days".format(updatedfromtime,fromtime,backfill_days)) fromtime = updatedfromtime elif time_diff < timedelta(minutes = 5): @@ -138,8 +156,9 @@ def main(mytimer: func.TimerRequest) -> None: logging.info("Starts at: [{}]".format(datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")))
event_endpoint = f"{f_url}&fromTime={fromtime}&toTime={to_time}" logging.info("Event endpoint : [{}]".format(event_endpoint)) - response = requests.get(event_endpoint, headers=headers, verify=verify) + response = requests.get(event_endpoint, headers=headers) logging.info("Response Status Code : " + str(response.status_code)) + if response.status_code == 200: events = response.json() logging.info("Events Data") @@ -292,7 +311,7 @@ def get_files_list(job_id) -> list: "advConfig": {"browseAdvancedConfigBrowseByJob": {"jobId": int(job_id)}} } f_url = url+"/DoBrowse" - response = requests.post(f_url, headers=headers, json=job_details_body, verify=verify) + response = requests.post(f_url, headers=headers, json=job_details_body) resp = response.json() browse_responses = resp.get("browseResponses", []) file_list = [] @@ -322,7 +341,7 @@ def get_subclient_content_list(subclient_id) -> dict: """ f_url = url + "/Subclient/" + str(subclient_id) - resp = requests.get(f_url, headers=headers, verify=verify).json() + resp = requests.get(f_url, headers=headers).json() resp = resp.get("subClientProperties", [{}])[0].get("content") return resp @@ -364,7 +383,7 @@ def get_job_details(job_id, url, headers): """ f_url = f"{url}/Job/{job_id}" - response = requests.get(f_url, headers=headers, verify=verify) + response = requests.get(f_url, headers=headers) data = response.json() if ("totalRecordsWithoutPaging" in data) and ( int(data["totalRecordsWithoutPaging"]) > 0 @@ -390,7 +409,7 @@ def get_user_details(client_name): """ f_url = f"{url}/Client/byName(clientName='{client_name}')" - response = requests.get(f_url, headers=headers, verify=False).json() + response = requests.get(f_url, headers=headers).json() user_id = response['clientProperties'][0]['clientProps']['securityAssociations']['associations'][0]['userOrGroup'][0]['userId'] user_name = response['clientProperties'][0]['clientProps']['securityAssociations']['associations'][0]['userOrGroup'][0]['userName'] return user_id, user_name 
@@ -625,8 +644,11 @@ def upload_timestamp_blob(connection_string, container_name, blob_name, timestam timestamp_str = str(timestamp) blob_service_client = BlobServiceClient.from_connection_string(connection_string) - + container_client = blob_service_client.get_container_client(container_name) + + if not container_client.exists(): + container_client.create_container() blob_client = container_client.get_blob_client(blob_name) diff --git a/Solutions/Commvault Security IQ/Data Connectors/CommvaultSecurityIQDataConnector.zip b/Solutions/Commvault Security IQ/Data Connectors/CommvaultSecurityIQDataConnector.zip index 84d8897ce73..77376d894c2 100644 Binary files a/Solutions/Commvault Security IQ/Data Connectors/CommvaultSecurityIQDataConnector.zip and b/Solutions/Commvault Security IQ/Data Connectors/CommvaultSecurityIQDataConnector.zip differ diff --git a/Solutions/Commvault Security IQ/Data/Solution_Commvault Security IQ.json b/Solutions/Commvault Security IQ/Data/Solution_Commvault Security IQ.json index 5a927343e88..11b2c888ff3 100644 --- a/Solutions/Commvault Security IQ/Data/Solution_Commvault Security IQ.json +++ b/Solutions/Commvault Security IQ/Data/Solution_Commvault Security IQ.json @@ -19,7 +19,7 @@ ], "Metadata": "SolutionMetadata.json", "BasePath": "C:\\GitHub\\Azure-Sentinel\\Solutions\\Commvault Security IQ", - "Version": "3.0.1", + "Version": "3.0.2", "TemplateSpec": true, "Is1Pconnector": false -} \ No newline at end of file +} diff --git a/Solutions/Commvault Security IQ/ReleaseNotes.md b/Solutions/Commvault Security IQ/ReleaseNotes.md index f1fd8a1e752..c650173f57f 100644 --- a/Solutions/Commvault Security IQ/ReleaseNotes.md +++ b/Solutions/Commvault Security IQ/ReleaseNotes.md @@ -1,4 +1,4 @@ | **Version** | **Date Modified (DD-MM-YYYY)** | **Change History** | |-------------|--------------------------------|---------------------------------------------| | 3.0.1 | 28-03-2024 | Adding **Data Connector** for Commvault Sentinel Integration| -| 3.0.0 | 
21-08-2023 | Initial Solution Release| \ No newline at end of file +| 3.0.0 | 21-08-2023 | Initial Solution Release|