From a0e68fe54847411dcc641a26b4c842d493b299a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mats-G=C3=B8ran=20Karlsen?= Date: Thu, 4 Apr 2019 15:37:19 +0200 Subject: [PATCH] Extend ElasticSearch and move elasticsearch version checks into extended class elasticsearch_test - test api calls against elasticsearch --- .editorconfig | 2 +- elastalert/elastalert.py | 70 +++++----------- elastalert/elasticsearchclient.py | 44 ++++++++++ elastalert/util.py | 18 +--- tests/base_test.py | 31 ++++--- tests/conftest.py | 37 +++++++- tests/create_index_test.py | 27 ------ tests/elasticsearch_test.py | 135 ++++++++++++++++++++++++++++++ 8 files changed, 254 insertions(+), 110 deletions(-) create mode 100644 elastalert/elasticsearchclient.py create mode 100644 tests/elasticsearch_test.py diff --git a/.editorconfig b/.editorconfig index d2c3bb958..f8eafe691 100644 --- a/.editorconfig +++ b/.editorconfig @@ -12,6 +12,6 @@ indent_size = 4 [Makefile] indent_style = tab -[{*.json,*.yml}] +[{*.json,*.yml,*.yaml}] indent_style = space indent_size = 2 diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 0d8df5f30..a0c51780d 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -123,7 +123,6 @@ def __init__(self, args): self.num_hits = 0 self.num_dupes = 0 self.current_es = None - self.current_es_addr = None self.buffer_time = self.conf['buffer_time'] self.silence_cache = {} self.rule_hashes = get_rule_hashes(self.conf, self.args.rule) @@ -134,7 +133,6 @@ def __init__(self, args): self.add_metadata_alert = self.conf.get('add_metadata_alert', False) self.writeback_es = elasticsearch_client(self.conf) - self._es_version = None remove = [] for rule in self.rules: @@ -145,28 +143,6 @@ def __init__(self, args): if self.args.silence: self.silence() - def get_version(self): - info = self.writeback_es.info() - return info['version']['number'] - - @property - def es_version(self): - if self._es_version is None: - self._es_version = self.get_version() - return 
self._es_version - - def is_atleastfive(self): - return int(self.es_version.split(".")[0]) >= 5 - - def is_atleastsix(self): - return int(self.es_version.split(".")[0]) >= 6 - - def is_atleastsixsix(self): - return float('.'.join(self.es_version.split(".")[:2])) >= 6.6 - - def is_atleastseven(self): - return int(self.es_version.split(".")[0]) >= 7 - @staticmethod def get_index(rule, starttime=None, endtime=None): """ Gets the index for a rule. If strftime is set and starttime and endtime @@ -289,7 +265,7 @@ def get_index_start(self, index, timestamp_field='@timestamp'): """ query = {'sort': {timestamp_field: {'order': 'asc'}}} try: - if self.is_atleastsixsix(): + if self.current_es.is_atleastsixsix(): # TODO use _source_includes=[...] instead when elasticsearch client supports this res = self.current_es.search(index=index, size=1, body=query, params={'_source_includes': timestamp_field}, ignore_unavailable=True) @@ -366,7 +342,7 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): to_ts_func=rule['dt_to_ts'], five=rule['five'], ) - if self.is_atleastsixsix(): + if self.current_es.is_atleastsixsix(): # TODO fix when elasticsearch client supports param _source_includes # Since _source_includes is not supported we must use params instead. 
# the value object in _source_includes is not automagically parsed into a legal @@ -395,7 +371,7 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): **extra_args ) - if self.is_atleastseven(): + if self.current_es.is_atleastseven(): self.total_hits = int(res['hits']['total']['value']) else: self.total_hits = int(res['hits']['total']) @@ -585,7 +561,7 @@ def get_hits_aggregation(self, rule, starttime, endtime, index, query_key, term_ else: payload = res['aggregations'] - if self.is_atleastseven(): + if self.current_es.is_atleastseven(): self.num_hits += res['hits']['total']['value'] else: self.num_hits += res['hits']['total'] @@ -678,14 +654,14 @@ def get_starttime(self, rule): """ sort = {'sort': {'@timestamp': {'order': 'desc'}}} query = {'filter': {'term': {'rule_name': '%s' % (rule['name'])}}} - if self.is_atleastfive(): + if self.writeback_es.is_atleastfive(): query = {'query': {'bool': query}} query.update(sort) try: - if self.is_atleastsix(): + if self.writeback_es.is_atleastsix(): index = self.get_six_index('elastalert_status') - if self.is_atleastsixsix(): + if self.writeback_es.is_atleastsixsix(): # TODO use _source_includes=[...] 
instead when elasticsearch client supports this res = self.writeback_es.search(index=index, doc_type='_doc', size=1, body=query, params={'_source_includes': 'endtime,rule_name'}) @@ -843,7 +819,7 @@ def enhance_filter(self, rule): else: query = " OR ".join(additional_terms) query_str_filter = {'query_string': {'query': query}} - if self.is_atleastfive(): + if self.writeback_es.is_atleastfive(): filters.append(query_str_filter) else: filters.append({'query': query_str_filter}) @@ -859,7 +835,6 @@ def run_rule(self, rule, endtime, starttime=None): """ run_start = time.time() self.current_es = elasticsearch_client(rule) - self.current_es_addr = (rule['es_host'], rule['es_port']) # If there are pending aggregate matches, try processing them for x in range(len(rule['agg_matches'])): @@ -984,7 +959,7 @@ def init_rule(self, new_rule, new=True): if 'top_count_keys' in new_rule and new_rule.get('raw_count_keys', True): if self.string_multi_field_name: string_multi_field_name = self.string_multi_field_name - elif self.is_atleastfive(): + elif self.writeback_es.is_atleastfive(): string_multi_field_name = '.keyword' else: string_multi_field_name = '.raw' @@ -1032,7 +1007,7 @@ def init_rule(self, new_rule, new=True): def modify_rule_for_ES5(new_rule): # Get ES version per rule rule_es = elasticsearch_client(new_rule) - if int(rule_es.info()['version']['number'].split(".")[0]) >= 5: + if rule_es.is_atleastfive(): new_rule['five'] = True else: new_rule['five'] = False @@ -1335,7 +1310,7 @@ def get_dashboard(self, rule, db_name): raise EAException("use_kibana_dashboard undefined") query = {'query': {'term': {'_id': db_name}}} try: - if self.is_atleastsixsix(): + if es.is_atleastsixsix(): # TODO use doc_type = _doc # TODO use _source_includes=[...] 
instead when elasticsearch client supports for this res = es.search(index='kibana-int', doc_type='dashboard', body=query, @@ -1533,7 +1508,7 @@ def writeback(self, doc_type, body): writeback_body['@timestamp'] = dt_to_ts(ts_now()) try: - if self.is_atleastsix(): + if self.writeback_es.is_atleastsix(): writeback_index = self.get_six_index(doc_type) res = self.writeback_es.index(index=writeback_index, doc_type='_doc', body=body) else: @@ -1554,13 +1529,13 @@ def find_recent_pending_alerts(self, time_limit): time_filter = {'range': {'alert_time': {'from': dt_to_ts(ts_now() - time_limit), 'to': dt_to_ts(ts_now())}}} sort = {'sort': {'alert_time': {'order': 'asc'}}} - if self.is_atleastfive(): + if self.writeback_es.is_atleastfive(): query = {'query': {'bool': {'must': inner_query, 'filter': time_filter}}} else: query = {'query': inner_query, 'filter': time_filter} query.update(sort) try: - if self.is_atleastsix(): + if self.writeback_es.is_atleastsix(): res = self.writeback_es.search(index=self.writeback_index, doc_type='_doc', body=query, size=1000) else: res = self.writeback_es.search(index=self.writeback_index, doc_type='elastalert', body=query, size=1000) @@ -1593,7 +1568,6 @@ def send_pending_alerts(self): # Set current_es for top_count_keys query self.current_es = elasticsearch_client(rule) - self.current_es_addr = (rule['es_host'], rule['es_port']) # Send the alert unless it's a future alert if ts_now() > ts_to_dt(alert_time): @@ -1616,7 +1590,7 @@ def send_pending_alerts(self): # Delete it from the index try: - if self.is_atleastsix(): + if self.writeback_es.is_atleastsix(): self.writeback_es.delete(index=self.writeback_index, doc_type='_doc', id=_id) else: self.writeback_es.delete(index=self.writeback_index, doc_type='elastalert', id=_id) @@ -1649,7 +1623,7 @@ def get_aggregated_matches(self, _id): query = {'query': {'query_string': {'query': 'aggregate_id:%s' % (_id)}}, 'sort': {'@timestamp': 'asc'}} matches = [] try: - if self.is_atleastsix(): + if 
self.writeback_es.is_atleastsix(): res = self.writeback_es.search(index=self.writeback_index, doc_type='_doc', body=query, size=self.max_aggregation) else: @@ -1657,7 +1631,7 @@ def get_aggregated_matches(self, _id): size=self.max_aggregation) for match in res['hits']['hits']: matches.append(match['_source']) - if self.is_atleastsix(): + if self.writeback_es.is_atleastsix(): self.writeback_es.delete(index=self.writeback_index, doc_type='_doc', id=match['_id']) else: self.writeback_es.delete(index=self.writeback_index, doc_type='elastalert', id=match['_id']) @@ -1672,11 +1646,11 @@ def find_pending_aggregate_alert(self, rule, aggregation_key_value=None): 'must_not': [{'exists': {'field': 'aggregate_id'}}]}}} if aggregation_key_value: query['filter']['bool']['must'].append({'term': {'aggregation_key': aggregation_key_value}}) - if self.is_atleastfive(): + if self.writeback_es.is_atleastfive(): query = {'query': {'bool': query}} query['sort'] = {'alert_time': {'order': 'desc'}} try: - if self.is_atleastsix(): + if self.writeback_es.is_atleastsix(): res = self.writeback_es.search(index=self.writeback_index, doc_type='_doc', body=query, size=1) else: res = self.writeback_es.search(index=self.writeback_index, doc_type='elastalert', body=query, size=1) @@ -1811,16 +1785,16 @@ def is_silenced(self, rule_name): return False query = {'term': {'rule_name': rule_name}} sort = {'sort': {'until': {'order': 'desc'}}} - if self.is_atleastfive(): + if self.writeback_es.is_atleastfive(): query = {'query': query} else: query = {'filter': query} query.update(sort) try: - if self.is_atleastsix(): + if self.writeback_es.is_atleastsix(): index = self.get_six_index('silence') - if self.is_atleastsixsix(): + if self.writeback_es.is_atleastsixsix(): # TODO use _source_includes=[...] 
instead when elasticsearch client supports this res = self.writeback_es.search(index=index, doc_type='_doc', size=1, body=query, params={'_source_includes': 'until,exponent'}) diff --git a/elastalert/elasticsearchclient.py b/elastalert/elasticsearchclient.py new file mode 100644 index 000000000..f8f8061bf --- /dev/null +++ b/elastalert/elasticsearchclient.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +from elasticsearch import Elasticsearch, RequestsHttpConnection + + +class ElasticSearchClient(Elasticsearch): + + def __init__(self, conf): + super(ElasticSearchClient, self).__init__(host=conf['es_host'], + port=conf['es_port'], + url_prefix=conf['es_url_prefix'], + use_ssl=conf['use_ssl'], + verify_certs=conf['verify_certs'], + ca_certs=conf['ca_certs'], + connection_class=RequestsHttpConnection, + http_auth=conf['http_auth'], + timeout=conf['es_conn_timeout'], + send_get_body_as=conf['send_get_body_as'], + client_cert=conf['client_cert'], + client_key=conf['client_key']) + self._conf = conf + self._es_version = None + + @property + def conf(self): + return self._conf + + @property + def es_version(self): + if self._es_version is None: + self._es_version = self.info()['version']['number'] + return self._es_version + + def is_atleastfive(self): + return int(self.es_version.split(".")[0]) >= 5 + + def is_atleastsix(self): + return int(self.es_version.split(".")[0]) >= 6 + + def is_atleastsixsix(self): + major, minor = map(int, self.es_version.split(".")[:2]) + return major > 6 or (major == 6 and minor >= 6) + + def is_atleastseven(self): + return int(self.es_version.split(".")[0]) >= 7 diff --git a/elastalert/util.py b/elastalert/util.py index 33f0b4e71..47238cf6c 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -7,8 +7,7 @@ import dateutil.parser import dateutil.tz from auth import Auth -from elasticsearch import RequestsHttpConnection -from elasticsearch.client import Elasticsearch +import elasticsearchclient from six import string_types 
logging.basicConfig() @@ -281,7 +280,7 @@ def replace_dots_in_field_names(document): def elasticsearch_client(conf): - """ returns an Elasticsearch instance configured using an es_conn_config """ + """ returns an ElasticsearchClient instance configured using an es_conn_config """ es_conn_conf = build_es_conn_config(conf) auth = Auth() es_conn_conf['http_auth'] = auth(host=es_conn_conf['es_host'], @@ -290,18 +289,7 @@ def elasticsearch_client(conf): aws_region=es_conn_conf['aws_region'], profile_name=es_conn_conf['profile']) - return Elasticsearch(host=es_conn_conf['es_host'], - port=es_conn_conf['es_port'], - url_prefix=es_conn_conf['es_url_prefix'], - use_ssl=es_conn_conf['use_ssl'], - verify_certs=es_conn_conf['verify_certs'], - ca_certs=es_conn_conf['ca_certs'], - connection_class=RequestsHttpConnection, - http_auth=es_conn_conf['http_auth'], - timeout=es_conn_conf['es_conn_timeout'], - send_get_body_as=es_conn_conf['send_get_body_as'], - client_cert=es_conn_conf['client_cert'], - client_key=es_conn_conf['client_key']) + return elasticsearchclient.ElasticSearchClient(es_conn_conf) def build_es_conn_config(conf): diff --git a/tests/base_test.py b/tests/base_test.py index efd986d10..559ed1f85 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -1324,14 +1324,13 @@ def test_query_with_whitelist_filter_es(ea): in new_rule['filter'][-1]['query']['query_string']['query'] -def test_query_with_whitelist_filter_es_five(ea): - ea.es_version = '6.2' - ea.rules[0]['_source_enabled'] = False - ea.rules[0]['filter'] = [{'query_string': {'query': 'baz'}}] - ea.rules[0]['compare_key'] = "username" - ea.rules[0]['whitelist'] = ['xudan1', 'xudan12', 'aa1', 'bb1'] - new_rule = copy.copy(ea.rules[0]) - ea.init_rule(new_rule, True) +def test_query_with_whitelist_filter_es_five(ea_sixsix): + ea_sixsix.rules[0]['_source_enabled'] = False + ea_sixsix.rules[0]['filter'] = [{'query_string': {'query': 'baz'}}] + ea_sixsix.rules[0]['compare_key'] = "username" + 
ea_sixsix.rules[0]['whitelist'] = ['xudan1', 'xudan12', 'aa1', 'bb1'] + new_rule = copy.copy(ea_sixsix.rules[0]) + ea_sixsix.init_rule(new_rule, True) assert 'NOT username:"xudan1" AND NOT username:"xudan12" AND NOT username:"aa1"' in \ new_rule['filter'][-1]['query_string']['query'] @@ -1347,13 +1346,12 @@ def test_query_with_blacklist_filter_es(ea): new_rule['filter'][-1]['query']['query_string']['query'] -def test_query_with_blacklist_filter_es_five(ea): - ea.es_version = '6.2' - ea.rules[0]['_source_enabled'] = False - ea.rules[0]['filter'] = [{'query_string': {'query': 'baz'}}] - ea.rules[0]['compare_key'] = "username" - ea.rules[0]['blacklist'] = ['xudan1', 'xudan12', 'aa1', 'bb1'] - new_rule = copy.copy(ea.rules[0]) - ea.init_rule(new_rule, True) +def test_query_with_blacklist_filter_es_five(ea_sixsix): + ea_sixsix.rules[0]['_source_enabled'] = False + ea_sixsix.rules[0]['filter'] = [{'query_string': {'query': 'baz'}}] + ea_sixsix.rules[0]['compare_key'] = "username" + ea_sixsix.rules[0]['blacklist'] = ['xudan1', 'xudan12', 'aa1', 'bb1'] + new_rule = copy.copy(ea_sixsix.rules[0]) + ea_sixsix.init_rule(new_rule, True) assert 'username:"xudan1" OR username:"xudan12" OR username:"aa1"' in new_rule['filter'][-1]['query_string'][ 'query'] diff --git a/tests/conftest.py index 2f9f245f5..e58909d33 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,9 +11,6 @@ from elastalert.util import dt_to_ts from elastalert.util import ts_to_dt -mock_info = {'status': 200, 'name': 'foo', 'version': {'number': '2.0'}} -mock_sixsix_info = {'status': 200, 'name': 'foo', 'version': {'number': '6.6.0'}} def pytest_addoption(parser): parser.addoption( @@ -55,6 +52,7 @@ def __init__(self): class mock_es_client(object): def __init__(self, host='es', port=14900): + mock_info = {'status': 200, 'name': 'foo', 'version': {'number': '2.0'}} self.host = host self.port = port 
self.return_hits = [] @@ -66,9 +64,26 @@ def __init__(self, host='es', port=14900): self.ping = mock.Mock(return_value=True) self.indices = mock_es_indices_client() + @property + def es_version(self): + return self.info()['version']['number'] + + def is_atleastfive(self): + return False + + def is_atleastsix(self): + return False + + def is_atleastsixsix(self): + return False + + def is_atleastseven(self): + return False + class mock_es_sixsix_client(object): def __init__(self, host='es', port=14900): + mock_sixsix_info = {'status': 200, 'name': 'foo', 'version': {'number': '6.6.0'}} self.host = host self.port = port self.return_hits = [] @@ -80,6 +95,22 @@ def __init__(self, host='es', port=14900): self.ping = mock.Mock(return_value=True) self.indices = mock_es_indices_client() + @property + def es_version(self): + return self.info()['version']['number'] + + def is_atleastfive(self): + return True + + def is_atleastsix(self): + return True + + def is_atleastsixsix(self): + return True + + def is_atleastseven(self): + return False + class mock_ruletype(object): def __init__(self): diff --git a/tests/create_index_test.py b/tests/create_index_test.py index 4b0cc1287..ba306aee5 100644 --- a/tests/create_index_test.py +++ b/tests/create_index_test.py @@ -2,7 +2,6 @@ import json import pytest -from elasticsearch import Elasticsearch, RequestsHttpConnection import elastalert.create_index @@ -52,29 +51,3 @@ def test_read_es_6_index_mappings(): mappings = elastalert.create_index.read_es_index_mappings(6) assert len(mappings) == len(es_mappings) print(json.dumps(mappings, indent=2)) - - -@pytest.mark.elasticsearch -def test_create_indices(): - es = Elasticsearch(host='127.0.0.1', port=9200, connection_class=RequestsHttpConnection, timeout=10) - print(json.dumps(es.info()['version']['number'], indent=2)) - index = 'create_index' - elastalert.create_index.main(es_client=es, ea_index=index) - indices_mappings = es.indices.get_mapping(index + '*') - 
print(json.dumps(indices_mappings, indent=2)) - if es_major_version(es) > 5: - assert index in indices_mappings - assert index + '_error' in indices_mappings - assert index + '_status' in indices_mappings - assert index + '_silence' in indices_mappings - assert index + '_past' in indices_mappings - else: - assert 'elastalert' in indices_mappings[index]['mappings'] - assert 'elastalert_error' in indices_mappings[index]['mappings'] - assert 'elastalert_status' in indices_mappings[index]['mappings'] - assert 'silence' in indices_mappings[index]['mappings'] - assert 'past_elastalert' in indices_mappings[index]['mappings'] - - -def es_major_version(es): - return int(es.info()['version']['number'].split(".")[0]) diff --git a/tests/elasticsearch_test.py b/tests/elasticsearch_test.py new file mode 100644 index 000000000..3dcedb83e --- /dev/null +++ b/tests/elasticsearch_test.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +import datetime +import json +import time + +import dateutil +import mock +import pytest + +import elastalert.elasticsearchclient +import elastalert.create_index +import elastalert.elastalert +from elastalert.util import ts_to_dt, dt_to_ts, build_es_conn_config +from tests.conftest import mock_ruletype, mock_alert, mock_es_client + +test_index = 'test_index' + +es_host = '127.0.0.1' +es_port = 9200 +es_timeout = 10 + + +@pytest.fixture +def es_client(): + es_conn_config = build_es_conn_config({'es_host': es_host, 'es_port': es_port, 'es_conn_timeout': es_timeout}) + return elastalert.elasticsearchclient.ElasticSearchClient(es_conn_config) + + +@pytest.fixture +def ea(): + rules = [{'es_host': '', + 'es_port': 14900, + 'name': 'anytest', + 'index': 'idx', + 'filter': [], + 'include': ['@timestamp'], + 'aggregation': datetime.timedelta(0), + 'realert': datetime.timedelta(0), + 'processed_hits': {}, + 'timestamp_field': '@timestamp', + 'match_enhancements': [], + 'rule_file': 'blah.yaml', + 'max_query_size': 10000, + 'ts_to_dt': ts_to_dt, + 'dt_to_ts': 
dt_to_ts, + '_source_enabled': True}] + conf = {'rules_folder': 'rules', + 'run_every': datetime.timedelta(minutes=10), + 'buffer_time': datetime.timedelta(minutes=5), + 'alert_time_limit': datetime.timedelta(hours=24), + 'es_host': es_host, + 'es_port': es_port, + 'es_conn_timeout': es_timeout, + 'writeback_index': test_index, + 'rules': rules, + 'max_query_size': 10000, + 'old_query_limit': datetime.timedelta(weeks=1), + 'disable_rules_on_error': False, + 'scroll_keepalive': '30s'} + elastalert.elastalert.elasticsearch_client = mock_es_client + with mock.patch('elastalert.elastalert.get_rule_hashes'): + with mock.patch('elastalert.elastalert.load_rules') as load_conf: + load_conf.return_value = conf + ea = elastalert.elastalert.ElastAlerter(['--pin_rules']) + ea.rules[0]['type'] = mock_ruletype() + ea.rules[0]['alert'] = [mock_alert()] + ea.writeback_es = es_client() + ea.current_es = mock_es_client('', '') + return ea + + +@pytest.mark.elasticsearch +class TestElasticsearch: + def test_create_indices(self, es_client): + elastalert.create_index.main(es_client=es_client, ea_index=test_index) + indices_mappings = es_client.indices.get_mapping(test_index + '*') + print('-' * 50) + print(json.dumps(indices_mappings, indent=2)) + print('-' * 50) + if es_client.is_atleastsix(): + assert test_index in indices_mappings + assert test_index + '_error' in indices_mappings + assert test_index + '_status' in indices_mappings + assert test_index + '_silence' in indices_mappings + assert test_index + '_past' in indices_mappings + else: + assert 'elastalert' in indices_mappings[test_index]['mappings'] + assert 'elastalert_error' in indices_mappings[test_index]['mappings'] + assert 'elastalert_status' in indices_mappings[test_index]['mappings'] + assert 'silence' in indices_mappings[test_index]['mappings'] + assert 'past_elastalert' in indices_mappings[test_index]['mappings'] + + def test_aggregated_alert(self, ea): + match_timestamp = 
datetime.datetime.now(tz=dateutil.tz.tzutc()).replace(microsecond=0) + datetime.timedelta( + days=1) + ea.rules[0]['aggregate_by_match_time'] = True + match = {'@timestamp': match_timestamp, + 'num_hits': 0, + 'num_matches': 3 + } + res = ea.add_aggregated_alert(match, ea.rules[0]) + if ea.writeback_es.is_atleastsix(): + assert res['result'] == 'created' + else: + assert res['created'] is True + # Make sure added data is available for querying + time.sleep(2) + # Now lets find the pending aggregated alert + assert ea.find_pending_aggregate_alert(ea.rules[0]) + + def test_silenced(self, ea): + until_timestamp = datetime.datetime.now(tz=dateutil.tz.tzutc()).replace(microsecond=0) + datetime.timedelta( + days=1) + res = ea.set_realert(ea.rules[0]['name'], until_timestamp, 0) + if ea.writeback_es.is_atleastsix(): + assert res['result'] == 'created' + else: + assert res['created'] is True + # Make sure added data is available for querying + time.sleep(2) + # Force lookup in elasticsearch + ea.silence_cache = {} + # Now lets check if our rule is reported as silenced + assert ea.is_silenced(ea.rules[0]['name']) + + def test_get_hits(self, ea, es_client): + start = datetime.datetime.now(tz=dateutil.tz.tzutc()).replace(microsecond=0) + end = start + datetime.timedelta(days=1) + ea.current_es = es_client + if ea.current_es.is_atleastfive(): + ea.rules[0]['five'] = True + else: + ea.rules[0]['five'] = False + hits = ea.get_hits(ea.rules[0], start, end, test_index) + assert isinstance(hits, list)