From fbce0fb0ef9a65f57d15188f6d28f33be03fac22 Mon Sep 17 00:00:00 2001 From: Moises Cruz Date: Tue, 21 Apr 2020 20:07:44 +0200 Subject: [PATCH 01/83] agg_type percentiles fixes 2713 --- elastalert/ruletypes.py | 36 +++++++++++++++++++++++++++++------- elastalert/schema.yaml | 6 ++++-- 2 files changed, 33 insertions(+), 9 deletions(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 2f1d2f82c..85c08a4af 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -1026,6 +1026,7 @@ class MetricAggregationRule(BaseAggregationRule): """ A rule that matches when there is a low number of events given a timeframe. """ required_options = frozenset(['metric_agg_key', 'metric_agg_type']) allowed_aggregations = frozenset(['min', 'max', 'avg', 'sum', 'cardinality', 'value_count']) + allowed_percent_aggregations = frozenset(['percentiles']) def __init__(self, *args): super(MetricAggregationRule, self).__init__(*args) @@ -1035,8 +1036,10 @@ def __init__(self, *args): self.metric_key = 'metric_' + self.rules['metric_agg_key'] + '_' + self.rules['metric_agg_type'] - if not self.rules['metric_agg_type'] in self.allowed_aggregations: + if not self.rules['metric_agg_type'] in self.allowed_aggregations.union(self.allowed_percent_aggregations): raise EAException("metric_agg_type must be one of %s" % (str(self.allowed_aggregations))) + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations and self.rules['percentile_range'] is None: + raise EAException("percentile_range must be specified for percentiles aggregation") self.rules['aggregation_query_element'] = self.generate_aggregation_query() @@ -1051,14 +1054,20 @@ def get_match_str(self, match): return message def generate_aggregation_query(self): - return {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + query = {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + query[self.metric_key][self.rules['metric_agg_type']]['percents'] = [self.rules['percentile_range']] + return query def check_matches(self, timestamp, query_key, aggregation_data): if "compound_query_key" in self.rules: self.check_matches_recursive(timestamp, query_key, aggregation_data, self.rules['compound_query_key'], dict()) else: - metric_val = aggregation_data[self.metric_key]['value'] + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + metric_val = list(aggregation_data[self.metric_key]['values'].values())[0] + else: + metric_val = aggregation_data[self.metric_key]['value'] if self.crossed_thresholds(metric_val): match = {self.rules['timestamp_field']: timestamp, self.metric_key: metric_val} @@ -1106,6 +1115,7 @@ class SpikeMetricAggregationRule(BaseAggregationRule, SpikeRule): """ A rule that matches when there is a spike in an aggregated event compared to its reference point """ required_options = frozenset(['metric_agg_key', 'metric_agg_type', 'spike_height', 'spike_type']) allowed_aggregations = frozenset(['min', 'max', 'avg', 'sum', 'cardinality', 'value_count']) + allowed_percent_aggregations = frozenset(['percentiles']) def __init__(self, *args): # We inherit everything from BaseAggregation and Spike, overwrite only what we need in functions below @@ -1113,8 +1123,11 @@ def __init__(self, *args): # MetricAgg alert things self.metric_key = 'metric_' + self.rules['metric_agg_key'] + '_' + self.rules['metric_agg_type'] - if not self.rules['metric_agg_type'] in 
self.allowed_aggregations: + + if not self.rules['metric_agg_type'] in self.allowed_aggregations.union(self.allowed_percent_aggregations): raise EAException("metric_agg_type must be one of %s" % (str(self.allowed_aggregations))) + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations and self.rules['percentile_range'] is None: + raise EAException("percentile_range must be specified for percentiles aggregation") # Disabling bucket intervals (doesn't make sense in context of spike to split up your time period) if self.rules.get('bucket_interval'): @@ -1126,7 +1139,10 @@ def generate_aggregation_query(self): """Lifted from MetricAggregationRule, added support for scripted fields""" if self.rules.get('metric_agg_script'): return {self.metric_key: {self.rules['metric_agg_type']: self.rules['metric_agg_script']}} - return {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + query = {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + query[self.metric_key][self.rules['metric_agg_type']]['percents'] = [self.rules['percentile_range']] + return query def add_aggregation_data(self, payload): """ @@ -1140,7 +1156,10 @@ def add_aggregation_data(self, payload): else: # no time / term split, just focus on the agg event = {self.ts_field: timestamp} - agg_value = payload_data[self.metric_key]['value'] + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + agg_value = list(payload_data[self.metric_key]['values'].values())[0] + else: + agg_value = payload_data[self.metric_key]['value'] self.handle_event(event, agg_value, 'all') return @@ -1160,7 +1179,10 @@ def unwrap_term_buckets(self, timestamp, term_buckets, qk=[]): continue qk_str = ','.join(qk) - agg_value = term_data[self.metric_key]['value'] + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + agg_value = list(term_data[self.metric_key]['values'].values())[0] + else: + agg_value = term_data[self.metric_key]['value'] event = {self.ts_field: timestamp, self.rules['query_key']: qk_str} # pass to SpikeRule's tracker diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index cc5d52395..a5b00a5fa 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -110,7 +110,7 @@ oneOf: type: {enum: [spike_aggregation]} spike_height: {type: number} spike_type: {enum: ["up", "down", "both"]} - metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count"]} + metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count", "percentiles"]} timeframe: *timeframe use_count_query: {type: boolean} doc_type: {type: string} @@ -120,6 +120,7 @@ oneOf: threshold_ref: {type: number} threshold_cur: {type: number} min_doc_count: {type: integer} + percentile_range: {type: integer} - title: Flatline required: [threshold, timeframe] @@ -153,8 +154,9 @@ oneOf: required: [metric_agg_key,metric_agg_type] properties: type: {enum: [metric_aggregation]} - metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count"]} + metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count", "percentiles"]} #timeframe: *timeframe + percentile_range: {type: integer} - title: Percentage Match required: [match_bucket_filter] From b9963d9e1bd2498bdf76fd2f10e7e71a0181e468 Mon Sep 17 00:00:00 2001 From: Minogiannis Grigoris Date: Fri, 24 Apr 2020 20:17:10 +0300 Subject: [PATCH 02/83] Adjusting 
elastalert/ruletypes.py so that the functions 'append' and 'append_middle' take into account the scenario whereby an event is None --- elastalert/ruletypes.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 2f1d2f82c..1ccb85d8d 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -317,6 +317,8 @@ def append(self, event): """ Add an event to the window. Event should be of the form (dict, count). This will also pop the oldest events and call onRemoved on them until the window size is less than timeframe. """ + if not event or not event[1]: + return self self.data.add(event) self.running_count += event[1] @@ -357,6 +359,8 @@ def __iter__(self): def append_middle(self, event): """ Attempt to place the event in the correct location in our deque. Returns True if successful, otherwise False. """ + if not event or not event[1]: + return self rotation = 0 ts = self.get_ts(event) From 50bcd948460438dc111b309e231a340248b5701d Mon Sep 17 00:00:00 2001 From: Minogiannis Grigoris Date: Fri, 24 Apr 2020 20:47:38 +0300 Subject: [PATCH 03/83] Adjusting elastalert/ruletypes.py so that the functions 'append' and 'append_middle' take into account the scenario whereby an event is None --- elastalert/ruletypes.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 1ccb85d8d..00ac4c621 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -317,15 +317,15 @@ def append(self, event): """ Add an event to the window. Event should be of the form (dict, count). This will also pop the oldest events and call onRemoved on them until the window size is less than timeframe. """ - if not event or not event[1]: - return self self.data.add(event) - self.running_count += event[1] + if event and event[1]: + self.running_count += event[1] while self.duration() >= self.timeframe: oldest = self.data[0] self.data.remove(oldest) - self.running_count -= oldest[1] + if oldest and oldest[1]: + self.running_count -= oldest[1] self.onRemoved and self.onRemoved(oldest) def duration(self): @@ -359,15 +359,14 @@ def __iter__(self): def append_middle(self, event): """ Attempt to place the event in the correct location in our deque. Returns True if successful, otherwise False. 
""" - if not event or not event[1]: - return self rotation = 0 ts = self.get_ts(event) # Append left if ts is earlier than first event if self.get_ts(self.data[0]) > ts: self.data.appendleft(event) - self.running_count += event[1] + if event and event[1]: + self.running_count += event[1] return # Rotate window until we can insert event @@ -378,7 +377,8 @@ def append_middle(self, event): # This should never happen return self.data.append(event) - self.running_count += event[1] + if event and event[1]: + self.running_count += event[1] self.data.rotate(-rotation) From 26820748fa14bdb1b161edb04588d9803d45d275 Mon Sep 17 00:00:00 2001 From: balusarakesh Date: Fri, 29 May 2020 15:26:45 -0700 Subject: [PATCH 04/83] fix aggregate_id search syntax --- elastalert/elastalert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 24b10ced9..1ce995c69 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -1722,7 +1722,7 @@ def get_aggregated_matches(self, _id): """ Removes and returns all matches from writeback_es that have aggregate_id == _id """ # XXX if there are more than self.max_aggregation matches, you have big alerts and we will leave entries in ES. - query = {'query': {'query_string': {'query': 'aggregate_id:%s' % (_id)}}, 'sort': {'@timestamp': 'asc'}} + query = {'query': {'query_string': {'query': 'aggregate_id:"%s"' % (_id)}}, 'sort': {'@timestamp': 'asc'}} matches = [] try: if self.writeback_es.is_atleastsixtwo(): From b267617d54f6ca8d91a0231f877664c6efc93f13 Mon Sep 17 00:00:00 2001 From: balusarakesh Date: Fri, 29 May 2020 15:33:37 -0700 Subject: [PATCH 05/83] fix aggregate_id test --- tests/base_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/base_test.py b/tests/base_test.py index 92dc35f7e..81724e729 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -427,8 +427,8 @@ def test_agg_matchtime(ea): call4 = ea.writeback_es.deprecated_search.call_args_list[10][1]['body'] assert 'alert_time' in call2['filter']['range'] - assert call3['query']['query_string']['query'] == 'aggregate_id:ABCD' - assert call4['query']['query_string']['query'] == 'aggregate_id:CDEF' + assert call3['query']['query_string']['query'] == 'aggregate_id:"ABCD"' + assert call4['query']['query_string']['query'] == 'aggregate_id:"CDEF"' assert ea.writeback_es.deprecated_search.call_args_list[9][1]['size'] == 1337 From 876d0e0c7d09a4fd9bb85e4f3caad393adc6346d Mon Sep 17 00:00:00 2001 From: balusarakesh Date: Mon, 1 Jun 2020 08:59:25 -0700 Subject: [PATCH 06/83] add quotes for search terms in tests --- tests/base_test.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/base_test.py b/tests/base_test.py index 81724e729..edd37f346 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -400,7 +400,7 @@ def test_agg_matchtime(ea): assert call2['match_body']['@timestamp'] == '2014-09-26T12:40:45' assert not call2['alert_sent'] - assert call2['aggregate_id'] == 'ABCD' + assert call2['aggregate_id'] == '"ABCD"' assert call3['match_body']['@timestamp'] == '2014-09-26T12:47:45' assert not call3['alert_sent'] @@ -455,11 +455,11 @@ def test_agg_not_matchtime(ea): assert call2['match_body']['@timestamp'] == '2014-09-26T12:40:45' assert not call2['alert_sent'] - assert call2['aggregate_id'] == 'ABCD' + assert call2['aggregate_id'] == '"ABCD"' assert call3['match_body']['@timestamp'] == '2014-09-26T12:47:45' assert not call3['alert_sent'] - assert 
call3['aggregate_id'] == 'ABCD' + assert call3['aggregate_id'] == '"ABCD"' def test_agg_cron(ea): @@ -491,7 +491,7 @@ def test_agg_cron(ea): assert call2['match_body']['@timestamp'] == '2014-09-26T12:40:45' assert not call2['alert_sent'] - assert call2['aggregate_id'] == 'ABCD' + assert call2['aggregate_id'] == '"ABCD"' assert call3['match_body']['@timestamp'] == '2014-09-26T12:47:45' assert call3['alert_time'] == alerttime2 @@ -569,7 +569,7 @@ def test_agg_with_aggregation_key(ea): assert not call3['alert_sent'] # Call3 should have it's aggregate_id set to call1's _id # It should also have the same alert_time as call1 - assert call3['aggregate_id'] == 'ABCD' + assert call3['aggregate_id'] == '"ABCD"' assert 'aggregation_key' in call3 assert call3['aggregation_key'] == 'Key Value 1' assert call3['alert_time'] == dt_to_ts(match_time + datetime.timedelta(minutes=10)) @@ -596,8 +596,8 @@ def test_agg_with_aggregation_key(ea): call4 = ea.writeback_es.deprecated_search.call_args_list[10][1]['body'] assert 'alert_time' in call2['filter']['range'] - assert call3['query']['query_string']['query'] == 'aggregate_id:ABCD' - assert call4['query']['query_string']['query'] == 'aggregate_id:CDEF' + assert call3['query']['query_string']['query'] == 'aggregate_id:"ABCD"' + assert call4['query']['query_string']['query'] == 'aggregate_id:"CDEF"' assert ea.writeback_es.deprecated_search.call_args_list[9][1]['size'] == 1337 From 6b7caa484696d025e29f15d56e20711c62613583 Mon Sep 17 00:00:00 2001 From: balusarakesh Date: Mon, 1 Jun 2020 09:06:41 -0700 Subject: [PATCH 07/83] removing quotes for search terms in tests --- tests/base_test.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/base_test.py b/tests/base_test.py index edd37f346..b86498b1d 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -400,7 +400,7 @@ def test_agg_matchtime(ea): assert call2['match_body']['@timestamp'] == '2014-09-26T12:40:45' assert not call2['alert_sent'] - assert call2['aggregate_id'] == '"ABCD"' + assert call2['aggregate_id'] == 'ABCD' assert call3['match_body']['@timestamp'] == '2014-09-26T12:47:45' assert not call3['alert_sent'] @@ -455,11 +455,11 @@ def test_agg_not_matchtime(ea): assert call2['match_body']['@timestamp'] == '2014-09-26T12:40:45' assert not call2['alert_sent'] - assert call2['aggregate_id'] == '"ABCD"' + assert call2['aggregate_id'] == 'ABCD' assert call3['match_body']['@timestamp'] == '2014-09-26T12:47:45' assert not call3['alert_sent'] - assert call3['aggregate_id'] == '"ABCD"' + assert call3['aggregate_id'] == 'ABCD' def test_agg_cron(ea): @@ -491,7 +491,7 @@ def test_agg_cron(ea): assert call2['match_body']['@timestamp'] == '2014-09-26T12:40:45' assert not call2['alert_sent'] - assert call2['aggregate_id'] == '"ABCD"' + assert call2['aggregate_id'] == 'ABCD' assert call3['match_body']['@timestamp'] == '2014-09-26T12:47:45' assert call3['alert_time'] == alerttime2 @@ -569,7 +569,7 @@ def test_agg_with_aggregation_key(ea): assert not call3['alert_sent'] # Call3 should have it's aggregate_id set to call1's _id # It should also have the same alert_time as call1 - assert call3['aggregate_id'] == '"ABCD"' + assert call3['aggregate_id'] == 'ABCD' assert 'aggregation_key' in call3 assert call3['aggregation_key'] == 'Key Value 1' assert call3['alert_time'] == dt_to_ts(match_time + datetime.timedelta(minutes=10)) From f98a94092e55ee465fb1186ffb63055d864e78b3 Mon Sep 17 00:00:00 2001 From: balusarakesh Date: Mon, 1 Jun 2020 09:14:46 -0700 Subject: [PATCH 08/83] fixed CI error 
--- elastalert/alerts.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index a453081fb..d3fa7518f 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -794,9 +794,9 @@ def alert(self, matches): except JIRAError as e: logging.exception("Error while commenting on ticket %s: %s" % (ticket, e)) if self.labels: - for l in self.labels: + for label in self.labels: try: - ticket.fields.labels.append(l) + ticket.fields.labels.append(label) except JIRAError as e: logging.exception("Error while appending labels to ticket %s: %s" % (ticket, e)) if self.transition: From a107491800ffd3e666d2dd570aefa952e197d280 Mon Sep 17 00:00:00 2001 From: Swapnil Suryawanshi Date: Tue, 23 Jun 2020 15:20:45 +0530 Subject: [PATCH 09/83] fix attribute error is raised when query ran for future --- elastalert/elastalert.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 24b10ced9..f99a3229b 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -870,15 +870,16 @@ def run_rule(self, rule, endtime, starttime=None): rule['original_starttime'] = rule['starttime'] rule['scrolling_cycle'] = 0 + self.thread_data.num_hits = 0 + self.thread_data.num_dupes = 0 + self.thread_data.cumulative_hits = 0 + # Don't run if starttime was set to the future if ts_now() <= rule['starttime']: logging.warning("Attempted to use query start time in the future (%s), sleeping instead" % (starttime)) return 0 # Run the rule. If querying over a large time period, split it up into segments - self.thread_data.num_hits = 0 - self.thread_data.num_dupes = 0 - self.thread_data.cumulative_hits = 0 segment_size = self.get_segment_size(rule) tmp_endtime = rule['starttime'] From 62924c4b2e7ecf071bb8cde46a726768df7d3ab0 Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Thu, 1 Oct 2020 17:06:32 -0400 Subject: [PATCH 10/83] Add support for custom_details in the PagerDuty alerter v2 module --- docs/source/ruletypes.rst | 5 +++++ elastalert/alerts.py | 13 ++++++++++--- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index ff3763712..0823c82eb 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1915,6 +1915,11 @@ See https://v2.developer.pagerduty.com/docs/send-an-event-events-api-v2 ``pagerduty_v2_payload_source_args``: If set, and ``pagerduty_v2_payload_source`` is a formattable string, Elastalert will format the source based on the provided array of fields from the rule or match. +``pagerduty_v2_payload_custom_details``: List of keys:values to use as the content of the custom_details payload. Example - ip:clientip will map the value from the clientip index of Elasticsearch to JSON key named ip. + +``pagerduty_v2_payload_include_all_info``: If True, this will include the entire Elasticsearch document as a custom detail field called "information" in the PagerDuty alert. 
+ + PagerTree ~~~~~~~~~ diff --git a/elastalert/alerts.py b/elastalert/alerts.py index d3fa7518f..cca59b0d5 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1357,6 +1357,8 @@ def __init__(self, rule): self.pagerduty_v2_payload_severity = self.rule.get('pagerduty_v2_payload_severity', 'critical') self.pagerduty_v2_payload_source = self.rule.get('pagerduty_v2_payload_source', 'ElastAlert') self.pagerduty_v2_payload_source_args = self.rule.get('pagerduty_v2_payload_source_args', None) + self.pagerduty_v2_payload_custom_details = self.rule.get('pagerduty_v2_payload_custom_details', {}) + self.pagerduty_v2_payload_include_all_info = self.rule.get('pagerduty_v2_payload_include_all_info', True) if self.pagerduty_api_version == 'v2': self.url = 'https://events.pagerduty.com/v2/enqueue' @@ -1369,6 +1371,13 @@ def alert(self, matches): # post to pagerduty headers = {'content-type': 'application/json'} if self.pagerduty_api_version == 'v2': + + custom_details_payload = {'information': body} if self.pagerduty_v2_payload_include_all_info else {} + if self.pagerduty_v2_payload_custom_details: + for match in matches: + for custom_details_key, es_key in list(self.pagerduty_v2_payload_custom_details.items()): + custom_details_payload[custom_details_key] = lookup_es_key(match, es_key) + payload = { 'routing_key': self.pagerduty_service_key, 'event_action': self.pagerduty_event_type, @@ -1389,9 +1398,7 @@ def alert(self, matches): self.pagerduty_v2_payload_source_args, matches), 'summary': self.create_title(matches), - 'custom_details': { - 'information': body, - }, + 'custom_details': custom_details_payload, }, } match_timestamp = lookup_es_key(matches[0], self.rule.get('timestamp_field', '@timestamp')) From a9f0d1d8488b02e9e2d96a65d49d70fc12733f0a Mon Sep 17 00:00:00 2001 From: Dennis Boone Date: Wed, 28 Oct 2020 15:19:49 -0400 Subject: [PATCH 11/83] Fix for the mapping error reported in #2899. --- elastalert/es_mappings/6/elastalert.json | 1 + 1 file changed, 1 insertion(+) diff --git a/elastalert/es_mappings/6/elastalert.json b/elastalert/es_mappings/6/elastalert.json index 645a67762..2cc97bcfb 100644 --- a/elastalert/es_mappings/6/elastalert.json +++ b/elastalert/es_mappings/6/elastalert.json @@ -29,6 +29,7 @@ "format": "dateOptionalTime" }, "match_body": { + "enabled": "false", "type": "object" }, "aggregate_id": { From c2a01517b0b0d694233ec695eec3dce0fa601c01 Mon Sep 17 00:00:00 2001 From: plan-do-break-fix Date: Sun, 25 Apr 2021 19:15:07 -0500 Subject: [PATCH 12/83] fix(docs): corrects common typos in project README --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 99acc02e7..363eee0cd 100644 --- a/README.md +++ b/README.md @@ -150,7 +150,7 @@ Examples of different types of rules can be found in example_rules/. increases by a given factor. This example will send an email alert when there are 3 times more events matching a filter occurring within the last 2 hours than the number of events in the previous 2 hours. -- ``example_frequency.yaml`` is an example of the "frequency" rule type, which will alert when there are a given number of events occuring +- ``example_frequency.yaml`` is an example of the "frequency" rule type, which will alert when there are a given number of events occurring within a time period. This example will send an email when 50 documents matching a given filter occur within a 4 hour timeframe. 
- ``example_change.yaml`` is an example of the "change" rule type, which will alert when a certain field in two documents changes. In this example, @@ -267,7 +267,7 @@ status: ### How can I make the alert come at a certain time? -The ``aggregation`` feature will take every alert that has occured over a period of time and send them together in one alert. You can use cron style syntax to send all alerts that have occured since the last once by using +The ``aggregation`` feature will take every alert that has occurred over a period of time and send them together in one alert. You can use cron style syntax to send all alerts that have occurred since the last once by using ``` aggregation: @@ -290,7 +290,7 @@ buffer_time: minutes: 5 ``` -By default, ElastAlert will download every document in full before processing them. Instead, you can have ElastAlert simply get a count of the number of documents that have occured in between each query. To do this, set ``use_count_query: true``. This cannot be used if you use ``query_key``, because ElastAlert will not know the contents of each documents, just the total number of them. This also reduces the precision of alerts, because all events that occur between each query will be rounded to a single timestamp. +By default, ElastAlert will download every document in full before processing them. Instead, you can have ElastAlert simply get a count of the number of documents that have occurred in between each query. To do this, set ``use_count_query: true``. This cannot be used if you use ``query_key``, because ElastAlert will not know the contents of each documents, just the total number of them. This also reduces the precision of alerts, because all events that occur between each query will be rounded to a single timestamp. If you are using ``query_key`` (a single key, not multiple keys) you can use ``use_terms_query``. This will make ElastAlert perform a terms aggregation to get the counts for each value of a certain field. Both ``use_terms_query`` and ``use_count_query`` also require ``doc_type`` to be set to the ``_type`` of the documents. They may not be compatible with all rule types. 
From 06880b0d609592dff9865a04b88ad0c261580be9 Mon Sep 17 00:00:00 2001 From: Kat Kasianenko Date: Mon, 3 May 2021 10:41:09 +1000 Subject: [PATCH 13/83] Fix UnicodeEncodeError in PagerDutyAlerter --- elastalert/alerts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index f2f31853f..76772161a 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1416,7 +1416,7 @@ def alert(self, matches): try: response = requests.post( self.url, - data=json.dumps(payload, cls=DateTimeEncoder, ensure_ascii=False), + data=json.dumps(payload, cls=DateTimeEncoder, ensure_ascii=False).encode("utf-8"), headers=headers, proxies=proxies ) From 1143af15499f02e14561a5de23073d0e312a6916 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 23 Sep 2021 23:42:08 +0900 Subject: [PATCH 14/83] Fix travis-ci job error --- requirements.txt | 1 + setup.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 9c32052d0..17e805d46 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,6 +5,7 @@ boto3>=1.4.4 cffi>=1.11.5 configparser>=3.5.0 croniter>=0.3.16 +cryptography<3.4 elasticsearch>=7.0.0 envparse>=0.2.0 exotel>=0.1.3 diff --git a/setup.py b/setup.py index 2845836a7..56f053c2c 100644 --- a/setup.py +++ b/setup.py @@ -47,6 +47,7 @@ 'stomp.py>=4.1.17', 'texttable>=0.8.8', 'twilio>=6.0.0,<6.1', - 'cffi>=1.11.5' + 'cffi>=1.11.5', + 'cryptography<3.4' ] ) From 75d670ace405f89eb813e62e00581d9ee7c71e1b Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 23 Sep 2021 23:57:16 +0900 Subject: [PATCH 15/83] Fix docker test --- Dockerfile-test | 2 ++ Makefile | 2 +- requirements-dev.txt | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Dockerfile-test b/Dockerfile-test index 3c153e644..fb8a78409 100644 --- a/Dockerfile-test +++ b/Dockerfile-test @@ -1,6 +1,8 @@ FROM ubuntu:latest RUN apt-get update && apt-get upgrade -y +RUN apt-get install software-properties-common -y +RUN add-apt-repository ppa:deadsnakes/ppa RUN apt-get -y install build-essential python3.6 python3.6-dev python3-pip libssl-dev git WORKDIR /home/elastalert diff --git a/Makefile b/Makefile index 470062ce8..608c0bb8b 100644 --- a/Makefile +++ b/Makefile @@ -21,7 +21,7 @@ test-elasticsearch: test-docker: docker-compose --project-name elastalert build tox - docker-compose --project-name elastalert run tox + docker-compose --project-name elastalert run --rm tox clean: make -C docs clean diff --git a/requirements-dev.txt b/requirements-dev.txt index 558761d9e..d15887c01 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,4 +6,4 @@ pylint<1.4 pytest<3.3.0 setuptools sphinx_rtd_theme -tox<2.0 +tox==3.20.1 From b795543735e85021127570aa4b726cfa4cef4e39 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:00:44 +0900 Subject: [PATCH 16/83] Fix test_rule.py --- elastalert/test_rule.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/elastalert/test_rule.py b/elastalert/test_rule.py index 06100aa0f..af1eaa497 100644 --- a/elastalert/test_rule.py +++ b/elastalert/test_rule.py @@ -83,7 +83,7 @@ def test_file(self, conf, args): # Get one document for schema try: - res = es_client.search(index, size=1, body=query, ignore_unavailable=True) + res = es_client.search(index=index, size=1, body=query, ignore_unavailable=True) except Exception as e: print("Error running your filter:", file=sys.stderr) print(repr(e)[:2048], file=sys.stderr) @@ -109,7 +109,7 @@ def test_file(self, 
conf, args): five=conf['five'] ) try: - res = es_client.count(index, doc_type=doc_type, body=count_query, ignore_unavailable=True) + res = es_client.count(index=index, doc_type=doc_type, body=count_query, ignore_unavailable=True) except Exception as e: print("Error querying Elasticsearch:", file=sys.stderr) print(repr(e)[:2048], file=sys.stderr) @@ -153,7 +153,7 @@ def test_file(self, conf, args): # Download up to max_query_size (defaults to 10,000) documents to save if (args.save or args.formatted_output) and not args.count: try: - res = es_client.search(index, size=args.max_query_size, body=query, ignore_unavailable=True) + res = es_client.search(index=index, size=args.max_query_size, body=query, ignore_unavailable=True) except Exception as e: print("Error running your filter:", file=sys.stderr) print(repr(e)[:2048], file=sys.stderr) From 455604a612ac75d05977c2f80612df29339815bd Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:01:47 +0900 Subject: [PATCH 17/83] Fix Stomp --- elastalert/alerts.py | 1 - 1 file changed, 1 deletion(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index f2f31853f..46ac4cd4e 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -371,7 +371,6 @@ def alert(self, matches): conn = stomp.Connection([(self.stomp_hostname, self.stomp_hostport)], use_ssl=self.stomp_ssl) - conn.start() conn.connect(self.stomp_login, self.stomp_password) # Ensures that the CONNECTED frame is received otherwise, the disconnect call will fail. time.sleep(1) From 2a6f846152e67c4543760267dc52f930d81db696 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:03:34 +0900 Subject: [PATCH 18/83] Fix PagerTree --- elastalert/loaders.py | 1 + 1 file changed, 1 insertion(+) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 771194768..496b418a8 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -68,6 +68,7 @@ class RulesLoader(object): 'slack': alerts.SlackAlerter, 'mattermost': alerts.MattermostAlerter, 'pagerduty': alerts.PagerDutyAlerter, + 'pagertree': alerts.PagerTreeAlerter, 'exotel': alerts.ExotelAlerter, 'twilio': alerts.TwilioAlerter, 'victorops': alerts.VictorOpsAlerter, From db30b9cba733beac666fa45b271b866e9a3a8c8e Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:05:29 +0900 Subject: [PATCH 19/83] Fix LineNotify --- elastalert/loaders.py | 1 + 1 file changed, 1 insertion(+) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 496b418a8..f0351ab7f 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -78,6 +78,7 @@ class RulesLoader(object): 'servicenow': alerts.ServiceNowAlerter, 'alerta': alerts.AlertaAlerter, 'post': alerts.HTTPPostAlerter, + 'linenotify': alerts.LineNotifyAlerter, 'hivealerter': alerts.HiveAlerter } From 27f6800600bf1a0c6e8eab1c63ca048a002a2612 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:08:46 +0900 Subject: [PATCH 20/83] Update Docs for SNS --- docs/source/ruletypes.rst | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index ff3763712..79ff9dbf6 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1670,13 +1670,22 @@ SNS requires one option: Optional: -``aws_access_key``: An access key to connect to SNS with. +``aws_access_key_id``: An access key to connect to SNS with. -``aws_secret_key``: The secret key associated with the access key. 
+``aws_secret_access_key``: The secret key associated with the access key. ``aws_region``: The AWS region in which the SNS resource is located. Default is us-east-1 -``profile``: The AWS profile to use. If none specified, the default will be used. +``aws_profile``: The AWS profile to use. If none specified, the default will be used. + +Example usage:: + + alert: + - sns: + aws_region: 'us-east-1' # You must nest aws_region within your alert configuration so it is not used to sign AWS requests. + sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic' + aws_access_key_id: 'XXXXXXXXXXXXXXXXXX'' + aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY' HipChat ~~~~~~~ From a1dfc45babb8d4ea722cbeff1985e840ed034746 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:11:53 +0900 Subject: [PATCH 21/83] Fix Zabbix(Docs & schema.yaml) --- docs/source/ruletypes.rst | 2 +- elastalert/schema.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 79ff9dbf6..3f3065c81 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2251,4 +2251,4 @@ Required: ``zbx_sender_host``: The address where zabbix server is running. ``zbx_sender_port``: The port where zabbix server is listenning. ``zbx_host``: This field setup the host in zabbix that receives the value sent by Elastalert. -``zbx_item``: This field setup the item in the host that receives the value sent by Elastalert. +``zbx_key``: This field setup the key in the host that receives the value sent by Elastalert. diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 1241315dc..3473b9575 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -386,4 +386,4 @@ properties: zbx_sender_host: {type: string} zbx_sender_port: {type: integer} zbx_host: {type: string} - zbx_item: {type: string} + zbx_key: {type: string} From c7e5f4e771e4c0c753737892fe57e19408660090 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:13:41 +0900 Subject: [PATCH 22/83] Add tzlocal<3.0 --- requirements.txt | 1 + setup.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 17e805d46..5d12feaa6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -21,3 +21,4 @@ requests>=2.0.0 stomp.py>=4.1.17 texttable>=0.8.8 twilio==6.0.0 +tzlocal<3.0 \ No newline at end of file diff --git a/setup.py b/setup.py index 56f053c2c..4e60fbf5c 100644 --- a/setup.py +++ b/setup.py @@ -48,6 +48,7 @@ 'texttable>=0.8.8', 'twilio>=6.0.0,<6.1', 'cffi>=1.11.5', - 'cryptography<3.4' + 'cryptography<3.4', + 'tzlocal<3.0' ] ) From 55ff06fa07aa9dfd90d4e4bae8d7704f55db51a7 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:15:51 +0900 Subject: [PATCH 23/83] Change Library blist to sortedcontainers --- elastalert/ruletypes.py | 2 +- requirements.txt | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 2f1d2f82c..7a889e80a 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -3,7 +3,7 @@ import datetime import sys -from blist import sortedlist +from sortedcontainers import SortedKeyList as sortedlist from .util import add_raw_postfix from .util import dt_to_ts diff --git a/requirements.txt b/requirements.txt index 5d12feaa6..c8066fb5b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ apscheduler>=3.3.0 aws-requests-auth>=0.3.0 -blist>=1.3.6 +sortedcontainers>=2.2.2 
boto3>=1.4.4 cffi>=1.11.5 configparser>=3.5.0 diff --git a/setup.py b/setup.py index 4e60fbf5c..7584315e1 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ install_requires=[ 'apscheduler>=3.3.0', 'aws-requests-auth>=0.3.0', - 'blist>=1.3.6', + 'sortedcontainers>=2.2.2', 'boto3>=1.4.4', 'configparser>=3.5.0', 'croniter>=0.3.16', From e1e0b43f5d268ee8ce5bc42c89d5fc96f695eef6 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:18:41 +0900 Subject: [PATCH 24/83] Remove Duplicate Key in Schema YAML --- elastalert/schema.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 3473b9575..6dd03cb59 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -362,7 +362,6 @@ properties: alerta_origin: {type: string} # Python format string alerta_group: {type: string} # Python format string alerta_service: {type: array, items: {type: string}} # Python format string - alerta_service: {type: array, items: {type: string}} # Python format string alerta_correlate: {type: array, items: {type: string}} # Python format string alerta_tags: {type: array, items: {type: string}} # Python format string alerta_event: {type: string} # Python format string From f2e955bb87cb99fb6fb6694f17accf7838c340c2 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:22:05 +0900 Subject: [PATCH 25/83] Typo in example_rules/ssh.yaml --- example_rules/ssh.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example_rules/ssh.yaml b/example_rules/ssh.yaml index 7af890784..a7147217b 100644 --- a/example_rules/ssh.yaml +++ b/example_rules/ssh.yaml @@ -1,5 +1,5 @@ # Rule name, must be unique - name: SSH abuse (ElastAlert 3.0.1) - 2 +name: SSH abuse (ElastAlert 3.0.1) - 2 # Alert on x events in y seconds type: frequency From 28d36ce7cc6a535735fab7a8e03dbe2fcf48f39e Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:23:44 +0900 Subject: [PATCH 26/83] fix ruletypes.rst typo --- docs/source/ruletypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 3f3065c81..754c57964 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -938,7 +938,7 @@ Optional: ``field_value``: When set, uses the value of the field in the document and not the number of matching documents. This is useful to monitor for example a temperature sensor and raise an alarm if the temperature grows too fast. Note that the means of the field on the reference and current windows are used to determine if the ``spike_height`` value is reached. -Note also that the threshold parameters are ignored in this smode. +Note also that the threshold parameters are ignored in this mode. ``threshold_ref``: The minimum number of events that must exist in the reference window for an alert to trigger. 
For example, if From d986d8b658e0685a138cf9bb47498af37aa2fee4 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:26:11 +0900 Subject: [PATCH 27/83] Fixed the logging property in config.yaml.example --- config.yaml.example | 60 ++++++++++++++++++++++----------------------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/config.yaml.example b/config.yaml.example index 9d9176382..cc659a75f 100644 --- a/config.yaml.example +++ b/config.yaml.example @@ -78,38 +78,38 @@ alert_time_limit: # logline: # format: '%(asctime)s %(levelname)+8s %(name)+20s %(message)s' # -# handlers: -# console: -# class: logging.StreamHandler -# formatter: logline -# level: DEBUG -# stream: ext://sys.stderr +# handlers: +# console: +# class: logging.StreamHandler +# formatter: logline +# level: DEBUG +# stream: ext://sys.stderr # -# file: -# class : logging.FileHandler -# formatter: logline -# level: DEBUG -# filename: elastalert.log +# file: +# class : logging.FileHandler +# formatter: logline +# level: DEBUG +# filename: elastalert.log # -# loggers: -# elastalert: -# level: WARN -# handlers: [] -# propagate: true +# loggers: +# elastalert: +# level: WARN +# handlers: [] +# propagate: true # -# elasticsearch: -# level: WARN -# handlers: [] -# propagate: true +# elasticsearch: +# level: WARN +# handlers: [] +# propagate: true # -# elasticsearch.trace: -# level: WARN -# handlers: [] -# propagate: true +# elasticsearch.trace: +# level: WARN +# handlers: [] +# propagate: true # -# '': # root logger -# level: WARN -# handlers: -# - console -# - file -# propagate: false +# '': # root logger +# level: WARN +# handlers: +# - console +# - file +# propagate: false From be95b4c54db3d29600d88594fd8c499e9109b9f6 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:32:07 +0900 Subject: [PATCH 28/83] add opsgenie_addr to docs --- docs/source/ruletypes.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 754c57964..6a5dc8950 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1630,6 +1630,7 @@ Optional: ``opsgenie_account``: The OpsGenie account to integrate with. +``opsgenie_addr``: The OpsGenie URL to to connect against, default is ``https://api.opsgenie.com/v2/alerts``. If using the EU instance of Opsgenie, the URL needs to be ``https://api.eu.opsgenie.com/v2/alerts`` for requests to be successful. ``opsgenie_recipients``: A list OpsGenie recipients who will be notified by the alert. ``opsgenie_recipients_args``: Map of arguments used to format opsgenie_recipients. ``opsgenie_default_recipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccesful. From bacef7dd3bd00517f63e832fc021061bc37f4d34 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:36:00 +0900 Subject: [PATCH 29/83] added squadcast to README --- docs/source/ruletypes.rst | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 6a5dc8950..acb96af6f 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1688,6 +1688,21 @@ Example usage:: aws_access_key_id: 'XXXXXXXXXXXXXXXXXX'' aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY' +Squadcast +~~~~~~~~~ + +Alerts can be sent to Squadcast using the `http post` method described above and Squadcast will process it and send Phone, SMS, Email and Push notifications to the relevant person(s) and let them take actions. 
+ +Configuration variables in rules YAML file:: + + alert: post + http_post_url: + http_post_static_payload: + Title: + http_post_all_values: true + +For more details, you can refer the `Squadcast documentation `_. + HipChat ~~~~~~~ From 67e63ff8838ffa60eb7c34186b8e54eb8a538548 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:38:31 +0900 Subject: [PATCH 30/83] Remove duplicate property in example config file --- config.yaml.example | 1 - 1 file changed, 1 deletion(-) diff --git a/config.yaml.example b/config.yaml.example index cc659a75f..89db954be 100644 --- a/config.yaml.example +++ b/config.yaml.example @@ -48,7 +48,6 @@ es_port: 9200 # Use SSL authentication with client certificates client_cert must be # a pem file containing both cert and key for client -#verify_certs: True #ca_certs: /path/to/cacert.pem #client_cert: /path/to/client_cert.pem #client_key: /path/to/client_key.key From 92ccda954df39eb50d76be70f7a9825e55a09a3a Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:42:05 +0900 Subject: [PATCH 31/83] Remove new_style_string_format --- elastalert/alerts.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 46ac4cd4e..fca3dac4f 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -897,10 +897,6 @@ def __init__(self, *args): logging.warning('Warning! You could be vulnerable to shell injection!') self.rule['command'] = [self.rule['command']] - self.new_style_string_format = False - if 'new_style_string_format' in self.rule and self.rule['new_style_string_format']: - self.new_style_string_format = True - def alert(self, matches): # Format the command and arguments try: From fc5afa61965004703826a4c903f67941ce01ceb6 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:49:24 +0900 Subject: [PATCH 32/83] Sync requirements.txt and setup.py --- requirements.txt | 8 ++++---- setup.py | 5 +++-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/requirements.txt b/requirements.txt index c8066fb5b..49512f615 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,10 +6,10 @@ cffi>=1.11.5 configparser>=3.5.0 croniter>=0.3.16 cryptography<3.4 -elasticsearch>=7.0.0 +elasticsearch==7.0.0 envparse>=0.2.0 exotel>=0.1.3 -jira>=1.0.10,<1.0.15 +jira>=2.0.0 jsonschema>=3.0.2 mock>=2.0.0 prison>=0.1.2 @@ -17,8 +17,8 @@ py-zabbix==1.1.3 PyStaticConfiguration>=0.10.3 python-dateutil>=2.6.0,<2.7.0 PyYAML>=5.1 -requests>=2.0.0 +requests>=2.10.0 stomp.py>=4.1.17 texttable>=0.8.8 -twilio==6.0.0 +twilio>=6.0.0,<6.58 tzlocal<3.0 \ No newline at end of file diff --git a/setup.py b/setup.py index 7584315e1..33ee67784 100644 --- a/setup.py +++ b/setup.py @@ -42,11 +42,12 @@ 'prison>=0.1.2', 'PyStaticConfiguration>=0.10.3', 'python-dateutil>=2.6.0,<2.7.0', - 'PyYAML>=3.12', + 'PyYAML>=5.1', + 'py-zabbix==1.1.3', 'requests>=2.10.0', 'stomp.py>=4.1.17', 'texttable>=0.8.8', - 'twilio>=6.0.0,<6.1', + 'twilio>=6.0.0,<6.58', 'cffi>=1.11.5', 'cryptography<3.4', 'tzlocal<3.0' From 16b74fe2b29c926ce988476ca385a19a9a877f6a Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:51:26 +0900 Subject: [PATCH 33/83] remove alerta_new_style_string_format --- elastalert/schema.yaml | 1 - tests/alerts_test.py | 1 - 2 files changed, 2 deletions(-) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 6dd03cb59..120faa964 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -371,7 +371,6 @@ properties: alerta_value: {type: string} # Python format string 
alerta_attributes_keys: {type: array, items: {type: string}} alerta_attributes_values: {type: array, items: {type: string}} # Python format string - alerta_new_style_string_format: {type: boolean} ### Simple diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 5cd61ae75..226cc89f2 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -2534,7 +2534,6 @@ def test_alerta_new_style(ea): 'alerta_severity': "debug", 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", 'alerta_value': "UP", - 'alerta_new_style_string_format': True, 'type': 'any', 'alerta_use_match_timestamp': True, 'alert': 'alerta' From 203ff79ef03bd9a1674c2eec2db0a5b00c9488f1 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 00:55:03 +0900 Subject: [PATCH 34/83] Fix initializing self.thread_data.alerts_sent for running elastalert-test-rule --- elastalert/elastalert.py | 1 + 1 file changed, 1 insertion(+) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index b078c86db..24c9f884f 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -159,6 +159,7 @@ def __init__(self, args): self.starttime = self.args.start self.disabled_rules = [] self.replace_dots_in_field_names = self.conf.get('replace_dots_in_field_names', False) + self.thread_data.alerts_sent = 0 self.thread_data.num_hits = 0 self.thread_data.num_dupes = 0 self.scheduler = BackgroundScheduler() From abce867f2ca971b83d5420802c8566d606581759 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:08:50 +0900 Subject: [PATCH 35/83] TheHive alerter: Allow severity and tlp to be set by rule --- elastalert/alerts.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index fca3dac4f..89ef8f807 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2151,7 +2151,10 @@ def alert(self, matches): n += 1 custom_fields[cf_key] = cf elif isinstance(alert_config_value, str): - alert_config[alert_config_field] = alert_config_value.format(**context) + alert_value = alert_config_value.format(**context) + if alert_config_field in ['severity', 'tlp']: + alert_value = int(alert_value) + alert_config[alert_config_field] = alert_value elif isinstance(alert_config_value, (list, tuple)): formatted_list = [] for element in alert_config_value: From 579bac575c5b63fc2d03209d7bc55cecdeffbee1 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:11:11 +0900 Subject: [PATCH 36/83] Fix opsgenie_default_receipients to docs --- docs/source/ruletypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index acb96af6f..5cd8eab1c 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1633,7 +1633,7 @@ Optional: ``opsgenie_addr``: The OpsGenie URL to to connect against, default is ``https://api.opsgenie.com/v2/alerts``. If using the EU instance of Opsgenie, the URL needs to be ``https://api.eu.opsgenie.com/v2/alerts`` for requests to be successful. ``opsgenie_recipients``: A list OpsGenie recipients who will be notified by the alert. ``opsgenie_recipients_args``: Map of arguments used to format opsgenie_recipients. -``opsgenie_default_recipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccesful. +``opsgenie_default_receipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccesful. 
``opsgenie_teams``: A list of OpsGenie teams to notify (useful for schedules with escalation). ``opsgenie_teams_args``: Map of arguments used to format opsgenie_teams (useful for assigning the alerts to teams based on some data) ``opsgenie_default_teams``: List of default teams to notify when the formatting of opsgenie_teams is unsuccesful. From 7962a2eb73057967e0a610b728833ea41bb17fa2 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:14:10 +0900 Subject: [PATCH 37/83] added docs opsgenie_proxy --- docs/source/ruletypes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 5cd8eab1c..485809250 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1651,6 +1651,8 @@ Optional: ``opsgenie_details``: Map of custom key/value pairs to include in the alert's details. The value can sourced from either fields in the first match, environment variables, or a constant value. +``opsgenie_proxy``: By default ElastAlert will not use a network proxy to send notifications to OpsGenie. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. + Example usage:: opsgenie_details: From 22fc5b1feda572a20fc19ceb4e73e57a1fb85620 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:15:15 +0900 Subject: [PATCH 38/83] added docs pagertree_proxy --- docs/source/ruletypes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 485809250..2c38bffcc 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1951,6 +1951,8 @@ The alerter requires the following options: ``pagertree_integration_url``: URL generated by PagerTree for the integration. +``pagertree_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerTree. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. + Exotel ~~~~~~ From f06f86fb0925eafa9e23800c6356aa0a9b2e670c Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:17:13 +0900 Subject: [PATCH 39/83] added docs telegram_proxy_login/pass --- docs/source/ruletypes.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 2c38bffcc..20e94bd7d 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1869,6 +1869,10 @@ Optional: ``telegram_proxy``: By default ElastAlert will not use a network proxy to send notifications to Telegram. Set this option using ``hostname:port`` if you need to use a proxy. +``telegram_proxy_login``: The Telegram proxy auth username. + +``telegram_proxy_pass``: The Telegram proxy auth password. + GoogleChat ~~~~~~~~~~ GoogleChat alerter will send a notification to a predefined GoogleChat channel. The body of the notification is formatted the same as with other alerters. From 5eaf58b5b2674eaf5aacf7e8678b8bc6434a9e2c Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:18:23 +0900 Subject: [PATCH 40/83] added docs slack_ca_certs --- docs/source/ruletypes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 20e94bd7d..7c651e93a 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1823,6 +1823,8 @@ Provide absolute address of the pciture, for example: http://some.address.com/im ``slack_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. 
+``slack_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. + Mattermost ~~~~~~~~~~ From 9ae36a4880cc2c7e1105f533b11fbd2ade5e3ef3 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:19:10 +0900 Subject: [PATCH 41/83] added docs slack_ignore_ssl_errors --- docs/source/ruletypes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 7c651e93a..d04e429ff 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1825,6 +1825,8 @@ Provide absolute address of the pciture, for example: http://some.address.com/im ``slack_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. +``slack_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. + Mattermost ~~~~~~~~~~ From 9084fd44082912209dc242165000d4b79720df54 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:25:04 +0900 Subject: [PATCH 42/83] Fix opsgenie/zabbix/stomp for docs --- docs/source/ruletypes.rst | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index d04e429ff..0aa3bc3ce 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1631,12 +1631,19 @@ Optional: ``opsgenie_account``: The OpsGenie account to integrate with. ``opsgenie_addr``: The OpsGenie URL to to connect against, default is ``https://api.opsgenie.com/v2/alerts``. If using the EU instance of Opsgenie, the URL needs to be ``https://api.eu.opsgenie.com/v2/alerts`` for requests to be successful. + ``opsgenie_recipients``: A list OpsGenie recipients who will be notified by the alert. + ``opsgenie_recipients_args``: Map of arguments used to format opsgenie_recipients. + ``opsgenie_default_receipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccesful. + ``opsgenie_teams``: A list of OpsGenie teams to notify (useful for schedules with escalation). + ``opsgenie_teams_args``: Map of arguments used to format opsgenie_teams (useful for assigning the alerts to teams based on some data) + ``opsgenie_default_teams``: List of default teams to notify when the formatting of opsgenie_teams is unsuccesful. + ``opsgenie_tags``: A list of tags for this alert. ``opsgenie_message``: Set the OpsGenie message to something other than the rule name. The message can be formatted with fields from the first match e.g. "Error occurred for {app_name} at {timestamp}.". @@ -2080,12 +2087,20 @@ Stomp This alert type will use the STOMP protocol in order to push a message to a broker like ActiveMQ or RabbitMQ. The message body is a JSON string containing the alert details. The default values will work with a pristine ActiveMQ installation. -Optional: +The alerter requires the following option: ``stomp_hostname``: The STOMP host to use, defaults to localhost. + ``stomp_hostport``: The STOMP port to use, defaults to 61613. + ``stomp_login``: The STOMP login to use, defaults to admin. + ``stomp_password``: The STOMP password to use, defaults to admin. + +Optional: + +``stomp_ssl``: Connect the STOMP host using TLS, defaults to ``False``. + ``stomp_destination``: The STOMP destination to use, defaults to /queue/ALERT The stomp_destination field depends on the broker, the /queue/ALERT example is the nomenclature used by ActiveMQ. Each broker has its own logic. 
@@ -2277,6 +2292,9 @@ Zabbix will send notification to a Zabbix server. The item in the host specified Required: ``zbx_sender_host``: The address where zabbix server is running. + ``zbx_sender_port``: The port where zabbix server is listenning. + ``zbx_host``: This field setup the host in zabbix that receives the value sent by Elastalert. + ``zbx_key``: This field setup the key in the host that receives the value sent by Elastalert. From 63470276c81a18489952466bef3344069f2d1f52 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:26:15 +0900 Subject: [PATCH 43/83] added docs alerta_api_skip_ssl --- docs/source/ruletypes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 0aa3bc3ce..810e2d63c 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2125,6 +2125,8 @@ Optional: ``alerta_use_match_timestamp``: If true, it will use the timestamp of the first match as the ``createTime`` of the alert. otherwise, the current server time is used. +``alerta_api_skip_ssl``: Defaults to False. + ``alert_missing_value``: Text to replace any match field not found when formating strings. Defaults to ````. The following options dictate the values of the API JSON payload: From 042cba994d987dd4433d05bf3879ff6455665dee Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:27:13 +0900 Subject: [PATCH 44/83] added docs hive_verify --- docs/source/ruletypes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 810e2d63c..2160798df 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2255,6 +2255,8 @@ Optional: ``hive_proxies``: Proxy configuration. +``hive_verify``: Whether or not to enable SSL certificate validation. Defaults to False. + ``hive_observable_data_mapping``: If needed, matched data fields can be mapped to TheHive observable types using python string formatting. Example usage:: From 9b3861ebc68a2a37c0286fd4683e487db3410534 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:28:22 +0900 Subject: [PATCH 45/83] fix docs slack_timeout --- docs/source/ruletypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 2160798df..104fc27a7 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1822,7 +1822,7 @@ Provide absolute address of the pciture, for example: http://some.address.com/im ``slack_title_link``: You can add a link in your Slack notification by setting this to a valid URL. Requires slack_title to be set. -``slack_timeout``: You can specify a timeout value, in seconds, for making communicating with Slac. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. +``slack_timeout``: You can specify a timeout value, in seconds, for making communicating with Slack. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. ``slack_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the slack notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``. 
From bee23c48f018add805a92f9deb5e91f7792ece83 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:29:21 +0900 Subject: [PATCH 46/83] added docs jira_assignee --- docs/source/ruletypes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 104fc27a7..96f92b3d6 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1528,6 +1528,8 @@ For an example JIRA account file, see ``example_rules/jira_acct.yaml``. The acco Optional: +``jira_assignee``: Assigns an issue to a user. + ``jira_component``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. This is provided for backwards compatibility and will eventually be deprecated. It is preferable to use the plural ``jira_components`` instead. ``jira_components``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. From b30d228ca30e0391ef54c7fbdd442f8a9446a81b Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 03:36:42 +0900 Subject: [PATCH 47/83] Fix for the mapping error reported --- elastalert/es_mappings/6/elastalert.json | 1 + 1 file changed, 1 insertion(+) diff --git a/elastalert/es_mappings/6/elastalert.json b/elastalert/es_mappings/6/elastalert.json index 645a67762..2cc97bcfb 100644 --- a/elastalert/es_mappings/6/elastalert.json +++ b/elastalert/es_mappings/6/elastalert.json @@ -29,6 +29,7 @@ "format": "dateOptionalTime" }, "match_body": { + "enabled": "false", "type": "object" }, "aggregate_id": { From effce139a9a05a725c0bc628ffb8cc487d470ba2 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 04:00:01 +0900 Subject: [PATCH 48/83] Update docs --- README.md | 25 +++++++++++++++--------- docs/source/elastalert.rst | 25 ++++++++++++++++++------ docs/source/ruletypes.rst | 40 +++++++++++++++++++------------------- 3 files changed, 55 insertions(+), 35 deletions(-) diff --git a/README.md b/README.md index 99acc02e7..a7511046c 100644 --- a/README.md +++ b/README.md @@ -39,22 +39,29 @@ Several rule types with common monitoring paradigms are included with ElastAlert Currently, we have built-in support for the following alert types: -- Email -- JIRA +- E-mail +- Jira - OpsGenie -- Commands +- Command - HipChat -- MS Teams +- Stride +- Microsoft Teams - Slack +- Mattermost - Telegram -- GoogleChat -- AWS SNS -- VictorOps +- Google Chat +- Amazon Simple Notification Service (AWS SNS) +- Splunk On-Call (Formerly VictorOps) - PagerDuty - PagerTree - Exotel - Twilio - Gitter +- ServiceNow +- Debug +- Stomp +- Alerta +- HTTP POST - Line Notify - Zabbix @@ -115,13 +122,13 @@ A [Dockerized version](https://github.com/bitsensor/elastalert) of ElastAlert in ```bash git clone https://github.com/bitsensor/elastalert.git; cd elastalert -docker run -d -p 3030:3030 \ +docker run -d -p 3030:3030 -p 3333:3333 \ -v `pwd`/config/elastalert.yaml:/opt/elastalert/config.yaml \ -v `pwd`/config/config.json:/opt/elastalert-server/config/config.json \ -v `pwd`/rules:/opt/elastalert/rules \ -v `pwd`/rule_templates:/opt/elastalert/rule_templates \ --net="host" \ - --name elastalert bitsensor/elastalert:latest + --name elastalert bitsensor/elastalert:3.0.0-beta.1 ``` ## Documentation diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index b1008c3c4..f93cac96d 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -31,18 +31,31 @@ Several rule types with common monitoring paradigms are included 
with ElastAlert Currently, we have support built in for these alert types: -- Command -- Email -- JIRA +- E-mail +- Jira - OpsGenie -- SNS +- Command - HipChat +- Stride +- Microsoft Teams - Slack +- Mattermost - Telegram -- GoogleChat +- Google Chat +- Amazon Simple Notification Service (AWS SNS) +- Splunk On-Call (Formerly VictorOps) +- PagerDuty +- PagerTree +- Exotel +- Twilio +- Gitter +- ServiceNow - Debug - Stomp -- TheHive +- Alerta +- HTTP POST +- Line Notify +- Zabbix Additional rule types and alerts can be easily imported or written. (See :ref:`Writing rule types ` and :ref:`Writing alerts `) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 96f92b3d6..51e184e39 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -140,7 +140,7 @@ Rule Configuration Cheat Sheet +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ | ``ignore_null`` (boolean, no default) | | | Req | Req | | | | | | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ -| ``query_key`` (string, no default) | Opt | | | Req | Opt | Opt | Opt | Req | Opt | +| ``query_key`` (string or list, no default) | Opt | | | Req | Opt | Opt | Opt | Req | Opt | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ | ``aggregation_key`` (string, no default) | Opt | | | | | | | | | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ @@ -160,7 +160,7 @@ Rule Configuration Cheat Sheet | | | | | | | | | | | |``doc_type`` (string, no default) | | | | | | | | | | | | | | | | | | | | | -|``query_key`` (string, no default) | | | | | | | | | | +|``query_key`` (string or list, no default) | | | | | | | | | | | | | | | | | | | | | |``terms_size`` (int, default 50) | | | | | | | | | | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ @@ -1622,7 +1622,7 @@ OpsGenie alerter will create an alert which can be used to notify Operations peo integration must be created in order to acquire the necessary ``opsgenie_key`` rule variable. Currently the OpsGenieAlerter only creates an alert, however it could be extended to update or close existing alerts. -It is necessary for the user to create an OpsGenie Rest HTTPS API `integration page `_ in order to create alerts. +It is necessary for the user to create an OpsGenie Rest HTTPS API `integration page `_ in order to create alerts. The OpsGenie alert requires one option: @@ -1669,8 +1669,8 @@ Example usage:: Environment: '$VAR' # environment variable Message: { field: message } # field in the first match -SNS -~~~ +Amazon Simple Notification Service (AWS SNS) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The SNS alerter will send an SNS notification. The body of the notification is formatted the same as with other alerters. The SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or @@ -1771,10 +1771,10 @@ The alerter requires the following two options: ``stride_proxy``: By default ElastAlert will not use a network proxy to send notifications to Stride. Set this option using ``hostname:port`` if you need to use a proxy. 
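Before moving on, the Amazon SNS section above mentions that credentials may come from the rule yaml, standard AWS credential and config files, or environment variables. A rule using this alerter might look roughly like the sketch below; ``sns_topic_arn`` is the value the alerter reports when it sends, while ``aws_region`` and ``profile`` are assumed names for the credential-related options, and every value shown is a placeholder::

    # Illustrative SNS alerter configuration; option names other than sns_topic_arn are assumptions
    alert:
      - sns                                                        # alerter key as registered in loaders.py
    sns_topic_arn: arn:aws:sns:us-east-1:123456789012:elastalert   # placeholder ARN
    aws_region: us-east-1                                          # assumed option name
    profile: elastalert                                            # assumed option name for a named credentials profile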
-MS Teams -~~~~~~~~ +Microsoft Teams +~~~~~~~~~~~~~~~ -MS Teams alerter will send a notification to a predefined Microsoft Teams channel. +Microsoft Teams alerter will send a notification to a predefined Microsoft Teams channel. The alerter requires the following options: @@ -1872,7 +1872,7 @@ Telegram alerter will send a notification to a predefined Telegram username or c The alerter requires the following two options: -``telegram_bot_token``: The token is a string along the lines of ``110201543:AAHdqTcvCH1vGWJxfSeofSAs0K5PALDsaw`` that will be required to authorize the bot and send requests to the Bot API. You can learn about obtaining tokens and generating new ones in this document https://core.telegram.org/bots#botfather +``telegram_bot_token``: The token is a string along the lines of ``110201543:AAHdqTcvCH1vGWJxfSeofSAs0K5PALDsaw`` that will be required to authorize the bot and send requests to the Bot API. You can learn about obtaining tokens and generating new ones in this document https://core.telegram.org/bots#6-botfather ``telegram_room_id``: Unique identifier for the target chat or username of the target channel using telegram chat_id (in the format "-xxxxxxxx") @@ -1937,7 +1937,7 @@ V2 API Options (Optional): These options are specific to the PagerDuty V2 API -See https://v2.developer.pagerduty.com/docs/send-an-event-events-api-v2 +See https://developer.pagerduty.com/docs/events-api-v2/trigger-events/ ``pagerduty_api_version``: Defaults to `v1`. Set to `v2` to enable the PagerDuty V2 Event API. @@ -1981,7 +1981,7 @@ The alerter requires the following option: ``exotel_auth_token``: Auth token assosiated with your Exotel account. -If you don't know how to find your accound sid and auth token, refer - http://support.exotel.in/support/solutions/articles/3000023019-how-to-find-my-exotel-token-and-exotel-sid- +If you don't know how to find your accound sid and auth token, refer - https://support.exotel.com/support/solutions/articles/3000023019-how-to-find-my-exotel-token-and-exotel-sid ``exotel_to_number``: The phone number where you would like send the notification. @@ -2008,26 +2008,26 @@ The alerter requires the following option: ``twilio_from_number``: Your twilio phone number from which message will be sent. -VictorOps -~~~~~~~~~ +Splunk On-Call (Formerly VictorOps) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -VictorOps alerter will trigger an incident to a predefined VictorOps routing key. The body of the notification is formatted the same as with other alerters. +Splunk On-Call (Formerly VictorOps) alerter will trigger an incident to a predefined Splunk On-Call (Formerly VictorOps) routing key. The body of the notification is formatted the same as with other alerters. The alerter requires the following options: ``victorops_api_key``: API key generated under the 'REST Endpoint' in the Integrations settings. -``victorops_routing_key``: VictorOps routing key to route the alert to. +``victorops_routing_key``: Splunk On-Call (Formerly VictorOps) routing key to route the alert to. -``victorops_message_type``: VictorOps field to specify severity level. Must be one of the following: INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY +``victorops_message_type``: Splunk On-Call (Formerly VictorOps) field to specify severity level. Must be one of the following: INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY Optional: -``victorops_entity_id``: The identity of the incident used by VictorOps to correlate incidents throughout the alert lifecycle. 
If not defined, VictorOps will assign a random string to each alert. +``victorops_entity_id``: The identity of the incident used by Splunk On-Call (Formerly VictorOps) to correlate incidents throughout the alert lifecycle. If not defined, Splunk On-Call (Formerly VictorOps) will assign a random string to each alert. ``victorops_entity_display_name``: Human-readable name of alerting entity to summarize incidents without affecting the life-cycle workflow. -``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to VictorOps. Set this option using ``hostname:port`` if you need to use a proxy. +``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to Splunk On-Call (Formerly VictorOps). Set this option using ``hostname:port`` if you need to use a proxy. Gitter ~~~~~~ @@ -2052,7 +2052,7 @@ The ServiceNow alerter will create a ne Incident in ServiceNow. The body of the The alerter requires the following options: -``servicenow_rest_url``: The ServiceNow RestApi url, this will look like https://instancename.service-now.com/api/now/v1/table/incident +``servicenow_rest_url``: The ServiceNow RestApi url, this will look like https://developer.servicenow.com/dev.do#!/reference/api/orlando/rest/c_TableAPI#r_TableAPI-POST ``username``: The ServiceNow Username to access the api. @@ -2111,7 +2111,7 @@ Alerta ~~~~~~ Alerta alerter will post an alert in the Alerta server instance through the alert API endpoint. -See http://alerta.readthedocs.io/en/latest/api/alert.html for more details on the Alerta JSON format. +See https://docs.alerta.io/en/latest/api/alert.html for more details on the Alerta JSON format. For Alerta 5.0 From 7fde8af57df993e8c46f79faff2764326033dd04 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 04:07:10 +0900 Subject: [PATCH 49/83] Kibana Discover app link 7.4-7.15 support --- docs/source/ruletypes.rst | 2 +- elastalert/kibana_discover.py | 2 +- elastalert/schema.yaml | 2 +- tests/kibana_discover_test.py | 19 ++++++++++++++++++- 4 files changed, 21 insertions(+), 4 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 51e184e39..9b9952761 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -553,7 +553,7 @@ The currently supported versions of Kibana Discover are: - `5.6` - `6.0`, `6.1`, `6.2`, `6.3`, `6.4`, `6.5`, `6.6`, `6.7`, `6.8` -- `7.0`, `7.1`, `7.2`, `7.3` +- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10`, `7.11`, `7.12`, `7.13`, `7.14`, `7.15` ``kibana_discover_version: '7.3'`` diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index 7e4dbb5d1..8e1f5c1e2 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -14,7 +14,7 @@ kibana_default_timedelta = datetime.timedelta(minutes=10) kibana5_kibana6_versions = frozenset(['5.6', '6.0', '6.1', '6.2', '6.3', '6.4', '6.5', '6.6', '6.7', '6.8']) -kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3']) +kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11', '7.12', '7.13', '7.14', '7.15']) def generate_kibana_discover_url(rule, match): ''' Creates a link for a kibana discover app. 
''' diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 120faa964..ed3c1ed9e 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -217,7 +217,7 @@ properties: ### Kibana Discover App Link generate_kibana_discover_url: {type: boolean} kibana_discover_app_url: {type: string, format: uri} - kibana_discover_version: {type: string, enum: ['7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} + kibana_discover_version: {type: string, enum: ['7.15', '7.14', '7.13', '7.12', '7.11', '7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} kibana_discover_index_pattern_id: {type: string, minLength: 1} kibana_discover_columns: {type: array, items: {type: string, minLength: 1}, minItems: 1} kibana_discover_from_timedelta: *timedelta diff --git a/tests/kibana_discover_test.py b/tests/kibana_discover_test.py index f06fe4e0c..837130c75 100644 --- a/tests/kibana_discover_test.py +++ b/tests/kibana_discover_test.py @@ -38,7 +38,24 @@ def test_generate_kibana_discover_url_with_kibana_5x_and_6x(kibana_version): assert url == expectedUrl -@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3']) +@pytest.mark.parametrize("kibana_version", [ + '7.0', + '7.1', + '7.2', + '7.3', + '7.4', + '7.5', + '7.6', + '7.7', + '7.8', + '7.9', + '7.10', + '7.11', + '7.12', + '7.13', + '7.14', + '7.15' +]) def test_generate_kibana_discover_url_with_kibana_7x(kibana_version): url = generate_kibana_discover_url( rule={ From e0c88e7f8587f0bd98afa154daef7c1525d0eca5 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 21:43:38 +0900 Subject: [PATCH 50/83] Remove hipchat integration --- README.md | 1 - docs/source/elastalert.rst | 1 - docs/source/ruletypes.rst | 37 ---------------- elastalert/alerts.py | 86 -------------------------------------- elastalert/loaders.py | 8 ---- elastalert/schema.yaml | 9 ---- tests/alerts_test.py | 58 ------------------------- 7 files changed, 200 deletions(-) diff --git a/README.md b/README.md index a7511046c..3d605c046 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,6 @@ Currently, we have built-in support for the following alert types: - Jira - OpsGenie - Command -- HipChat - Stride - Microsoft Teams - Slack diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index f93cac96d..3c0e77055 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -35,7 +35,6 @@ Currently, we have support built in for these alert types: - Jira - OpsGenie - Command -- HipChat - Stride - Microsoft Teams - Slack diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 9b9952761..f1dcfb67b 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1714,43 +1714,6 @@ Configuration variables in rules YAML file:: For more details, you can refer the `Squadcast documentation `_. -HipChat -~~~~~~~ - -HipChat alerter will send a notification to a predefined HipChat room. The body of the notification is formatted the same as with other alerters. - -The alerter requires the following two options: - -``hipchat_auth_token``: The randomly generated notification token created by HipChat. Go to https://XXXXX.hipchat.com/account/api and use -'Create new token' section, choosing 'Send notification' in Scopes list. - -``hipchat_room_id``: The id associated with the HipChat room you want to send the alert to. 
Go to https://XXXXX.hipchat.com/rooms and choose -the room you want to post to. The room ID will be the numeric part of the URL. - -``hipchat_msg_color``: The color of the message background that is sent to HipChat. May be set to green, yellow or red. Default is red. - -``hipchat_domain``: The custom domain in case you have HipChat own server deployment. Default is api.hipchat.com. - -``hipchat_ignore_ssl_errors``: Ignore TLS errors (self-signed certificates, etc.). Default is false. - -``hipchat_proxy``: By default ElastAlert will not use a network proxy to send notifications to HipChat. Set this option using ``hostname:port`` if you need to use a proxy. - -``hipchat_notify``: When set to true, triggers a hipchat bell as if it were a user. Default is true. - -``hipchat_from``: When humans report to hipchat, a timestamp appears next to their name. For bots, the name is the name of the token. The from, instead of a timestamp, defaults to empty unless set, which you can do here. This is optional. - -``hipchat_message_format``: Determines how the message is treated by HipChat and rendered inside HipChat applications -html - Message is rendered as HTML and receives no special treatment. Must be valid HTML and entities must be escaped (e.g.: '&' instead of '&'). May contain basic tags: a, b, i, strong, em, br, img, pre, code, lists, tables. -text - Message is treated just like a message sent by a user. Can include @mentions, emoticons, pastes, and auto-detected URLs (Twitter, YouTube, images, etc). -Valid values: html, text. -Defaults to 'html'. - -``hipchat_mentions``: When using a ``html`` message format, it's not possible to mentions specific users using the ``@user`` syntax. -In that case, you can set ``hipchat_mentions`` to a list of users which will be first mentioned using a single text message, then the normal ElastAlert message will be sent to Hipchat. -If set, it will mention the users, no matter if the original message format is set to HTML or text. -Valid values: list of strings. -Defaults to ``[]``. 
- Stride ~~~~~~~ diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 89ef8f807..86dba8319 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -960,92 +960,6 @@ def alert(self, matches): elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn)) -class HipChatAlerter(Alerter): - """ Creates a HipChat room notification for each alert """ - required_options = frozenset(['hipchat_auth_token', 'hipchat_room_id']) - - def __init__(self, rule): - super(HipChatAlerter, self).__init__(rule) - self.hipchat_msg_color = self.rule.get('hipchat_msg_color', 'red') - self.hipchat_message_format = self.rule.get('hipchat_message_format', 'html') - self.hipchat_auth_token = self.rule['hipchat_auth_token'] - self.hipchat_room_id = self.rule['hipchat_room_id'] - self.hipchat_domain = self.rule.get('hipchat_domain', 'api.hipchat.com') - self.hipchat_ignore_ssl_errors = self.rule.get('hipchat_ignore_ssl_errors', False) - self.hipchat_notify = self.rule.get('hipchat_notify', True) - self.hipchat_from = self.rule.get('hipchat_from', '') - self.url = 'https://%s/v2/room/%s/notification?auth_token=%s' % ( - self.hipchat_domain, self.hipchat_room_id, self.hipchat_auth_token) - self.hipchat_proxy = self.rule.get('hipchat_proxy', None) - - def create_alert_body(self, matches): - body = super(HipChatAlerter, self).create_alert_body(matches) - - # HipChat sends 400 bad request on messages longer than 10000 characters - if self.hipchat_message_format == 'html': - # Use appropriate line ending for text/html - br = '
' - body = body.replace('\n', br) - - truncated_message = '
...(truncated)' - truncate_to = 10000 - len(truncated_message) - else: - truncated_message = '..(truncated)' - truncate_to = 10000 - len(truncated_message) - - if (len(body) > 9999): - body = body[:truncate_to] + truncated_message - - return body - - def alert(self, matches): - body = self.create_alert_body(matches) - - # Post to HipChat - headers = {'content-type': 'application/json'} - # set https proxy, if it was provided - proxies = {'https': self.hipchat_proxy} if self.hipchat_proxy else None - payload = { - 'color': self.hipchat_msg_color, - 'message': body, - 'message_format': self.hipchat_message_format, - 'notify': self.hipchat_notify, - 'from': self.hipchat_from - } - - try: - if self.hipchat_ignore_ssl_errors: - requests.packages.urllib3.disable_warnings() - - if self.rule.get('hipchat_mentions', []): - ping_users = self.rule.get('hipchat_mentions', []) - ping_msg = payload.copy() - ping_msg['message'] = "ping {}".format( - ", ".join("@{}".format(user) for user in ping_users) - ) - ping_msg['message_format'] = "text" - - response = requests.post( - self.url, - data=json.dumps(ping_msg, cls=DateTimeEncoder), - headers=headers, - verify=not self.hipchat_ignore_ssl_errors, - proxies=proxies) - - response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, - verify=not self.hipchat_ignore_ssl_errors, - proxies=proxies) - warnings.resetwarnings() - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to HipChat: %s" % e) - elastalert_logger.info("Alert sent to HipChat room %s" % self.hipchat_room_id) - - def get_info(self): - return {'type': 'hipchat', - 'hipchat_room_id': self.hipchat_room_id} - - class MsTeamsAlerter(Alerter): """ Creates a Microsoft Teams Conversation Message for each alert """ required_options = frozenset(['ms_teams_webhook_url', 'ms_teams_alert_summary']) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index f0351ab7f..c54942135 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -62,7 +62,6 @@ class RulesLoader(object): 'debug': alerts.DebugAlerter, 'command': alerts.CommandAlerter, 'sns': alerts.SnsAlerter, - 'hipchat': alerts.HipChatAlerter, 'stride': alerts.StrideAlerter, 'ms_teams': alerts.MsTeamsAlerter, 'slack': alerts.SlackAlerter, @@ -317,13 +316,6 @@ def _dt_to_ts_with_format(dt): rule.setdefault('client_cert', conf.get('client_cert')) rule.setdefault('client_key', conf.get('client_key')) - # Set HipChat options from global config - rule.setdefault('hipchat_msg_color', 'red') - rule.setdefault('hipchat_domain', 'api.hipchat.com') - rule.setdefault('hipchat_notify', True) - rule.setdefault('hipchat_from', '') - rule.setdefault('hipchat_ignore_ssl_errors', False) - # Make sure we have required options if self.required_locals - frozenset(list(rule.keys())): raise EAException('Missing required option(s): %s' % (', '.join(self.required_locals - frozenset(list(rule.keys()))))) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index ed3c1ed9e..7dadea47e 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -261,15 +261,6 @@ properties: jira_max_age: {type: number} jira_watchers: *arrayOfString - ### HipChat - hipchat_auth_token: {type: string} - hipchat_room_id: {type: [string, integer]} - hipchat_domain: {type: string} - hipchat_ignore_ssl_errors: {type: boolean} - hipchat_notify: {type: boolean} - hipchat_from: {type: string} - hipchat_mentions: {type: array, items: {type: string}} - ### Stride stride_access_token: {type: string} 
stride_cloud_id: {type: string} diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 226cc89f2..8e7280422 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -13,7 +13,6 @@ from elastalert.alerts import BasicMatchString from elastalert.alerts import CommandAlerter from elastalert.alerts import EmailAlerter -from elastalert.alerts import HipChatAlerter from elastalert.alerts import HTTPPostAlerter from elastalert.alerts import JiraAlerter from elastalert.alerts import JiraFormattedMatchString @@ -2363,63 +2362,6 @@ def test_stride_html(): mock_post_request.call_args_list[0][1]['data']) -def test_hipchat_body_size_limit_text(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'hipchat_auth_token': 'token', - 'hipchat_room_id': 'room_id', - 'hipchat_message_format': 'text', - 'alert_subject': 'Cool subject', - 'alert_text': 'Alert: we found something.\n\n{message}', - 'alert_text_type': 'alert_text_only', - 'alert': [], - 'alert_text_kw': { - '@timestamp': 'time', - 'message': 'message', - }, - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HipChatAlerter(rule) - match = { - '@timestamp': '2018-01-01T00:00:00', - 'message': 'foo bar\n' * 5000, - } - body = alert.create_alert_body([match]) - - assert len(body) <= 10000 - - -def test_hipchat_body_size_limit_html(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'hipchat_auth_token': 'token', - 'hipchat_room_id': 'room_id', - 'hipchat_message_format': 'html', - 'alert_subject': 'Cool subject', - 'alert_text': 'Alert: we found something.\n\n{message}', - 'alert_text_type': 'alert_text_only', - 'alert': [], - 'alert_text_kw': { - '@timestamp': 'time', - 'message': 'message', - }, - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HipChatAlerter(rule) - match = { - '@timestamp': '2018-01-01T00:00:00', - 'message': 'foo bar\n' * 5000, - } - - body = alert.create_alert_body([match]) - - assert len(body) <= 10000 - - def test_alerta_no_auth(ea): rule = { 'name': 'Test Alerta rule!', From fc4559f48304deac76a38ba38a7e4e272f871502 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 21:51:22 +0900 Subject: [PATCH 51/83] Remove Stride integration --- README.md | 1 - docs/source/elastalert.rst | 1 - docs/source/ruletypes.rst | 19 --- elastalert/alerts.py | 94 ------------- elastalert/loaders.py | 1 - elastalert/schema.yaml | 6 - tests/alerts_test.py | 278 ------------------------------------- 7 files changed, 400 deletions(-) diff --git a/README.md b/README.md index 3d605c046..dcce2e95c 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,6 @@ Currently, we have built-in support for the following alert types: - Jira - OpsGenie - Command -- Stride - Microsoft Teams - Slack - Mattermost diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 3c0e77055..fd13b595c 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -35,7 +35,6 @@ Currently, we have support built in for these alert types: - Jira - OpsGenie - Command -- Stride - Microsoft Teams - Slack - Mattermost diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index f1dcfb67b..610bf4f9b 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1715,25 +1715,6 @@ Configuration variables in rules YAML file:: For more details, you can refer the `Squadcast documentation `_. -Stride -~~~~~~~ - -Stride alerter will send a notification to a predefined Stride room. 
The body of the notification is formatted the same as with other alerters. -Simple HTML such as and tags will be parsed into a format that Stride can consume. - -The alerter requires the following two options: - -``stride_access_token``: The randomly generated notification token created by Stride. - -``stride_cloud_id``: The site_id associated with the Stride site you want to send the alert to. - -``stride_conversation_id``: The conversation_id associated with the Stride conversation you want to send the alert to. - -``stride_ignore_ssl_errors``: Ignore TLS errors (self-signed certificates, etc.). Default is false. - -``stride_proxy``: By default ElastAlert will not use a network proxy to send notifications to Stride. Set this option using ``hostname:port`` if you need to use a proxy. - - Microsoft Teams ~~~~~~~~~~~~~~~ diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 86dba8319..6be5ea603 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -12,7 +12,6 @@ import warnings from email.mime.text import MIMEText from email.utils import formatdate -from html.parser import HTMLParser from smtplib import SMTP from smtplib import SMTP_SSL from smtplib import SMTPAuthenticationError @@ -1896,99 +1895,6 @@ def get_info(self): 'http_post_webhook_url': self.post_url} -class StrideHTMLParser(HTMLParser): - """Parse html into stride's fabric structure""" - - def __init__(self): - """ - Define a couple markup place holders. - """ - self.content = [] - self.mark = None - HTMLParser.__init__(self) - - def handle_starttag(self, tag, attrs): - """Identify and verify starting tag is fabric compatible.""" - if tag == 'b' or tag == 'strong': - self.mark = dict(type='strong') - if tag == 'u': - self.mark = dict(type='underline') - if tag == 'a': - self.mark = dict(type='link', attrs=dict(attrs)) - - def handle_endtag(self, tag): - """Clear mark on endtag.""" - self.mark = None - - def handle_data(self, data): - """Construct data node for our data.""" - node = dict(type='text', text=data) - if self.mark: - node['marks'] = [self.mark] - self.content.append(node) - - -class StrideAlerter(Alerter): - """ Creates a Stride conversation message for each alert """ - - required_options = frozenset( - ['stride_access_token', 'stride_cloud_id', 'stride_conversation_id']) - - def __init__(self, rule): - super(StrideAlerter, self).__init__(rule) - - self.stride_access_token = self.rule['stride_access_token'] - self.stride_cloud_id = self.rule['stride_cloud_id'] - self.stride_conversation_id = self.rule['stride_conversation_id'] - self.stride_ignore_ssl_errors = self.rule.get('stride_ignore_ssl_errors', False) - self.stride_proxy = self.rule.get('stride_proxy', None) - self.url = 'https://api.atlassian.com/site/%s/conversation/%s/message' % ( - self.stride_cloud_id, self.stride_conversation_id) - - def alert(self, matches): - body = self.create_alert_body(matches).strip() - - # parse body with StrideHTMLParser - parser = StrideHTMLParser() - parser.feed(body) - - # Post to Stride - headers = { - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(self.stride_access_token) - } - - # set https proxy, if it was provided - proxies = {'https': self.stride_proxy} if self.stride_proxy else None - - # build stride json payload - # https://developer.atlassian.com/cloud/stride/apis/document/structure/ - payload = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': parser.content} - ]} - 
]}} - - try: - if self.stride_ignore_ssl_errors: - requests.packages.urllib3.disable_warnings() - response = requests.post( - self.url, data=json.dumps(payload, cls=DateTimeEncoder), - headers=headers, verify=not self.stride_ignore_ssl_errors, - proxies=proxies) - warnings.resetwarnings() - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to Stride: %s" % e) - elastalert_logger.info( - "Alert sent to Stride conversation %s" % self.stride_conversation_id) - - def get_info(self): - return {'type': 'stride', - 'stride_cloud_id': self.stride_cloud_id, - 'stride_converstation_id': self.stride_converstation_id} - - class LineNotifyAlerter(Alerter): """ Created a Line Notify for each alert """ required_option = frozenset(["linenotify_access_token"]) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index c54942135..8b9c1eb23 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -62,7 +62,6 @@ class RulesLoader(object): 'debug': alerts.DebugAlerter, 'command': alerts.CommandAlerter, 'sns': alerts.SnsAlerter, - 'stride': alerts.StrideAlerter, 'ms_teams': alerts.MsTeamsAlerter, 'slack': alerts.SlackAlerter, 'mattermost': alerts.MattermostAlerter, diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 7dadea47e..dde7f8cd3 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -261,12 +261,6 @@ properties: jira_max_age: {type: number} jira_watchers: *arrayOfString - ### Stride - stride_access_token: {type: string} - stride_cloud_id: {type: string} - stride_conversation_id: {type: string} - stride_ignore_ssl_errors: {type: boolean} - ### Slack slack_webhook_url: *arrayOfString slack_username_override: {type: string} diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 8e7280422..df727727b 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -19,7 +19,6 @@ from elastalert.alerts import MsTeamsAlerter from elastalert.alerts import PagerDutyAlerter from elastalert.alerts import SlackAlerter -from elastalert.alerts import StrideAlerter from elastalert.loaders import FileRulesLoader from elastalert.opsgenie import OpsGenieAlerter from elastalert.util import ts_add @@ -2085,283 +2084,6 @@ def test_resolving_rule_references(ea): assert 'the_owner' == alert.rule['nested_dict']['nested_owner'] -def test_stride_plain_text(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - body = "{0}\n\n@timestamp: {1}\nsomefield: {2}".format( - rule['name'], match['@timestamp'], match['somefield'] - ) - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - -def test_stride_underline_text(): - rule = { - 
'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Underline Text', - 'alert_text_type': 'alert_text_only', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - body = "Underline Text" - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'underline'} - ]} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - -def test_stride_bold_text(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Bold Text', - 'alert_text_type': 'alert_text_only', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - body = "Bold Text" - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'strong'} - ]} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - -def test_stride_strong_text(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Bold Text', - 'alert_text_type': 'alert_text_only', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - body = "Bold Text" - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'strong'} - ]} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - -def 
test_stride_hyperlink(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Link', - 'alert_text_type': 'alert_text_only', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - body = "Link" - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'link', 'attrs': {'href': 'http://stride.com'}} - ]} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - -def test_stride_html(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Alert: we found something. Link', - 'alert_text_type': 'alert_text_only', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': 'Alert', 'marks': [ - {'type': 'strong'} - ]}, - {'type': 'text', 'text': ': we found something. 
'}, - {'type': 'text', 'text': 'Link', 'marks': [ - {'type': 'link', 'attrs': {'href': 'http://stride.com'}} - ]} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - def test_alerta_no_auth(ea): rule = { 'name': 'Test Alerta rule!', From 8675923ec3a3a9112ad0b6d6ca2bd86dddf4afe0 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 21:56:12 +0900 Subject: [PATCH 52/83] Fix slack ssl verification --- elastalert/alerts.py | 2 +- tests/alerts_test.py | 63 +++++++++++++++++++++++++++++++++++++------- 2 files changed, 55 insertions(+), 10 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 6be5ea603..fdfeb64a0 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1115,7 +1115,7 @@ def alert(self, matches): if self.slack_ca_certs: verify = self.slack_ca_certs else: - verify = self.slack_ignore_ssl_errors + verify = not self.slack_ignore_ssl_errors if self.slack_ignore_ssl_errors: requests.packages.urllib3.disable_warnings() payload['channel'] = channel_override diff --git a/tests/alerts_test.py b/tests/alerts_test.py index df727727b..bad5d85d3 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -1240,7 +1240,7 @@ def test_slack_uses_custom_title(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1286,7 +1286,7 @@ def test_slack_uses_custom_timeout(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=20 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1330,7 +1330,7 @@ def test_slack_uses_rule_name_when_custom_title_is_not_provided(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1375,7 +1375,7 @@ def test_slack_uses_custom_slack_channel(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1436,7 +1436,7 @@ def test_slack_uses_list_of_custom_slack_channel(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1487,7 +1487,7 @@ def test_slack_attach_kibana_discover_url_when_generated(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1532,7 +1532,7 @@ def test_slack_attach_kibana_discover_url_when_not_generated(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1584,7 +1584,7 @@ def test_slack_kibana_discover_title(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = 
json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1636,13 +1636,58 @@ def test_slack_kibana_discover_color(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data +def test_slack_ignore_ssl_errors(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_ignore_ssl_errors': True, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=False, + timeout=10 + ) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + def test_http_alerter_with_payload(): rule = { 'name': 'Test HTTP Post Alerter With Payload', From 3e4b80449c39885d6fa7061ec19e266caa0f5e60 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 22:03:21 +0900 Subject: [PATCH 53/83] change mock to build in unittest.mock --- elastalert/test_rule.py | 2 +- requirements.txt | 1 - setup.py | 1 - tests/alerts_test.py | 2 +- tests/base_test.py | 2 +- tests/conftest.py | 2 +- tests/loaders_test.py | 2 +- tests/rules_test.py | 2 +- tests/util_test.py | 2 +- 9 files changed, 7 insertions(+), 9 deletions(-) diff --git a/elastalert/test_rule.py b/elastalert/test_rule.py index af1eaa497..965e4972c 100644 --- a/elastalert/test_rule.py +++ b/elastalert/test_rule.py @@ -10,7 +10,7 @@ import string import sys -import mock +from unittest import mock from elastalert.config import load_conf from elastalert.elastalert import ElastAlerter diff --git a/requirements.txt b/requirements.txt index 49512f615..f90b092eb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,7 +11,6 @@ envparse>=0.2.0 exotel>=0.1.3 jira>=2.0.0 jsonschema>=3.0.2 -mock>=2.0.0 prison>=0.1.2 py-zabbix==1.1.3 PyStaticConfiguration>=0.10.3 diff --git a/setup.py b/setup.py index 33ee67784..a2723553e 100644 --- a/setup.py +++ b/setup.py @@ -38,7 +38,6 @@ 'exotel>=0.1.3', 'jira>=2.0.0', 'jsonschema>=3.0.2', - 'mock>=2.0.0', 'prison>=0.1.2', 'PyStaticConfiguration>=0.10.3', 'python-dateutil>=2.6.0,<2.7.0', diff --git a/tests/alerts_test.py b/tests/alerts_test.py index bad5d85d3..1bd7a9b2f 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -4,7 +4,7 @@ import json import subprocess -import mock +from unittest import mock import pytest from jira.exceptions import JIRAError diff --git a/tests/base_test.py b/tests/base_test.py index 92dc35f7e..7e756ba92 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -5,7 +5,7 @@ import threading import elasticsearch -import mock +from unittest import mock import pytest from elasticsearch.exceptions import ConnectionError from elasticsearch.exceptions import ElasticsearchException diff --git a/tests/conftest.py b/tests/conftest.py index 6844296ee..853c0ddbd 100644 --- 
a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,7 @@ import logging import os -import mock +from unittest import mock import pytest import elastalert.elastalert diff --git a/tests/loaders_test.py b/tests/loaders_test.py index bb8d3d873..4524348be 100644 --- a/tests/loaders_test.py +++ b/tests/loaders_test.py @@ -3,7 +3,7 @@ import datetime import os -import mock +from unittest import mock import pytest import elastalert.alerts diff --git a/tests/rules_test.py b/tests/rules_test.py index 1954b5d54..12d4f9298 100644 --- a/tests/rules_test.py +++ b/tests/rules_test.py @@ -2,7 +2,7 @@ import copy import datetime -import mock +from unittest import mock import pytest from elastalert.ruletypes import AnyRule diff --git a/tests/util_test.py b/tests/util_test.py index 55a2f9c8f..95f026b5b 100644 --- a/tests/util_test.py +++ b/tests/util_test.py @@ -2,7 +2,7 @@ from datetime import datetime from datetime import timedelta -import mock +from unittest import mock import pytest from dateutil.parser import parse as dt From d48d3e3c05fcb0380f9b86ebc170cc91c6d1a0d6 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 22:21:23 +0900 Subject: [PATCH 54/83] Fix Logging to elastalert_logger --- elastalert/alerts.py | 15 +++++++-------- elastalert/elastalert.py | 32 ++++++++++++++++---------------- elastalert/kibana_discover.py | 9 +++++---- elastalert/loaders.py | 14 +++++++------- elastalert/opsgenie.py | 15 +++++++-------- elastalert/util.py | 6 +++--- 6 files changed, 45 insertions(+), 46 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index fdfeb64a0..033f1811d 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2,7 +2,6 @@ import copy import datetime import json -import logging import os import re import subprocess @@ -584,7 +583,7 @@ def __init__(self, rule): msg = '%s Both have common statuses of (%s). As such, no tickets will ever be found.' % ( msg, ','.join(intersection)) msg += ' This should be simplified to use only one or the other.' - logging.warning(msg) + elastalert_logger.warning(msg) self.reset_jira_args() @@ -604,7 +603,7 @@ def set_priority(self): if self.priority is not None and self.client is not None: self.jira_args['priority'] = {'id': self.priority_ids[self.priority]} except KeyError: - logging.error("Priority %s not found. Valid priorities are %s" % (self.priority, list(self.priority_ids.keys()))) + elastalert_logger.error("Priority %s not found. 
Valid priorities are %s" % (self.priority, list(self.priority_ids.keys()))) def reset_jira_args(self): self.jira_args = {'project': {'key': self.project}, @@ -747,7 +746,7 @@ def find_existing_ticket(self, matches): try: issues = self.client.search_issues(jql) except JIRAError as e: - logging.exception("Error while searching for JIRA ticket using jql '%s': %s" % (jql, e)) + elastalert_logger.exception("Error while searching for JIRA ticket using jql '%s': %s" % (jql, e)) return None if len(issues): @@ -790,19 +789,19 @@ def alert(self, matches): try: self.comment_on_ticket(ticket, match) except JIRAError as e: - logging.exception("Error while commenting on ticket %s: %s" % (ticket, e)) + elastalert_logger.exception("Error while commenting on ticket %s: %s" % (ticket, e)) if self.labels: for label in self.labels: try: ticket.fields.labels.append(label) except JIRAError as e: - logging.exception("Error while appending labels to ticket %s: %s" % (ticket, e)) + elastalert_logger.exception("Error while appending labels to ticket %s: %s" % (ticket, e)) if self.transition: elastalert_logger.info('Transitioning existing ticket %s' % (ticket.key)) try: self.transition_ticket(ticket) except JIRAError as e: - logging.exception("Error while transitioning ticket %s: %s" % (ticket, e)) + elastalert_logger.exception("Error while transitioning ticket %s: %s" % (ticket, e)) if self.pipeline is not None: self.pipeline['jira_ticket'] = ticket @@ -893,7 +892,7 @@ def __init__(self, *args): if isinstance(self.rule['command'], str): self.shell = True if '%' in self.rule['command']: - logging.warning('Warning! You could be vulnerable to shell injection!') + elastalert_logger.warning('Warning! You could be vulnerable to shell injection!') self.rule['command'] = [self.rule['command']] def alert(self, matches): diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 24c9f884f..137e22dad 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -402,7 +402,7 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): # Different versions of ES have this formatted in different ways. Fallback to str-ing the whole thing raise ElasticsearchException(str(res['_shards']['failures'])) - logging.debug(str(res)) + elastalert_logger.debug(str(res)) except ElasticsearchException as e: # Elasticsearch sometimes gives us GIGANTIC error messages # (so big that they will fill the entire terminal buffer) @@ -845,7 +845,7 @@ def enhance_filter(self, rule): filters.append(query_str_filter) else: filters.append({'query': query_str_filter}) - logging.debug("Enhanced filter with {} terms: {}".format(listname, str(query_str_filter))) + elastalert_logger.debug("Enhanced filter with {} terms: {}".format(listname, str(query_str_filter))) def run_rule(self, rule, endtime, starttime=None): """ Run a rule for a given time period, including querying and alerting on results. @@ -874,7 +874,7 @@ def run_rule(self, rule, endtime, starttime=None): # Don't run if starttime was set to the future if ts_now() <= rule['starttime']: - logging.warning("Attempted to use query start time in the future (%s), sleeping instead" % (starttime)) + elastalert_logger.warning("Attempted to use query start time in the future (%s), sleeping instead" % (starttime)) return 0 # Run the rule. 
If querying over a large time period, split it up into segments @@ -1083,7 +1083,7 @@ def load_rule_changes(self): try: new_rule = self.rules_loader.load_configuration(rule_file, self.conf) if not new_rule: - logging.error('Invalid rule file skipped: %s' % rule_file) + elastalert_logger.error('Invalid rule file skipped: %s' % rule_file) continue if 'is_enabled' in new_rule and not new_rule['is_enabled']: elastalert_logger.info('Rule file %s is now disabled.' % (rule_file)) @@ -1123,7 +1123,7 @@ def load_rule_changes(self): try: new_rule = self.rules_loader.load_configuration(rule_file, self.conf) if not new_rule: - logging.error('Invalid rule file skipped: %s' % rule_file) + elastalert_logger.error('Invalid rule file skipped: %s' % rule_file) continue if 'is_enabled' in new_rule and not new_rule['is_enabled']: continue @@ -1206,12 +1206,12 @@ def wait_until_responsive(self, timeout, clock=timeit.default_timer): time.sleep(1.0) if self.writeback_es.ping(): - logging.error( + elastalert_logger.error( 'Writeback alias "%s" does not exist, did you run `elastalert-create-index`?', self.writeback_alias, ) else: - logging.error( + elastalert_logger.error( 'Could not reach ElasticSearch at "%s:%d".', self.conf['es_host'], self.conf['es_port'], @@ -1286,7 +1286,7 @@ def handle_rule_execution(self, rule): # We were processing for longer than our refresh interval # This can happen if --start was specified with a large time period # or if we are running too slow to process events in real time. - logging.warning( + elastalert_logger.warning( "Querying from %s to %s took longer than %s!" % ( old_starttime, pretty_ts(endtime, rule.get('use_local_time')), @@ -1619,7 +1619,7 @@ def writeback(self, doc_type, body, rule=None, match_body=None): res = self.writeback_es.index(index=index, doc_type=doc_type, body=body) return res except ElasticsearchException as e: - logging.exception("Error writing alert info to Elasticsearch: %s" % (e)) + elastalert_logger.exception("Error writing alert info to Elasticsearch: %s" % (e)) def find_recent_pending_alerts(self, time_limit): """ Queries writeback_es to find alerts that did not send @@ -1647,7 +1647,7 @@ def find_recent_pending_alerts(self, time_limit): if res['hits']['hits']: return res['hits']['hits'] except ElasticsearchException as e: - logging.exception("Error finding recent pending alerts: %s %s" % (e, query)) + elastalert_logger.exception("Error finding recent pending alerts: %s %s" % (e, query)) return [] def send_pending_alerts(self): @@ -1847,11 +1847,11 @@ def add_aggregated_alert(self, match, rule): def silence(self, silence_cache_key=None): """ Silence an alert for a period of time. --silence and --rule must be passed as args. 
""" if self.debug: - logging.error('--silence not compatible with --debug') + elastalert_logger.error('--silence not compatible with --debug') exit(1) if not self.args.rule: - logging.error('--silence must be used with --rule') + elastalert_logger.error('--silence must be used with --rule') exit(1) # With --rule, self.rules will only contain that specific rule @@ -1861,11 +1861,11 @@ def silence(self, silence_cache_key=None): try: silence_ts = parse_deadline(self.args.silence) except (ValueError, TypeError): - logging.error('%s is not a valid time period' % (self.args.silence)) + elastalert_logger.error('%s is not a valid time period' % (self.args.silence)) exit(1) if not self.set_realert(silence_cache_key, silence_ts, 0): - logging.error('Failed to save silence command to Elasticsearch') + elastalert_logger.error('Failed to save silence command to Elasticsearch') exit(1) elastalert_logger.info('Success. %s will be silenced until %s' % (silence_cache_key, silence_ts)) @@ -1926,7 +1926,7 @@ def is_silenced(self, rule_name): def handle_error(self, message, data=None): ''' Logs message at error level and writes message, data and traceback to Elasticsearch. ''' - logging.error(message) + elastalert_logger.error(message) body = {'message': message} tb = traceback.format_exc() body['traceback'] = tb.strip().split('\n') @@ -1936,7 +1936,7 @@ def handle_error(self, message, data=None): def handle_uncaught_exception(self, exception, rule): """ Disables a rule and sends a notification. """ - logging.error(traceback.format_exc()) + elastalert_logger.error(traceback.format_exc()) self.handle_error('Uncaught exception running rule %s: %s' % (rule['name'], exception), {'rule': rule['name']}) if self.disable_rules_on_error: self.rules = [running_rule for running_rule in self.rules if running_rule['name'] != rule['name']] diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index 8e1f5c1e2..2ebc0f804 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -8,6 +8,7 @@ import urllib.parse from .util import EAException +from .util import elastalert_logger from .util import lookup_es_key from .util import ts_add @@ -21,7 +22,7 @@ def generate_kibana_discover_url(rule, match): discover_app_url = rule.get('kibana_discover_app_url') if not discover_app_url: - logging.warning( + elastalert_logger.warning( 'Missing kibana_discover_app_url for rule %s' % ( rule.get('name', '') ) @@ -30,7 +31,7 @@ def generate_kibana_discover_url(rule, match): kibana_version = rule.get('kibana_discover_version') if not kibana_version: - logging.warning( + elastalert_logger.warning( 'Missing kibana_discover_version for rule %s' % ( rule.get('name', '') ) @@ -39,7 +40,7 @@ def generate_kibana_discover_url(rule, match): index = rule.get('kibana_discover_index_pattern_id') if not index: - logging.warning( + elastalert_logger.warning( 'Missing kibana_discover_index_pattern_id for rule %s' % ( rule.get('name', '') ) @@ -70,7 +71,7 @@ def generate_kibana_discover_url(rule, match): appState = kibana_discover_app_state(index, columns, filters, query_keys, match) else: - logging.warning( + elastalert_logger.warning( 'Unknown kibana discover application version %s for rule %s' % ( kibana_version, rule.get('name', '') diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 8b9c1eb23..381ac26ed 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -2,7 +2,6 @@ import copy import datetime import hashlib -import logging import os import sys @@ -20,6 +19,7 @@ from .util import 
dt_to_unix from .util import dt_to_unixms from .util import EAException +from .util import elastalert_logger from .util import get_module from .util import ts_to_dt from .util import ts_to_dt_with_format @@ -115,7 +115,7 @@ def load(self, conf, args=None): rule = self.load_configuration(rule_file, conf, args) # A rule failed to load, don't try to process it if not rule: - logging.error('Invalid rule file skipped: %s' % rule_file) + elastalert_logger.error('Invalid rule file skipped: %s' % rule_file) continue # By setting "is_enabled: False" in rule file, a rule is easily disabled if 'is_enabled' in rule and not rule['is_enabled']: @@ -386,10 +386,10 @@ def _dt_to_ts_with_format(dt): if rule.get('use_strftime_index'): for token in ['%y', '%M', '%D']: if token in rule.get('index'): - logging.warning('Did you mean to use %s in the index? ' - 'The index will be formatted like %s' % (token, - datetime.datetime.now().strftime( - rule.get('index')))) + elastalert_logger.warning('Did you mean to use %s in the index? ' + 'The index will be formatted like %s' % (token, + datetime.datetime.now().strftime( + rule.get('index')))) if rule.get('scan_entire_timeframe') and not rule.get('timeframe'): raise EAException('scan_entire_timeframe can only be used if there is a timeframe specified') @@ -478,7 +478,7 @@ def adjust_deprecated_values(rule): rule['http_post_proxy'] = rule['simple_proxy'] if 'simple_webhook_url' in rule: rule['http_post_url'] = rule['simple_webhook_url'] - logging.warning( + elastalert_logger.warning( '"simple" alerter has been renamed "post" and comptability may be removed in a future release.') diff --git a/elastalert/opsgenie.py b/elastalert/opsgenie.py index bcdaf2d05..8db52d89f 100644 --- a/elastalert/opsgenie.py +++ b/elastalert/opsgenie.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import json -import logging import os.path import requests @@ -46,11 +45,11 @@ def _parse_responders(self, responders, responder_args, matches, default_respond try: formated_responders.append(responder.format(**responders_values)) except KeyError as error: - logging.warn("OpsGenieAlerter: Cannot create responder for OpsGenie Alert. Key not foud: %s. " % (error)) + elastalert_logger.warning("OpsGenieAlerter: Cannot create responder for OpsGenie Alert. Key not foud: %s. " % (error)) if not formated_responders: - logging.warn("OpsGenieAlerter: no responders can be formed. Trying the default responder ") + elastalert_logger.warning("OpsGenieAlerter: no responders can be formed. Trying the default responder ") if not default_responders: - logging.warn("OpsGenieAlerter: default responder not set. Falling back") + elastalert_logger.warning("OpsGenieAlerter: default responder not set. Falling back") formated_responders = responders else: formated_responders = default_responders @@ -90,7 +89,7 @@ def alert(self, matches): post['tags'] = self.tags if self.priority and self.priority not in ('P1', 'P2', 'P3', 'P4', 'P5'): - logging.warn("Priority level does not appear to be specified correctly. \ + elastalert_logger.warning("Priority level does not appear to be specified correctly. 
\ Please make sure to set it to a value between P1 and P5") else: post['priority'] = self.priority @@ -102,7 +101,7 @@ def alert(self, matches): if details: post['details'] = details - logging.debug(json.dumps(post)) + elastalert_logger.debug(json.dumps(post)) headers = { 'Content-Type': 'application/json', @@ -114,12 +113,12 @@ def alert(self, matches): try: r = requests.post(self.to_addr, json=post, headers=headers, proxies=proxies) - logging.debug('request response: {0}'.format(r)) + elastalert_logger.debug('request response: {0}'.format(r)) if r.status_code != 202: elastalert_logger.info("Error response from {0} \n " "API Response: {1}".format(self.to_addr, r)) r.raise_for_status() - logging.info("Alert sent to OpsGenie") + elastalert_logger.info("Alert sent to OpsGenie") except Exception as err: raise EAException("Error sending alert: {0}".format(err)) diff --git a/elastalert/util.py b/elastalert/util.py index bbb0600ff..3e9c9f664 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -152,7 +152,7 @@ def ts_to_dt(timestamp): def dt_to_ts(dt): if not isinstance(dt, datetime.datetime): - logging.warning('Expected datetime, got %s' % (type(dt))) + elastalert_logger.warning('Expected datetime, got %s' % (type(dt))) return dt ts = dt.isoformat() # Round microseconds to milliseconds @@ -176,7 +176,7 @@ def ts_to_dt_with_format(timestamp, ts_format): def dt_to_ts_with_format(dt, ts_format): if not isinstance(dt, datetime.datetime): - logging.warning('Expected datetime, got %s' % (type(dt))) + elastalert_logger.warning('Expected datetime, got %s' % (type(dt))) return dt ts = dt.strftime(ts_format) return ts @@ -361,7 +361,7 @@ def build_es_conn_config(conf): # Deprecated if 'boto_profile' in conf: - logging.warning('Found deprecated "boto_profile", use "profile" instead!') + elastalert_logger.warning('Found deprecated "boto_profile", use "profile" instead!') parsed_conf['profile'] = conf['boto_profile'] if 'profile' in conf: From 12e0c744378e86b7adc1798c4757c1fe26b2fa1d Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 22:22:23 +0900 Subject: [PATCH 55/83] apscheduler>=3.3.0,<4.0 --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index f90b092eb..c22697375 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -apscheduler>=3.3.0 +apscheduler>=3.3.0,<4.0 aws-requests-auth>=0.3.0 sortedcontainers>=2.2.2 boto3>=1.4.4 diff --git a/setup.py b/setup.py index a2723553e..356387056 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ packages=find_packages(), package_data={'elastalert': ['schema.yaml', 'es_mappings/**/*.json']}, install_requires=[ - 'apscheduler>=3.3.0', + 'apscheduler>=3.3.0,<4.0', 'aws-requests-auth>=0.3.0', 'sortedcontainers>=2.2.2', 'boto3>=1.4.4', From 596e017c36c3c0ddfe1aadd46a892143c9a7432c Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 22:27:24 +0900 Subject: [PATCH 56/83] Fix is_enabled not work with reload --- elastalert/elastalert.py | 17 ++++++++++++++--- elastalert/loaders.py | 3 --- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 137e22dad..1d69ef969 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -171,7 +171,10 @@ def __init__(self, args): remove = [] for rule in self.rules: - if not self.init_rule(rule): + if 'is_enabled' in rule and not rule['is_enabled']: + self.disabled_rules.append(rule) + remove.append(rule) + elif not 
self.init_rule(rule): remove.append(rule) list(map(self.rules.remove, remove)) @@ -969,7 +972,7 @@ def run_rule(self, rule, endtime, starttime=None): def init_rule(self, new_rule, new=True): ''' Copies some necessary non-config state from an exiting rule to a new rule. ''' - if not new: + if not new and self.scheduler.get_job(job_id=new_rule['name']): self.scheduler.remove_job(job_id=new_rule['name']) try: @@ -1089,6 +1092,15 @@ def load_rule_changes(self): elastalert_logger.info('Rule file %s is now disabled.' % (rule_file)) # Remove this rule if it's been disabled self.rules = [rule for rule in self.rules if rule['rule_file'] != rule_file] + # Stop job if is running + if self.scheduler.get_job(job_id=new_rule['name']): + self.scheduler.remove_job(job_id=new_rule['name']) + # Append to disabled_rule + for disabled_rule in self.disabled_rules: + if disabled_rule['name'] == new_rule['name']: + break + else: + self.disabled_rules.append(new_rule) continue except EAException as e: message = 'Could not load rule %s: %s' % (rule_file, e) @@ -1107,7 +1119,6 @@ def load_rule_changes(self): # Re-enable if rule had been disabled for disabled_rule in self.disabled_rules: if disabled_rule['name'] == new_rule['name']: - self.rules.append(disabled_rule) self.disabled_rules.remove(disabled_rule) break diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 381ac26ed..1c37d5b13 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -117,9 +117,6 @@ def load(self, conf, args=None): if not rule: elastalert_logger.error('Invalid rule file skipped: %s' % rule_file) continue - # By setting "is_enabled: False" in rule file, a rule is easily disabled - if 'is_enabled' in rule and not rule['is_enabled']: - continue if rule['name'] in names: raise EAException('Duplicate rule named %s' % (rule['name'])) except EAException as e: From 00ed9855369f53ccab06c77f307361f910539cd0 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 24 Sep 2021 22:36:58 +0900 Subject: [PATCH 57/83] Bugfix and better error handling on zabbix alerter --- elastalert/loaders.py | 4 ++- elastalert/zabbix.py | 59 +++++++++++++++++++++++++++++-------------- 2 files changed, 43 insertions(+), 20 deletions(-) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 1c37d5b13..31b0e46aa 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -14,6 +14,7 @@ from . import enhancements from . import ruletypes from .opsgenie import OpsGenieAlerter +from .zabbix import ZabbixAlerter from .util import dt_to_ts from .util import dt_to_ts_with_format from .util import dt_to_unix @@ -77,7 +78,8 @@ class RulesLoader(object): 'alerta': alerts.AlertaAlerter, 'post': alerts.HTTPPostAlerter, 'linenotify': alerts.LineNotifyAlerter, - 'hivealerter': alerts.HiveAlerter + 'hivealerter': alerts.HiveAlerter, + 'zabbix': ZabbixAlerter } # A partial ordering of alert types. 
Relative order will be preserved in the resulting alerts list diff --git a/elastalert/zabbix.py b/elastalert/zabbix.py index e3f13aa03..acced28d7 100644 --- a/elastalert/zabbix.py +++ b/elastalert/zabbix.py @@ -1,42 +1,44 @@ -from alerts import Alerter # , BasicMatchString -import logging -from pyzabbix.api import ZabbixAPI -from pyzabbix import ZabbixSender, ZabbixMetric from datetime import datetime +from pyzabbix import ZabbixSender, ZabbixMetric, ZabbixAPI + +from .alerts import Alerter +from .util import elastalert_logger, EAException + class ZabbixClient(ZabbixAPI): - def __init__(self, url='http://localhost', use_authenticate=False, user='Admin', password='zabbix', sender_host='localhost', - sender_port=10051): + def __init__(self, url='http://localhost', use_authenticate=False, user='Admin', password='zabbix', + sender_host='localhost', sender_port=10051): self.url = url self.use_authenticate = use_authenticate self.sender_host = sender_host self.sender_port = sender_port self.metrics_chunk_size = 200 self.aggregated_metrics = [] - self.logger = logging.getLogger(self.__class__.__name__) - super(ZabbixClient, self).__init__(url=self.url, use_authenticate=self.use_authenticate, user=user, password=password) + + super(ZabbixClient, self).__init__(url=self.url, + use_authenticate=self.use_authenticate, + user=user, + password=password) def send_metric(self, hostname, key, data): zm = ZabbixMetric(hostname, key, data) if self.send_aggregated_metrics: - self.aggregated_metrics.append(zm) if len(self.aggregated_metrics) > self.metrics_chunk_size: - self.logger.info("Sending: %s metrics" % (len(self.aggregated_metrics))) + elastalert_logger.info("Sending: %s metrics" % (len(self.aggregated_metrics))) try: - ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port).send(self.aggregated_metrics) + ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port) \ + .send(self.aggregated_metrics) self.aggregated_metrics = [] except Exception as e: - self.logger.exception(e) - pass + elastalert_logger.exception(e) else: try: - ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port).send(zm) + ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port).send([zm]) except Exception as e: - self.logger.exception(e) - pass + elastalert_logger.exception(e) class ZabbixAlerter(Alerter): @@ -54,6 +56,9 @@ def __init__(self, *args): self.zbx_sender_port = self.rule.get('zbx_sender_port', 10051) self.zbx_host = self.rule.get('zbx_host') self.zbx_key = self.rule.get('zbx_key') + self.timestamp_field = self.rule.get('timestamp_field', '@timestamp') + self.timestamp_type = self.rule.get('timestamp_type', 'iso') + self.timestamp_strptime = self.rule.get('timestamp_strptime', '%Y-%m-%dT%H:%M:%S.%f%z') # Alert is called def alert(self, matches): @@ -63,10 +68,26 @@ def alert(self, matches): # the aggregation option set zm = [] for match in matches: - ts_epoch = int(datetime.strptime(match['@timestamp'], "%Y-%m-%dT%H:%M:%S.%fZ").strftime('%s')) - zm.append(ZabbixMetric(host=self.zbx_host, key=self.zbx_key, value=1, clock=ts_epoch)) + if ':' not in match[self.timestamp_field] or '-' not in match[self.timestamp_field]: + ts_epoch = int(match[self.timestamp_field]) + else: + try: + ts_epoch = int(datetime.strptime(match[self.timestamp_field], self.timestamp_strptime) + .timestamp()) + except ValueError: + ts_epoch = int(datetime.strptime(match[self.timestamp_field], '%Y-%m-%dT%H:%M:%S%z') + .timestamp()) + zm.append(ZabbixMetric(host=self.zbx_host, 
key=self.zbx_key, value='1', clock=ts_epoch)) - ZabbixSender(zabbix_server=self.zbx_sender_host, zabbix_port=self.zbx_sender_port).send(zm) + try: + response = ZabbixSender(zabbix_server=self.zbx_sender_host, zabbix_port=self.zbx_sender_port).send(zm) + if response.failed: + elastalert_logger.warning("Missing zabbix host '%s' or host's item '%s', alert will be discarded" + % (self.zbx_host, self.zbx_key)) + else: + elastalert_logger.info("Alert sent to Zabbix") + except Exception as e: + raise EAException("Error sending alert to Zabbix: %s" % e) # get_info is called after an alert is sent to get data that is written back # to Elasticsearch in the field "alert_info" From 227410d7e13bfb9e6421943944b91b465b4f9214 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 25 Sep 2021 01:09:15 +0900 Subject: [PATCH 58/83] Revert travis-ci job error --- requirements.txt | 1 - setup.py | 1 - 2 files changed, 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index c22697375..aa379b943 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,6 @@ boto3>=1.4.4 cffi>=1.11.5 configparser>=3.5.0 croniter>=0.3.16 -cryptography<3.4 elasticsearch==7.0.0 envparse>=0.2.0 exotel>=0.1.3 diff --git a/setup.py b/setup.py index 356387056..9a5272d4b 100644 --- a/setup.py +++ b/setup.py @@ -48,7 +48,6 @@ 'texttable>=0.8.8', 'twilio>=6.0.0,<6.58', 'cffi>=1.11.5', - 'cryptography<3.4', 'tzlocal<3.0' ] ) From d1ab0ba0cd7069ec51da10ca09b2fb5f57cf5b03 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 25 Sep 2021 01:30:24 +0900 Subject: [PATCH 59/83] Update Pytest --- pytest.ini | 2 ++ requirements-dev.txt | 9 +++++---- tests/base_test.py | 12 ++++++------ tests/loaders_test.py | 2 +- tox.ini | 4 ++-- 5 files changed, 16 insertions(+), 13 deletions(-) diff --git a/pytest.ini b/pytest.ini index 0ad3341d9..259ba35a2 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,3 +1,5 @@ [pytest] markers = elasticsearch: mark a test as using elasticsearch. +filterwarnings = + ignore::pytest.PytestUnhandledThreadExceptionWarning \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index d15887c01..2b23894a9 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,9 +1,10 @@ -r requirements.txt -coverage==4.5.4 +coverage==5.5 +docutils<0.17 flake8 pre-commit -pylint<1.4 -pytest<3.3.0 +pylint<2.10 +pytest==6.2.4 setuptools sphinx_rtd_theme -tox==3.20.1 +tox==3.24.1 diff --git a/tests/base_test.py b/tests/base_test.py index 7e756ba92..15d6e5ea3 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -1159,7 +1159,7 @@ def test_wait_until_responsive(ea): ] -def test_wait_until_responsive_timeout_es_not_available(ea, capsys): +def test_wait_until_responsive_timeout_es_not_available(ea, caplog): """Bail out if ElasticSearch doesn't (quickly) become responsive.""" # Never becomes responsive :-) @@ -1175,8 +1175,8 @@ def test_wait_until_responsive_timeout_es_not_available(ea, capsys): assert exc.value.code == 1 # Ensure we get useful diagnostics. - output, errors = capsys.readouterr() - assert 'Could not reach ElasticSearch at "es:14900".' in errors + user, level, message = caplog.record_tuples[0] + assert 'Could not reach ElasticSearch at "es:14900".' in message # Slept until we passed the deadline. 
sleep.mock_calls == [ @@ -1186,7 +1186,7 @@ def test_wait_until_responsive_timeout_es_not_available(ea, capsys): ] -def test_wait_until_responsive_timeout_index_does_not_exist(ea, capsys): +def test_wait_until_responsive_timeout_index_does_not_exist(ea, caplog): """Bail out if ElasticSearch doesn't (quickly) become responsive.""" # Never becomes responsive :-) @@ -1202,8 +1202,8 @@ def test_wait_until_responsive_timeout_index_does_not_exist(ea, capsys): assert exc.value.code == 1 # Ensure we get useful diagnostics. - output, errors = capsys.readouterr() - assert 'Writeback alias "wb_a" does not exist, did you run `elastalert-create-index`?' in errors + user, level, message = caplog.record_tuples[0] + assert 'Writeback alias "wb_a" does not exist, did you run `elastalert-create-index`?' in message # Slept until we passed the deadline. sleep.mock_calls == [ diff --git a/tests/loaders_test.py b/tests/loaders_test.py index 4524348be..009207fd5 100644 --- a/tests/loaders_test.py +++ b/tests/loaders_test.py @@ -340,7 +340,7 @@ def test_raises_on_missing_config(): mock_rule_open.return_value = test_rule_copy with mock.patch('os.walk') as mock_walk: mock_walk.return_value = [('', [], ['testrule.yaml'])] - with pytest.raises(EAException, message='key %s should be required' % key): + with pytest.raises(EAException): rules = load_conf(test_args) rules['rules'] = rules['rules_loader'].load(rules) diff --git a/tox.ini b/tox.ini index 71099e17c..76e606b2b 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,7 @@ envlist = py36,docs [testenv] deps = -rrequirements-dev.txt commands = - coverage run --source=elastalert/,tests/ -m pytest --strict {posargs} + coverage run --source=elastalert/,tests/ -m pytest --strict-markers {posargs} coverage report -m flake8 . @@ -25,6 +25,6 @@ norecursedirs = .* virtualenv_run docs build venv env [testenv:docs] deps = {[testenv]deps} - sphinx==1.6.6 + sphinx==4.1.2 changedir = docs commands = sphinx-build -b html -d build/doctrees -W source build/html From 38846cee47a20fc8e0b16187814bbf4d6e54eee1 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Tue, 28 Sep 2021 02:48:49 +0900 Subject: [PATCH 60/83] Revert Add tzlocal<3.0 --- requirements.txt | 3 +-- setup.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index aa379b943..16a431c37 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,5 +18,4 @@ PyYAML>=5.1 requests>=2.10.0 stomp.py>=4.1.17 texttable>=0.8.8 -twilio>=6.0.0,<6.58 -tzlocal<3.0 \ No newline at end of file +twilio>=6.0.0,<6.58 \ No newline at end of file diff --git a/setup.py b/setup.py index 9a5272d4b..3b2769050 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,6 @@ 'stomp.py>=4.1.17', 'texttable>=0.8.8', 'twilio>=6.0.0,<6.58', - 'cffi>=1.11.5', - 'tzlocal<3.0' + 'cffi>=1.11.5' ] ) From 72ae2c721b393000445f2fd2d0f6ea8a4cca3ed8 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Wed, 29 Sep 2021 02:17:58 +0900 Subject: [PATCH 61/83] remove configparser --- requirements.txt | 1 - setup.py | 1 - 2 files changed, 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 16a431c37..55b0c1bba 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,6 @@ aws-requests-auth>=0.3.0 sortedcontainers>=2.2.2 boto3>=1.4.4 cffi>=1.11.5 -configparser>=3.5.0 croniter>=0.3.16 elasticsearch==7.0.0 envparse>=0.2.0 diff --git a/setup.py b/setup.py index 3b2769050..20ba4a9f1 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,6 @@ 'aws-requests-auth>=0.3.0', 'sortedcontainers>=2.2.2', 'boto3>=1.4.4', - 
'configparser>=3.5.0', 'croniter>=0.3.16', 'elasticsearch==7.0.0', 'envparse>=0.2.0', From b547545129134be643ca8325f51fdb9beaa94879 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 3 Oct 2021 21:46:57 +0900 Subject: [PATCH 62/83] Add ca certs and ignore ssl to HTTP Post --- docs/source/ruletypes.rst | 4 ++++ elastalert/alerts.py | 12 +++++++++++- tests/alerts_test.py | 9 ++++++--- 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 8d41d0d8e..29545fe4b 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2158,6 +2158,10 @@ Optional: ``http_post_timeout``: The timeout value, in seconds, for making the post. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. +``http_post_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. + +``http_post_ignore_ssl_errors``: By default ElastAlert 2 will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. + Example usage:: alert: post diff --git a/elastalert/alerts.py b/elastalert/alerts.py index dcee79587..00ca898e6 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1873,6 +1873,8 @@ def __init__(self, rule): self.post_all_values = self.rule.get('http_post_all_values', not self.post_payload) self.post_http_headers = self.rule.get('http_post_headers', {}) self.timeout = self.rule.get('http_post_timeout', 10) + self.post_ca_certs = self.rule.get('http_post_ca_certs') + self.post_ignore_ssl_errors = self.rule.get('http_post_ignore_ssl_errors', False) def alert(self, matches): """ Each match will trigger a POST to the specified endpoint(s). """ @@ -1885,12 +1887,20 @@ def alert(self, matches): "Content-Type": "application/json", "Accept": "application/json;charset=utf-8" } + if self.post_ca_certs: + verify = self.post_ca_certs + else: + verify = not self.post_ignore_ssl_errors + if self.post_ignore_ssl_errors: + requests.packages.urllib3.disable_warnings() + headers.update(self.post_http_headers) proxies = {'https': self.post_proxy} if self.post_proxy else None for url in self.post_url: try: response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), - headers=headers, proxies=proxies, timeout=self.timeout) + headers=headers, proxies=proxies, timeout=self.timeout, + verify=verify) response.raise_for_status() except RequestException as e: raise EAException("Error posting HTTP Post alert: %s" % e) diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 1bd7a9b2f..087869076 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -1715,7 +1715,8 @@ def test_http_alerter_with_payload(): data=mock.ANY, headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - timeout=10 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1750,7 +1751,8 @@ def test_http_alerter_with_payload_all_values(): data=mock.ANY, headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - timeout=10 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1782,7 +1784,8 @@ def test_http_alerter_without_payload(): data=mock.ANY, headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - timeout=10 + timeout=10, + verify=True ) assert expected_data == 
json.loads(mock_post_request.call_args_list[0][1]['data']) From 0a09e368d656b4f8c35638b241b65b3a3bd94068 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 3 Oct 2021 21:58:13 +0900 Subject: [PATCH 63/83] Add Slack Alerts Footer --- docs/source/ruletypes.rst | 4 ++++ elastalert/alerts.py | 8 ++++++++ 2 files changed, 12 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 29545fe4b..4d32b2662 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1768,6 +1768,10 @@ Provide absolute address of the pciture, for example: http://some.address.com/im ``slack_title_link``: You can add a link in your Slack notification by setting this to a valid URL. Requires slack_title to be set. +``slack_footer``: Add a static footer text for alert. Defaults to "". + +``slack_footer_icon``: A Public Url for a footer icon. Defaults to "". + ``slack_timeout``: You can specify a timeout value, in seconds, for making communicating with Slack. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. ``slack_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the slack notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``. diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 00ca898e6..4ed508413 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1037,6 +1037,8 @@ def __init__(self, rule): self.slack_attach_kibana_discover_url = self.rule.get('slack_attach_kibana_discover_url', False) self.slack_kibana_discover_color = self.rule.get('slack_kibana_discover_color', '#ec4b98') self.slack_kibana_discover_title = self.rule.get('slack_kibana_discover_title', 'Discover in Kibana') + self.slack_footer = self.rule.get('slack_footer', '') + self.slack_footer_icon = self.rule.get('slack_footer_icon', '') def format_body(self, body): # https://api.slack.com/docs/formatting @@ -1099,6 +1101,12 @@ def alert(self, matches): if self.slack_title_link != '': payload['attachments'][0]['title_link'] = self.slack_title_link + if self.slack_footer != '': + payload['attachments'][0]['footer'] = self.slack_footer + + if self.slack_footer_icon != '': + payload['attachments'][0]['footer_icon'] = self.slack_footer_icon + if self.slack_attach_kibana_discover_url: kibana_discover_url = lookup_es_key(matches[0], 'kibana_discover_url') if kibana_discover_url: From c7d1d7788a471a9045deb4bd10d0312e8794f1d6 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 3 Oct 2021 22:22:45 +0900 Subject: [PATCH 64/83] Add Support for Twilio Copilot --- docs/source/ruletypes.rst | 43 ++++++-- elastalert/alerts.py | 28 ++++-- tests/alerts_test.py | 201 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 258 insertions(+), 14 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 4d32b2662..3a683cd5a 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1948,18 +1948,49 @@ The alerter has one optional argument: Twilio ~~~~~~ -Twilio alerter will trigger an incident to a mobile phone as sms from your twilio phone number. Alert name will arrive as sms once this option is chosen. +The Twilio alerter will send an alert to a mobile phone as an SMS from your Twilio +phone number. The SMS will contain the alert name. You may use either Twilio SMS +or Twilio Copilot to send the message, controlled by the ``twilio_use_copilot`` +option. 
-The alerter requires the following option: +Note that when Twilio Copilot *is* used the ``twilio_message_service_sid`` +option is required. Likewise, when *not* using Twilio Copilot, the +``twilio_from_number`` option is required. + +The alerter requires the following options: -``twilio_account_sid``: This is sid of your twilio account. +``twilio_account_sid``: The SID of your Twilio account. -``twilio_auth_token``: Auth token assosiated with your twilio account. +``twilio_auth_token``: Auth token associated with your Twilio account. -``twilio_to_number``: The phone number where you would like send the notification. +``twilio_to_number``: The phone number where you would like to send the alert. + +Either one of + * ``twilio_from_number``: The Twilio phone number from which the alert will be sent. + * ``twilio_message_service_sid``: The SID of your Twilio message service. + +Optional: -``twilio_from_number``: Your twilio phone number from which message will be sent. +``twilio_use_copilot``: Whether or not to use Twilio Copilot, False by default. +Example with Copilot usage:: + + alert: + - "twilio" + twilio_use_copilot: True + twilio_to_number: "0123456789" + twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345" + twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" + twilio_message_service_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" + +Example with SMS usage:: + + alert: + - "twilio" + twilio_to_number: "0123456789" + twilio_from_number: "9876543210" + twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345" + twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" Splunk On-Call (Formerly VictorOps) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 4ed508413..7e4418def 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1453,23 +1453,35 @@ def get_info(self): class TwilioAlerter(Alerter): - required_options = frozenset(['twilio_account_sid', 'twilio_auth_token', 'twilio_to_number', 'twilio_from_number']) + required_options = frozenset(['twilio_account_sid', 'twilio_auth_token', 'twilio_to_number']) def __init__(self, rule): super(TwilioAlerter, self).__init__(rule) - self.twilio_account_sid = self.rule['twilio_account_sid'] - self.twilio_auth_token = self.rule['twilio_auth_token'] - self.twilio_to_number = self.rule['twilio_to_number'] - self.twilio_from_number = self.rule['twilio_from_number'] + self.twilio_account_sid = self.rule.get('twilio_account_sid', None) + self.twilio_auth_token = self.rule.get('twilio_auth_token', None) + self.twilio_to_number = self.rule.get('twilio_to_number', None) + self.twilio_from_number = self.rule.get('twilio_from_number', None) + self.twilio_message_service_sid = self.rule.get('twilio_message_service_sid', None) + self.twilio_use_copilot = self.rule.get('twilio_use_copilot', False) def alert(self, matches): client = TwilioClient(self.twilio_account_sid, self.twilio_auth_token) try: - client.messages.create(body=self.rule['name'], - to=self.twilio_to_number, - from_=self.twilio_from_number) + if self.twilio_use_copilot: + if self.twilio_message_service_sid is None: + raise EAException("Twilio Copilot requires the 'twilio_message_service_sid' option") + client.messages.create(body=self.rule['name'], + to=self.twilio_to_number, + messaging_service_sid=self.twilio_message_service_sid) + else: + if self.twilio_from_number is None: + raise EAException("Twilio SMS requires the 'twilio_from_number' option") + + client.messages.create(body=self.rule['name'], + to=self.twilio_to_number, + 
from_=self.twilio_from_number) except TwilioRestException as e: raise EAException("Error posting to twilio: %s" % e) diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 087869076..bb3e6fb93 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -2,6 +2,7 @@ import base64 import datetime import json +import logging import subprocess from unittest import mock @@ -19,10 +20,12 @@ from elastalert.alerts import MsTeamsAlerter from elastalert.alerts import PagerDutyAlerter from elastalert.alerts import SlackAlerter +from elastalert.alerts import TwilioAlerter from elastalert.loaders import FileRulesLoader from elastalert.opsgenie import OpsGenieAlerter from elastalert.util import ts_add from elastalert.util import ts_now +from elastalert.util import EAException class mock_rule: @@ -2322,3 +2325,201 @@ def test_alert_subject_size_limit_with_args(ea): alert = Alerter(rule) alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) assert 6 == len(alertSubject) + + +def test_twilio_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'twilio_from_number': 'xxxxx4', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TwilioAlerter(rule) + + expected_data = { + 'type': 'twilio', + 'twilio_client_name': 'xxxxx4' + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('twilio_account_sid, twilio_auth_token, twilio_to_number, expected_data', [ + ('', '', '', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('xxxx1', '', '', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('', 'xxxx2', '', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('', '', 'INFO', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('xxxx1', 'xxxx2', '', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('xxxx1', '', 'INFO', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('', 'xxxx2', 'INFO', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('xxxx1', 'xxxx2', 'INFO', + { + 'type': 'twilio', + 'twilio_client_name': 'xxxxx4' + }), +]) +def test_twilio_required_error(twilio_account_sid, twilio_auth_token, twilio_to_number, expected_data): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_from_number': 'xxxxx4', + 'alert': [] + } + + if twilio_account_sid: + rule['twilio_account_sid'] = twilio_account_sid + + if twilio_auth_token: + rule['twilio_auth_token'] = twilio_auth_token + + if twilio_to_number: + rule['twilio_to_number'] = twilio_to_number + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TwilioAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception as ea: + assert expected_data in str(ea) + + +@pytest.mark.parametrize('twilio_use_copilot, twilio_message_service_sid, twilio_from_number, expected_data', [ + (True, None, 'test', True), + (False, 'test', None, True), +]) +def test_twilio_use_copilot(twilio_use_copilot, twilio_message_service_sid, twilio_from_number, expected_data): + try: + rule = { + 'name': 
'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'alert': [] + } + + if twilio_use_copilot: + rule['twilio_use_copilot'] = twilio_use_copilot + + if twilio_message_service_sid: + rule['twilio_message_service_sid'] = twilio_message_service_sid + + if twilio_from_number: + rule['twilio_from_number'] = twilio_from_number + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TwilioAlerter(rule) + + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + alert.alert([match]) + except EAException: + assert expected_data + + +def test_twilio(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'twilio_from_number': 'xxxxx4', + 'alert': [] + } + match = { + '@timestamp': '2021-01-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + + with mock.patch('elastalert.alerts.TwilioClient.messages') as mock_twilio: + mock_twilio.messages.create() + mock_twilio.return_value = 200 + alert = TwilioAlerter(rule) + alert.alert([match]) + expected = [ + mock.call.messages.create(), + mock.call.create(body='Test Rule', from_='xxxxx4', to='xxxxx3'), + ] + + assert mock_twilio.mock_calls == expected + assert ('elastalert', logging.INFO, 'Trigger sent to Twilio') == caplog.record_tuples[0] + + +def test_twilio_copilot(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'twilio_message_service_sid': 'xxxxx5', + 'twilio_use_copilot': True, + 'alert': [] + } + match = { + '@timestamp': '2021-01-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + + with mock.patch('elastalert.alerts.TwilioClient.messages') as mock_twilio: + mock_twilio.messages.create() + mock_twilio.return_value = 200 + alert = TwilioAlerter(rule) + alert.alert([match]) + expected = [ + mock.call.messages.create(), + mock.call.create(body='Test Rule', messaging_service_sid='xxxxx5', to='xxxxx3'), + ] + + assert mock_twilio.mock_calls == expected + assert ('elastalert', logging.INFO, 'Trigger sent to Twilio') == caplog.record_tuples[0] + + +def test_twilio_rest_exception(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'twilio_from_number': 'xxxxx4', + 'alert': [] + } + match = { + '@timestamp': '2021-01-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TwilioAlerter(rule) + alert.alert([match]) + + assert 'Error posting to twilio: ' in str(ea) From 877640ac99f5cbc655ba547fa50f3a69d2308a67 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 3 Oct 2021 22:43:28 +0900 Subject: [PATCH 65/83] add discord alerter --- README.md | 34 ++-- docs/source/elastalert.rst | 34 ++-- docs/source/ruletypes.rst | 34 ++++ elastalert/alerts.py | 60 +++++++ elastalert/loaders.py | 1 + elastalert/schema.yaml | 
10 ++
 tests/alerts_test.py | 312 +++++++++++++++++++++++++++++++++++++
 7 files changed, 453 insertions(+), 32 deletions(-)

diff --git a/README.md b/README.md
index dcce2e95c..b7396f63a 100644
--- a/README.md
+++ b/README.md
@@ -39,28 +39,30 @@ Several rule types with common monitoring paradigms are included with ElastAlert
 Currently, we have built-in support for the following alert types:
 
-- E-mail
-- Jira
-- OpsGenie
+- Alerta
+- Amazon Simple Notification Service (AWS SNS)
 - Command
-- Microsoft Teams
-- Slack
-- Mattermost
-- Telegram
+- Debug
+- Discord
+- Email
+- Exotel
+- Gitter
 - Google Chat
-- Amazon Simple Notification Service (AWS SNS)
-- Splunk On-Call (Formerly VictorOps)
+- HTTP POST
+- Jira
+- Line Notify
+- Mattermost
+- Microsoft Teams
+- OpsGenie
 - PagerDuty
 - PagerTree
-- Exotel
-- Twilio
-- Gitter
 - ServiceNow
-- Debug
+- Slack
+- Splunk On-Call (Formerly VictorOps)
 - Stomp
-- Alerta
-- HTTP POST
-- Line Notify
+- Telegram
+- TheHive
+- Twilio
 - Zabbix
 
 Additional rule types and alerts can be easily imported or written.
diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst
index fd13b595c..17b7b6ad6 100755
--- a/docs/source/elastalert.rst
+++ b/docs/source/elastalert.rst
@@ -31,28 +31,30 @@ Several rule types with common monitoring paradigms are included with ElastAlert
 Currently, we have support built in for these alert types:
 
-- E-mail
-- Jira
-- OpsGenie
+- Alerta
+- Amazon Simple Notification Service (AWS SNS)
 - Command
-- Microsoft Teams
-- Slack
-- Mattermost
-- Telegram
+- Debug
+- Discord
+- Email
+- Exotel
+- Gitter
 - Google Chat
-- Amazon Simple Notification Service (AWS SNS)
-- Splunk On-Call (Formerly VictorOps)
+- HTTP POST
+- Jira
+- Line Notify
+- Mattermost
+- Microsoft Teams
+- OpsGenie
 - PagerDuty
 - PagerTree
-- Exotel
-- Twilio
-- Gitter
 - ServiceNow
-- Debug
+- Slack
+- Splunk On-Call (Formerly VictorOps)
 - Stomp
-- Alerta
-- HTTP POST
-- Line Notify
+- Telegram
+- TheHive
+- Twilio
 - Zabbix
 
 Additional rule types and alerts can be easily imported or written. (See :ref:`Writing rule types ` and :ref:`Writing alerts `)
diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 3a683cd5a..7abaa8daa 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -1450,6 +1450,40 @@ Example usage using new-style format::
       - command
     command: ["/bin/send_alert", "--username", "{match[username]}"]
 
+Discord
+~~~~~~~
+
+Discord will send a notification to a Discord application. The body of the notification is formatted the same as with other alerters.
+
+Required:
+
+``discord_webhook_url``: The webhook URL.
+
+Optional:
+
+``discord_emoji_title``: By default ElastAlert 2 will use the ``:warning:`` emoji when posting to the channel. You can use a different emoji per ElastAlert 2 rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored.
+
+``discord_proxy``: By default ElastAlert 2 will not use a network proxy to send notifications to Discord. Set this option using ``hostname:port`` if you need to use a proxy. only supports https.
+
+``discord_proxy_login``: The Discord proxy auth username.
+
+``discord_proxy_password``: The Discord proxy auth password.
+
+``discord_embed_color``: embed color. By default ``0xffffff``.
+
+``discord_embed_footer``: embed footer.
+
+``discord_embed_icon_url``: You can provide an icon_url to use a custom image. Provide the absolute address of the picture.
+ +Example usage:: + + alert: + - "discord" + discord_webhook_url: "Your discord webhook url" + discord_emoji_title: ":lock:" + discord_embed_color: 0xE24D42 + discord_embed_footer: "Message sent by from your computer" + discord_embed_icon_url: "https://humancoders-formations.s3.amazonaws.com/uploads/course/logo/38/thumb_bigger_formation-elasticsearch.png" Email ~~~~~ diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 7e4418def..8c84213ee 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2038,3 +2038,63 @@ def get_info(self): 'type': 'hivealerter', 'hive_host': self.rule.get('hive_connection', {}).get('hive_host', '') } + + +class DiscordAlerter(Alerter): + """ Created a Discord for each alert """ + required_options = frozenset(['discord_webhook_url']) + + def __init__(self, rule): + super(DiscordAlerter, self).__init__(rule) + self.discord_webhook_url = self.rule.get('discord_webhook_url', None) + self.discord_emoji_title = self.rule.get('discord_emoji_title', ':warning:') + self.discord_proxy = self.rule.get('discord_proxy', None) + self.discord_proxy_login = self.rule.get('discord_proxy_login', None) + self.discord_proxy_password = self.rule.get('discord_proxy_password', None) + self.discord_embed_color = self.rule.get('discord_embed_color', 0xffffff) + self.discord_embed_footer = self.rule.get('discord_embed_footer', None) + self.discord_embed_icon_url = self.rule.get('discord_embed_icon_url', None) + + def alert(self, matches): + body = '' + title = u'%s' % (self.create_title(matches)) + for match in matches: + body += str(BasicMatchString(self.rule, match)) + if len(matches) > 1: + body += '\n----------------------------------------\n' + if len(body) > 2047: + body = body[0:1950] + '\n *message was cropped according to discord embed description limits!*' + + proxies = {'https': self.discord_proxy} if self.discord_proxy else None + auth = HTTPProxyAuth(self.discord_proxy_login, self.discord_proxy_password) if self.discord_proxy_login else None + headers = {"Content-Type": "application/json"} + + data = {} + data["content"] = "%s %s %s" % (self.discord_emoji_title, title, self.discord_emoji_title) + data["embeds"] = [] + embed = {} + embed["description"] = "%s" % (body) + embed["color"] = (self.discord_embed_color) + + if self.discord_embed_footer: + embed["footer"] = {} + embed["footer"]["text"] = (self.discord_embed_footer) if self.discord_embed_footer else None + embed["footer"]["icon_url"] = (self.discord_embed_icon_url) if self.discord_embed_icon_url else None + else: + None + + data["embeds"].append(embed) + + try: + response = requests.post(self.discord_webhook_url, data=json.dumps(data), headers=headers, proxies=proxies, auth=auth) + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Discord: %s. 
Details: %s" % (e, "" if e.response is None else e.response.text)) + + elastalert_logger.info( + "Alert sent to the webhook %s" % self.discord_webhook_url) + + def get_info(self): + return {'type': 'discord', + 'discord_webhook_url': self.discord_webhook_url} diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 31b0e46aa..6ac18290c 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -61,6 +61,7 @@ class RulesLoader(object): 'opsgenie': OpsGenieAlerter, 'stomp': alerts.StompAlerter, 'debug': alerts.DebugAlerter, + 'discord': alerts.DiscordAlerter, 'command': alerts.CommandAlerter, 'sns': alerts.SnsAlerter, 'ms_teams': alerts.MsTeamsAlerter, diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 25f5f8459..a3479b7fa 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -239,6 +239,16 @@ properties: pipe_match_json: {type: boolean} fail_on_non_zero_exit: {type: boolean} + ## Discord + discord_webhook_url: {type: string} + discord_emoji_title: {type: string} + discord_proxy: {type: string} + discord_proxy_login: {type: string} + discord_proxy_password: {type: string} + discord_embed_color: {type: integer} + discord_embed_footer: {type: string} + discord_embed_icon_url: {type: string} + ### Email email: *arrayOfString email_reply_to: {type: string} diff --git a/tests/alerts_test.py b/tests/alerts_test.py index bb3e6fb93..410a9998a 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -5,6 +5,9 @@ import logging import subprocess +from requests import RequestException +from requests.auth import HTTPProxyAuth + from unittest import mock import pytest from jira.exceptions import JIRAError @@ -13,6 +16,7 @@ from elastalert.alerts import Alerter from elastalert.alerts import BasicMatchString from elastalert.alerts import CommandAlerter +from elastalert.alerts import DiscordAlerter from elastalert.alerts import EmailAlerter from elastalert.alerts import HTTPPostAlerter from elastalert.alerts import JiraAlerter @@ -2523,3 +2527,311 @@ def test_twilio_rest_exception(): alert.alert([match]) assert 'Error posting to twilio: ' in str(ea) + + +def test_discord(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'discord_embed_footer': 'footer', + 'discord_embed_icon_url': 'http://xxxx/image.png', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'color': 0xffffff, + 'footer': { + 'text': 'footer', + 'icon_url': 'http://xxxx/image.png' + } + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to the webhook http://xxxxxxx') == caplog.record_tuples[0] + + +def test_discord_not_footer(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 
'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_discord_proxy(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'discord_proxy': 'http://proxy.url', + 'discord_proxy_login': 'admin', + 'discord_proxy_password': 'password', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_discord_description_maxlength(): + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule' + ('a' * 1933) + + '\n *message was cropped according to discord embed description limits!*', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_discord_ea_exception(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' + } + 
rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + assert 'Error posting to Discord: . Details: ' in str(ea) + + +def test_discord_getinfo(): + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + + expected_data = { + 'type': 'discord', + 'discord_webhook_url': 'http://xxxxxxx' + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('discord_webhook_url, expected_data', [ + ('', 'Missing required option(s): discord_webhook_url'), + ('http://xxxxxxx', + { + 'type': 'discord', + 'discord_webhook_url': 'http://xxxxxxx' + }), +]) +def test_discord_required_error(discord_webhook_url, expected_data): + try: + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'alert': [], + 'alert_subject': 'Test Discord' + } + + if discord_webhook_url: + rule['discord_webhook_url'] = discord_webhook_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception as ea: + assert expected_data in str(ea) + + +def test_discord_matches(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'discord_embed_footer': 'footer', + 'discord_embed_icon_url': 'http://xxxx/image.png', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match, match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n' + + '\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'somefield: foobarbaz\n' + + '\n' + + '----------------------------------------\n' + + 'Test Discord Rule\n' + + '\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'somefield: foobarbaz\n' + + '\n' + + '----------------------------------------\n', + 'color': 0xffffff, + 'footer': { + 'text': 'footer', + 'icon_url': 'http://xxxx/image.png' + } + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data From abc5e1c5ed2fad8c777b0da50a0543ebf59f3342 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 3 Oct 2021 23:55:51 +0900 Subject: [PATCH 66/83] Add support for RocketChat --- README.md | 1 + docs/source/elastalert.rst | 1 + docs/source/ruletypes.rst | 79 ++- elastalert/alerts.py | 111 +++++ elastalert/loaders.py | 1 + elastalert/schema.yaml | 16 +- tests/alerts_test.py | 954 +++++++++++++++++++++++++++++++++++++ 7 files changed, 1159 insertions(+), 4 deletions(-) diff --git a/README.md 
b/README.md index b7396f63a..aa72f8c0b 100644 --- a/README.md +++ b/README.md @@ -54,6 +54,7 @@ Currently, we have built-in support for the following alert types: - Mattermost - Microsoft Teams - OpsGenie +- Rocket.Chat - PagerDuty - PagerTree - ServiceNow diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 17b7b6ad6..82b023ed4 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -48,6 +48,7 @@ Currently, we have support built in for these alert types: - OpsGenie - PagerDuty - PagerTree +- Rocket.Chat - ServiceNow - Slack - Splunk On-Call (Formerly VictorOps) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 7abaa8daa..bfb7647dc 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1461,9 +1461,9 @@ Required: Optional: -``discord_emoji_title``: By default ElastAlert 2 will use the ``:warning:`` emoji when posting to the channel. You can use a different emoji per ElastAlert 2 rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored. +``discord_emoji_title``: By default ElastAlert will use the ``:warning:`` emoji when posting to the channel. You can use a different emoji per ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored. -``discord_proxy``: By default ElastAlert 2 will not use a network proxy to send notifications to Discord. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. +``discord_proxy``: By default ElastAlert will not use a network proxy to send notifications to Discord. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. ``discord_proxy_login``: The Discord proxy auth username. @@ -1733,6 +1733,79 @@ Example usage:: aws_access_key_id: 'XXXXXXXXXXXXXXXXXX'' aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY' +Rocket.Chat +~~~~~~~~~~~ + +Rocket.Chat alerter will send a notification to a predefined channel. The body of the notification is formatted the same as with other alerters. +https://developer.rocket.chat/api/rest-api/methods/chat/postmessage + +The alerter requires the following option: + +``rocket_chat_webhook_url``: The webhook URL that includes your auth data and the ID of the channel (room) you want to post to. You can use a list of URLs to send to multiple channels. + +Optional: + +``rocket_chat_username_override``: By default Rocket.Chat will use username defined in Integration when posting to the channel. Use this option to change it (free text). + +``rocket_chat_channel_override``: Incoming webhooks have a default channel, but it can be overridden. A public channel can be specified “#other-channel”, and a Direct Message with “@username”. + +``rocket_chat_emoji_override``: By default ElastAlert will use the :ghost: emoji when posting to the channel. You can use a different emoji per +ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . + +``rocket_chat_msg_color``: By default the alert will be posted with the ‘danger’ color. You can also use ‘good’ or ‘warning’ colors. + +``rocket_chat_text_string``: Notification message you want to add. + +``rocket_chat_proxy``: By default ElastAlert will not use a network proxy to send notifications to Rocket.Chat. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. 
+ +``rocket_chat_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. + +``rocket_chat_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. + +``rocket_chat_timeout``: You can specify a timeout value, in seconds, for making communicating with Rocket.Chat. The default is 10. If a timeout occurs, the alert will be retried next time ElastAlert cycles. + +``rocket_chat_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the Rocket.Chat notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``. + +``rocket_chat_kibana_discover_color``: The color of the Kibana Discover url attachment. Defaults to ``#ec4b98``. + +``rocket_chat_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. + +Example rocket_chat_attach_kibana_discover_url, rocket_chat_kibana_discover_color, rocket_chat_kibana_discover_title:: + + # (Required) + generate_kibana_discover_url: True + kibana_discover_app_url: "http://localhost:5601/app/discover#/" + kibana_discover_index_pattern_id: "4babf380-c3b1-11eb-b616-1b59c2feec54" + kibana_discover_version: "7.15" + + # (Optional) + kibana_discover_from_timedelta: + minutes: 10 + kibana_discover_to_timedelta: + minutes: 10 + + # (Required) + rocket_chat_attach_kibana_discover_url: True + + # (Optional) + rocket_chat_kibana_discover_color: "#ec4b98" + rocket_chat_kibana_discover_title: "Discover in Kibana" + +``rocket_chat_alert_fields``: You can add additional fields to your Rocket.Chat alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`. + +Example rocket_chat_alert_fields:: + + rocket_chat_alert_fields: + - title: Host + value: monitor.host + short: true + - title: Status + value: monitor.status + short: true + - title: Zone + value: beat.name + short: true + Squadcast ~~~~~~~~~ @@ -2229,7 +2302,7 @@ Optional: ``http_post_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. -``http_post_ignore_ssl_errors``: By default ElastAlert 2 will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. +``http_post_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. 
Example usage:: diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 8c84213ee..8d1d2db6c 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2098,3 +2098,114 @@ def alert(self, matches): def get_info(self): return {'type': 'discord', 'discord_webhook_url': self.discord_webhook_url} + + +class RocketChatAlerter(Alerter): + """ Creates a RocketChat notification for each alert """ + required_options = set(['rocket_chat_webhook_url']) + + def __init__(self, rule): + super(RocketChatAlerter, self).__init__(rule) + self.rocket_chat_webhook_url = self.rule.get('rocket_chat_webhook_url', None) + if isinstance(self.rocket_chat_webhook_url, str): + self.rocket_chat_webhook_url = [self.rocket_chat_webhook_url] + self.rocket_chat_proxy = self.rule.get('rocket_chat_proxy', None) + + self.rocket_chat_username_override = self.rule.get('rocket_chat_username_override', 'elastalert') + self.rocket_chat_channel_override = self.rule.get('rocket_chat_channel_override', '') + if isinstance(self.rocket_chat_channel_override, str): + self.rocket_chat_channel_override = [self.rocket_chat_channel_override] + self.rocket_chat_emoji_override = self.rule.get('rocket_chat_emoji_override', ':ghost:') + self.rocket_chat_msg_color = self.rule.get('rocket_chat_msg_color', 'danger') + self.rocket_chat_text_string = self.rule.get('rocket_chat_text_string', '') + self.rocket_chat_alert_fields = self.rule.get('rocket_chat_alert_fields', '') + self.rocket_chat_attach_kibana_discover_url = self.rule.get('rocket_chat_attach_kibana_discover_url', False) + self.rocket_chat_kibana_discover_color = self.rule.get('rocket_chat_kibana_discover_color', '#ec4b98') + self.rocket_chat_kibana_discover_title = self.rule.get('rocket_chat_kibana_discover_title', 'Discover in Kibana') + self.rocket_chat_ignore_ssl_errors = self.rule.get('rocket_chat_ignore_ssl_errors', False) + self.rocket_chat_timeout = self.rule.get('rocket_chat_timeout', 10) + self.rocket_chat_ca_certs = self.rule.get('rocket_chat_ca_certs') + + def format_body(self, body): + return body + + def get_aggregation_summary_text__maximum_width(self): + width = super(RocketChatAlerter, self).get_aggregation_summary_text__maximum_width() + + # Reduced maximum width for prettier Slack display. 
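+        # The 75-character cap mirrors the Slack alerter; Rocket.Chat renders attachment text in a similar Slack-style layout.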
+ return min(width, 75) + + def get_aggregation_summary_text(self, matches): + text = super(RocketChatAlerter, self).get_aggregation_summary_text(matches) + if text: + text = '```\n{0}```\n'.format(text) + return text + + def populate_fields(self, matches): + alert_fields = [] + for arg in self.rocket_chat_alert_fields: + arg = copy.copy(arg) + arg['value'] = lookup_es_key(matches[0], arg['value']) + alert_fields.append(arg) + return alert_fields + + def alert(self, matches): + body = self.create_alert_body(matches) + body = self.format_body(body) + headers = {'content-type': 'application/json'} + proxies = {'https': self.rocket_chat_proxy} if self.rocket_chat_proxy else None + payload = { + 'username': self.rocket_chat_username_override, + 'text': self.rocket_chat_text_string, + 'attachments': [ + { + 'color': self.rocket_chat_msg_color, + 'title': self.create_title(matches), + 'text': body, + 'fields': [] + } + ] + } + + # if we have defined fields, populate noteable fields for the alert + if self.rocket_chat_alert_fields != '': + payload['attachments'][0]['fields'] = self.populate_fields(matches) + + if self.rocket_chat_emoji_override != '': + payload['emoji'] = self.rocket_chat_emoji_override + + if self.rocket_chat_attach_kibana_discover_url: + kibana_discover_url = lookup_es_key(matches[0], 'kibana_discover_url') + if kibana_discover_url: + payload['attachments'].append({ + 'color': self.rocket_chat_kibana_discover_color, + 'title': self.rocket_chat_kibana_discover_title, + 'title_link': kibana_discover_url + }) + + for url in self.rocket_chat_webhook_url: + for channel_override in self.rocket_chat_channel_override: + try: + if self.rocket_chat_ca_certs: + verify = self.rocket_chat_ca_certs + else: + verify = not self.rocket_chat_ignore_ssl_errors + if self.rocket_chat_ignore_ssl_errors: + requests.packages.urllib3.disable_warnings() + payload['channel'] = channel_override + response = requests.post( + url, data=json.dumps(payload, cls=DateTimeEncoder), + headers=headers, + verify=verify, + proxies=proxies, + timeout=self.rocket_chat_timeout) + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Rocket.Chat: %s" % e) + elastalert_logger.info("Alert sent to Rocket.Chat") + + def get_info(self): + return {'type': 'rocketchat', + 'rocket_chat_username_override': self.rocket_chat_username_override, + 'rocket_chat_webhook_url': self.rocket_chat_webhook_url} diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 6ac18290c..0c140e2f5 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -80,6 +80,7 @@ class RulesLoader(object): 'post': alerts.HTTPPostAlerter, 'linenotify': alerts.LineNotifyAlerter, 'hivealerter': alerts.HiveAlerter, + 'rocketchat': alerts.RocketChatAlerter, 'zabbix': ZabbixAlerter } diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index a3479b7fa..fef0f9abe 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -317,9 +317,23 @@ properties: pagerduty_client_name: {type: string} pagerduty_event_type: {enum: [none, trigger, resolve, acknowledge]} -### PagerTree + ### PagerTree pagertree_integration_url: {type: string} + ### RocketChat + rocket_chat_webhook_url: *arrayOfString + rocket_chat_username_override: {type: string} + rocket_chat_channel_override: *arrayOfString + rocket_chat_emoji_override: {type: string} + rocket_chat_msg_color: {enum: [good, warning, danger]} + rocket_chat_text_string: {type: string} + rocket_chat_proxy: {type: string} + 
rocket_chat_attach_kibana_discover_url {type: boolean} + rocket_chat_kibana_discover_color {type: string} + rocket_chat_kibana_discover_title {type: string} + rocket_chat_ca_certs: {type: boolean} + rocket_chat_ignore_ssl_errors: {type: boolean} + rocket_chat_timeout: {type: integer} ### Exotel exotel_account_sid: {type: string} diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 410a9998a..b07a01b3c 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -23,6 +23,7 @@ from elastalert.alerts import JiraFormattedMatchString from elastalert.alerts import MsTeamsAlerter from elastalert.alerts import PagerDutyAlerter +from elastalert.alerts import RocketChatAlerter from elastalert.alerts import SlackAlerter from elastalert.alerts import TwilioAlerter from elastalert.loaders import FileRulesLoader @@ -2835,3 +2836,956 @@ def test_discord_matches(): actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data + + +def test_rocketchat_uses_custom_title(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + assert ('elastalert', logging.INFO, 'Alert sent to Rocket.Chat') == caplog.record_tuples[0] + + +def test_rocketchat_uses_rule_name_when_custom_title_is_not_provided(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_username_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'test elastalert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + 
'@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'test elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_chat_channel(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'rocket_chat_channel_override': '#test-alert', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '#test-alert', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_uses_list_of_custom_rocket_chat_channel(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'rocket_chat_channel_override': ['#test-alert', '#test-alert2'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data1 = { + 'username': 'elastalert', + 'channel': '#test-alert', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + expected_data2 = { + 'username': 'elastalert', + 'channel': '#test-alert2', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data2 == json.loads(mock_post_request.call_args_list[1][1]['data']) + + +def test_rocketchat_emoji_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'rocket_chat_emoji_override': ':shushing_face:', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 
'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':shushing_face:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_emoji_override_blank(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'rocket_chat_emoji_override': '', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +@pytest.mark.parametrize('msg_color, except_msg_color', [ + ('', 'danger'), + ('danger', 'danger'), + ('good', 'good'), + ('warning', 'warning') +]) +def test_rocketchat_msg_color(msg_color, except_msg_color): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + + if msg_color: + rule['rocket_chat_msg_color'] = msg_color + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': except_msg_color, + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_text_string(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_text_string': 'text str', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 
'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': 'text str' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_proxy(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_proxy': 'http://proxy.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': rule['rocket_chat_proxy']}, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_alert_fields(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_alert_fields': [ + { + 'title': 'Host', + 'value': 'somefield', + 'short': 'true' + }, + { + 'title': 'Sensors', + 'value': '@timestamp', + 'short': 'true' + } + ], + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': + [ + { + 'short': 'true', + 'title': 'Host', + 'value': 'foobarbaz' + }, + { + 'short': 'true', + 'title': 'Sensors', + 'value': '2021-01-01T00:00:00' + } + ], + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_msg_color_required_error(): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_msg_color': 'abc', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post'): + alert.alert([match]) + except KeyError: + assert True 
+ + +def test_rocketchat_ea_exception(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_msg_pretext': 'pretext value', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + assert 'Error posting to Rocket.Chat: ' in str(ea) + + +def test_rocketchat_get_aggregation_summary_text__maximum_width(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_msg_pretext': 'pretext value', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + assert 75 == alert.get_aggregation_summary_text__maximum_width() + + +def test_rocketchat_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + + expected_data = { + 'type': 'rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'] + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('rocket_chat_webhook_url, expected_data', [ + ('', 'Missing required option(s): rocket_chat_webhook_url'), + ('http://please.dontgohere.rocketchat', + { + 'type': 'rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'] + }) +]) +def test_rocketchat_required_error(rocket_chat_webhook_url, expected_data): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [] + } + + if rocket_chat_webhook_url: + rule['rocket_chat_webhook_url'] = rocket_chat_webhook_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception as ea: + assert expected_data in str(ea) + + +def test_rocketchat_attach_kibana_discover_url_when_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [], + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_attach_kibana_discover_url': True + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'kibana_discover_url': 'http://localhost:5601/app/discover#/' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + }, + { + 'color': '#ec4b98', + 
'title': 'Discover in Kibana', + 'title_link': 'http://localhost:5601/app/discover#/' + } + ], + 'text': '' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_attach_kibana_discover_url_when_not_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [], + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_attach_kibana_discover_url': True + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + 'somefield': 'foobarbaz', + '@timestamp': '2021-01-01T00:00:00' + } + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_kibana_discover_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [], + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_attach_kibana_discover_url': True, + 'rocket_chat_kibana_discover_title': 'Click to discover in Kibana' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + 'somefield': 'foobarbaz', + '@timestamp': '2021-01-01T00:00:00', + 'kibana_discover_url': 'http://localhost:5601/app/discover#/' + } + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + }, + { + 'color': '#ec4b98', + 'title': 'Click to discover in Kibana', + 'title_link': 'http://localhost:5601/app/discover#/' + } + ], + 'text': '' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_kibana_discover_color(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'alert': [], + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocket_chat', + 'rocket_chat_attach_kibana_discover_url': True, + 'rocket_chat_kibana_discover_color': 'blue' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + 'somefield': 'foobarbaz', + '@timestamp': '2021-01-01T00:00:00', + 'kibana_discover_url': 'http://localhost:5601/app/discover#/' + } + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': 
'', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + }, + { + 'color': 'blue', + 'title': 'Discover in Kibana', + 'title_link': 'http://localhost:5601/app/discover#/' + } + ], + 'text': '' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +@pytest.mark.parametrize('ca_certs, ignore_ssl_errors, excpet_verify', [ + ('', '', True), + ('', True, False), + ('', False, True), + (True, '', True), + (True, True, True), + (True, False, True), + (False, '', True), + (False, True, False), + (False, False, True) +]) +def test_rocketchat_ca_certs(ca_certs, ignore_ssl_errors, excpet_verify): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'alert_subject': 'Cool subject', + 'alert': [] + } + if ca_certs: + rule['rocket_chat_ca_certs'] = ca_certs + + if ignore_ssl_errors: + rule['rocket_chat_ignore_ssl_errors'] = ignore_ssl_errors + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=excpet_verify, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_uses_custom_timeout(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'alert_subject': 'Cool subject', + 'alert': [], + 'rocket_chat_timeout': 20 + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=20 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) From cd27d98aa6f9ece86d3767388588238750614322 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Mon, 4 Oct 2021 00:12:36 +0900 Subject: [PATCH 67/83] Add alert handler to create Datadog Events --- README.md | 1 + docs/source/elastalert.rst | 1 + docs/source/ruletypes.rst | 23 ++++++++ elastalert/alerts.py | 31 ++++++++++ elastalert/loaders.py | 1 + elastalert/schema.yaml | 4 ++ tests/alerts_test.py | 114 +++++++++++++++++++++++++++++++++++++ 7 files 
changed, 175 insertions(+)

diff --git a/README.md b/README.md
index aa72f8c0b..920fe446d 100644
--- a/README.md
+++ b/README.md
@@ -42,6 +42,7 @@ Currently, we have built-in support for the following alert types:
 - Alerta
 - Amazon Simple Notification Service (AWS SNS)
 - Command
+- Datadog
 - Debug
 - Discord
 - Email
diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst
index 82b023ed4..9167abe94 100755
--- a/docs/source/elastalert.rst
+++ b/docs/source/elastalert.rst
@@ -34,6 +34,7 @@ Currently, we have support built in for these alert types:
 - Alerta
 - Amazon Simple Notification Service (AWS SNS)
 - Command
+- Datadog
 - Debug
 - Discord
 - Email
diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index bfb7647dc..1692c3152 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -1450,6 +1450,29 @@ Example usage using new-style format::
       - command
     command: ["/bin/send_alert", "--username", "{match[username]}"]
 
+Datadog
+~~~~~~~
+
+This alert will create a `Datadog Event`_. Events are limited to 4000 characters. If an event is sent that contains
+a message that is longer than 4000 characters, only its first 4000 characters will be displayed.
+
+This alert requires two additional options:
+
+``datadog_api_key``: `Datadog API key`_
+
+``datadog_app_key``: `Datadog application key`_
+
+Example usage::
+
+    alert:
+      - "datadog"
+    datadog_api_key: "Datadog API Key"
+    datadog_app_key: "Datadog APP Key"
+
+.. _`Datadog Event`: https://docs.datadoghq.com/events/
+.. _`Datadog API key`: https://docs.datadoghq.com/account_management/api-app-keys/#api-keys
+.. _`Datadog application key`: https://docs.datadoghq.com/account_management/api-app-keys/#application-keys
+
 Discord
 ~~~~~~~
 
diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index 8d1d2db6c..35f173759 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -2209,3 +2209,34 @@ def get_info(self):
         return {'type': 'rocketchat',
                 'rocket_chat_username_override': self.rocket_chat_username_override,
                 'rocket_chat_webhook_url': self.rocket_chat_webhook_url}
+
+
+class DatadogAlerter(Alerter):
+    """ Creates a Datadog Event for each alert """
+    required_options = frozenset(['datadog_api_key', 'datadog_app_key'])
+
+    def __init__(self, rule):
+        super(DatadogAlerter, self).__init__(rule)
+        self.dd_api_key = self.rule.get('datadog_api_key', None)
+        self.dd_app_key = self.rule.get('datadog_app_key', None)
+
+    def alert(self, matches):
+        url = 'https://api.datadoghq.com/api/v1/events'
+        headers = {
+            'Content-Type': 'application/json',
+            'DD-API-KEY': self.dd_api_key,
+            'DD-APPLICATION-KEY': self.dd_app_key
+        }
+        payload = {
+            'title': self.create_title(matches),
+            'text': self.create_alert_body(matches)
+        }
+        try:
+            response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers)
+            response.raise_for_status()
+        except RequestException as e:
+            raise EAException('Error posting event to Datadog: %s' % e)
+        elastalert_logger.info('Alert sent to Datadog')
+
+    def get_info(self):
+        return {'type': 'datadog'}
diff --git a/elastalert/loaders.py b/elastalert/loaders.py
index 0c140e2f5..a93cd1ddb 100644
--- a/elastalert/loaders.py
+++ b/elastalert/loaders.py
@@ -61,6 +61,7 @@ class RulesLoader(object):
         'opsgenie': OpsGenieAlerter,
         'stomp': alerts.StompAlerter,
         'debug': alerts.DebugAlerter,
+        'datadog': alerts.DatadogAlerter,
         'discord': alerts.DiscordAlerter,
         'command': alerts.CommandAlerter,
         'sns': alerts.SnsAlerter,
diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml
index 
fef0f9abe..ea7502ee6 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -239,6 +239,10 @@ properties: pipe_match_json: {type: boolean} fail_on_non_zero_exit: {type: boolean} + ### Datadog + datadog_api_key: {type: string} + datadog_app_key: {type: string} + ## Discord discord_webhook_url: {type: string} discord_emoji_title: {type: string} diff --git a/tests/alerts_test.py b/tests/alerts_test.py index b07a01b3c..ceda77938 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -16,6 +16,7 @@ from elastalert.alerts import Alerter from elastalert.alerts import BasicMatchString from elastalert.alerts import CommandAlerter +from elastalert.alerts import DatadogAlerter from elastalert.alerts import DiscordAlerter from elastalert.alerts import EmailAlerter from elastalert.alerts import HTTPPostAlerter @@ -3789,3 +3790,116 @@ def test_rocketchat_uses_custom_timeout(): timeout=20 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_datadog_alerter(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'datadog_api_key': 'test-api-key', + 'datadog_app_key': 'test-app-key', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'name': 'datadog-test-name' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'title': rule['alert_subject'], + 'text': "Test Datadog Event Alerter\n\n@timestamp: 2021-01-01T00:00:00\nname: datadog-test-name\n" + } + mock_post_request.assert_called_once_with( + "https://api.datadoghq.com/api/v1/events", + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'DD-API-KEY': rule['datadog_api_key'], + 'DD-APPLICATION-KEY': rule['datadog_app_key'] + } + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to Datadog') == caplog.record_tuples[0] + + +def test_datadog_ea_exception(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'datadog_api_key': 'test-api-key', + 'datadog_app_key': 'test-app-key', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'name': 'datadog-test-name' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + assert 'Error posting event to Datadog:' in str(ea) + + +def test_datadog_getinfo(): + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'datadog_api_key': 'test-api-key', + 'datadog_app_key': 'test-app-key', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + + expected_data = {'type': 'datadog'} + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('datadog_api_key, datadog_app_key, expected_data', [ + ('', '', 'Missing required option(s): datadog_api_key, datadog_app_key'), + ('xxxx1', '', 'Missing required option(s): datadog_api_key, datadog_app_key'), + ('', 'xxxx2', 
'Missing required option(s): datadog_api_key, datadog_app_key'), + ('xxxx1', 'xxxx2', + { + 'type': 'datadog' + }), +]) +def test_datadog_required_error(datadog_api_key, datadog_app_key, expected_data): + try: + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + + if datadog_api_key: + rule['datadog_api_key'] = datadog_api_key + + if datadog_app_key: + rule['datadog_app_key'] = datadog_app_key + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception as ea: + assert expected_data in str(ea) From c80561921d9d3cc3c8f075131a016f9f237b2265 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Mon, 25 Oct 2021 20:42:09 +0900 Subject: [PATCH 68/83] Added Support Alertmanager --- README.md | 1 + docs/source/elastalert.rst | 1 + docs/source/ruletypes.rst | 49 ++++++ elastalert/alerts.py | 75 +++++++++ elastalert/loaders.py | 1 + elastalert/schema.yaml | 45 ++++++ setup.py | 3 + tests/alerts_test.py | 314 +++++++++++++++++++++++++++++++++++++ 8 files changed, 489 insertions(+) diff --git a/README.md b/README.md index 920fe446d..71495a544 100644 --- a/README.md +++ b/README.md @@ -40,6 +40,7 @@ Several rule types with common monitoring paradigms are included with ElastAlert Currently, we have built-in support for the following alert types: - Alerta +- Alertmanager - Amazon Simple Notification Service (AWS SNS) - Command - Datadog diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 9167abe94..cbe469fc7 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -32,6 +32,7 @@ Several rule types with common monitoring paradigms are included with ElastAlert Currently, we have support built in for these alert types: - Alerta +- Alertmanager - Amazon Simple Notification Service (AWS SNS) - Command - Datadog diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 1692c3152..586eae543 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1407,6 +1407,55 @@ come from an individual event, usually the one which triggers the alert. When using ``alert_text_args``, you can access nested fields and index into arrays. For example, if your match was ``{"data": {"ips": ["127.0.0.1", "12.34.56.78"]}}``, then by using ``"data.ips[1]"`` in ``alert_text_args``, it would replace value with ``"12.34.56.78"``. This can go arbitrarily deep into fields and will still work on keys that contain dots themselves. +Alertmanager +~~~~~~~~~~~~ + +This alert type will send alerts to Alertmanager postAlerts. ``alert_subject`` and ``alert_text`` are passed as the annotations labeled ``summary`` and ``description`` accordingly. The labels can be changed. +See https://prometheus.io/docs/alerting/clients/ for more details about the Alertmanager alert format. + +Required: + +``alertmanager_hosts``: The list of hosts pointing to the Alertmanager. + +Optional: + +``alertmanager_api_version``: Defaults to `v1`. Set to `v2` to enable the Alertmanager V2 API postAlerts. + +``alertmanager_alertname``: ``alertname`` is the only required label. Defaults to using the rule name of the alert. + +``alertmanager_labels``: Key:value pairs of arbitrary labels to be attached to every alert. Keys should match the regular expression ``^[a-zA-Z_][a-zA-Z0-9_]*$``. + +``alertmanager_annotations``: Key:value pairs of arbitrary annotations to be attached to every alert. 
Keys should match the regular expression ``^[a-zA-Z_][a-zA-Z0-9_]*$``. + +``alertmanager_fields``: Key:value pairs of labels and corresponding match fields. When using ``alertmanager_fields`` you can access nested fields and index into arrays the same way as with ``alert_text_args``. Keys should match the regular expression ``^[a-zA-Z_][a-zA-Z0-9_]*$``. This dictionary will be merged with the ``alertmanager_labels``. + +``alertmanager_alert_subject_labelname``: Rename the annotations' label name for ``alert_subject``. Default is ``summary``. + +``alertmanager_alert_text_labelname``: Rename the annotations' label name for ``alert_text``. Default is ``description``. + +``alertmanager_proxy``: By default ElastAlert 2 will not use a network proxy to send notifications to Alertmanager. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. + +``alertmanager_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. + +``alertmanager_ignore_ssl_errors``: By default ElastAlert 2 will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. + +``alertmanager_timeout``: You can specify a timeout value, in seconds, for making communicating with Alertmanager. The default is 10. If a timeout occurs, the alert will be retried next time ElastAlert 2 cycles. + +Example usage:: + + alert: + - "alertmanager" + alertmanager_hosts: + - "http://alertmanager:9093" + alertmanager_alertname: "Title" + alertmanager_annotations: + severity: "error" + alertmanager_labels: + source: "elastalert" + alertmanager_fields: + msg: "message" + log: "@log_name" + Command ~~~~~~~ diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 35f173759..591ada849 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2240,3 +2240,78 @@ def alert(self, matches): def get_info(self): return {'type': 'datadog'} + + +class AlertmanagerAlerter(Alerter): + """ Sends an alert to Alertmanager """ + + required_options = frozenset({'alertmanager_hosts'}) + + def __init__(self, rule): + super(AlertmanagerAlerter, self).__init__(rule) + self.api_version = self.rule.get('alertmanager_api_version', 'v1') + self.hosts = self.rule.get('alertmanager_hosts') + self.alertname = self.rule.get('alertmanager_alertname', self.rule.get('name')) + self.labels = self.rule.get('alertmanager_labels', dict()) + self.annotations = self.rule.get('alertmanager_annotations', dict()) + self.fields = self.rule.get('alertmanager_fields', dict()) + self.title_labelname = self.rule.get('alertmanager_alert_subject_labelname', 'summary') + self.body_labelname = self.rule.get('alertmanager_alert_text_labelname', 'description') + self.proxies = self.rule.get('alertmanager_proxy', None) + self.ca_certs = self.rule.get('alertmanager_ca_certs') + self.ignore_ssl_errors = self.rule.get('alertmanager_ignore_ssl_errors', False) + self.timeout = self.rule.get('alertmanager_timeout', 10) + + @staticmethod + def _json_or_string(obj): + """helper to encode non-string objects to JSON""" + if isinstance(obj, str): + return obj + return json.dumps(obj, cls=DateTimeEncoder) + + def alert(self, matches): + headers = {'content-type': 'application/json'} + proxies = {'https': self.proxies} if self.proxies else None + + self.labels.update({ + label: self._json_or_string(lookup_es_key(matches[0], term)) + for label, term in self.fields.items()}) + self.labels.update( + alertname=self.alertname, + elastalert_rule=self.rule.get('name')) + self.annotations.update({ + self.title_labelname: 
self.create_title(matches), + self.body_labelname: self.create_alert_body(matches)}) + payload = { + 'annotations': self.annotations, + 'labels': self.labels + } + + for host in self.hosts: + try: + url = '{}/api/{}/alerts'.format(host, self.api_version) + + if self.ca_certs: + verify = self.ca_certs + else: + verify = not self.ignore_ssl_errors + if self.ignore_ssl_errors: + requests.packages.urllib3.disable_warnings() + + response = requests.post( + url, + data=json.dumps([payload], cls=DateTimeEncoder), + headers=headers, + verify=verify, + proxies=proxies, + timeout=self.timeout + ) + + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Alertmanager: %s" % e) + elastalert_logger.info("Alert sent to Alertmanager") + + def get_info(self): + return {'type': 'alertmanager'} diff --git a/elastalert/loaders.py b/elastalert/loaders.py index a93cd1ddb..b092f9f1f 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -78,6 +78,7 @@ class RulesLoader(object): 'gitter': alerts.GitterAlerter, 'servicenow': alerts.ServiceNowAlerter, 'alerta': alerts.AlertaAlerter, + 'alertmanager': alerts.AlertmanagerAlerter, 'post': alerts.HTTPPostAlerter, 'linenotify': alerts.LineNotifyAlerter, 'hivealerter': alerts.HiveAlerter, diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index ea7502ee6..f7d4f372c 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -387,6 +387,51 @@ properties: alerta_attributes_keys: {type: array, items: {type: string}} alerta_attributes_values: {type: array, items: {type: string}} # Python format string + ### Alertmanager + alertmanager_hosts: {type: array, items: {type: string}} + alertmanager_api_version: {type: string, enum: ['v1', 'v2']} + alertmanager_alert_subject_labelname: {type: string} + alertmanager_alert_text_labelname: {type: string} + alertmanager_proxy: {type: string} + alertmanager_ca_certs: {type: boolean} + alertmanager_ignore_ssl_errors: {type: boolean} + alertmanager_timeout: {type: integer} + alertmanager_labels: + type: object + minProperties: 1 + patternProperties: + "^.+$": + oneOf: + - type: string + - type: object + additionalProperties: false + required: [field] + properties: + field: {type: string, minLength: 1} + alertmanager_annotations: + type: object + minProperties: 1 + patternProperties: + "^.+$": + oneOf: + - type: string + - type: object + additionalProperties: false + required: [field] + properties: + field: {type: string, minLength: 1} + alertmanager_fields: + type: object + minProperties: 1 + patternProperties: + "^.+$": + oneOf: + - type: string + - type: object + additionalProperties: false + required: [field] + properties: + field: {type: string, minLength: 1} ### Simple simple_webhook_url: *arrayOfString diff --git a/setup.py b/setup.py index 20ba4a9f1..cc4bb98e3 100644 --- a/setup.py +++ b/setup.py @@ -16,6 +16,9 @@ license='Copyright 2014 Yelp', classifiers=[ 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', ], diff --git a/tests/alerts_test.py b/tests/alerts_test.py index ceda77938..01ac79701 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -13,6 +13,7 @@ from jira.exceptions import JIRAError from elastalert.alerts import AlertaAlerter +from elastalert.alerts import AlertmanagerAlerter from elastalert.alerts 
import Alerter from elastalert.alerts import BasicMatchString from elastalert.alerts import CommandAlerter @@ -3903,3 +3904,316 @@ def test_datadog_required_error(datadog_api_key, datadog_app_key, expected_data) assert expected_data == actual_data except Exception as ea: assert expected_data in str(ea) + + +def test_alertmanager(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': ['http://alertmanager:9093'], + 'alertmanager_alertname': 'Title', + 'alertmanager_annotations': {'severity': 'error'}, + 'alertmanager_labels': {'source': 'elastalert'}, + 'alertmanager_fields': {'msg': 'message', 'log': '@log_name'}, + 'alert_subject_args': ['message', '@log_name'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'message': 'Quit 123', + '@log_name': 'mysqld.general' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = [ + { + 'annotations': + { + 'severity': 'error', + 'summary': 'Test Alertmanager Rule', + 'description': 'Test Alertmanager Rule\n\n' + + '@log_name: mysqld.general\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'message: Quit 123\nsomefield: foobarbaz\n' + }, + 'labels': { + 'source': 'elastalert', + 'msg': 'Quit 123', + 'log': 'mysqld.general', + 'alertname': 'Title', + 'elastalert_rule': 'Test Alertmanager Rule' + } + } + ] + + mock_post_request.assert_called_once_with( + 'http://alertmanager:9093/api/v1/alerts', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + assert ('elastalert', logging.INFO, "Alert sent to Alertmanager") == caplog.record_tuples[0] + + +def test_alertmanager_porxy(): + rule = { + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': ['http://alertmanager:9093'], + 'alertmanager_alertname': 'Title', + 'alertmanager_annotations': {'severity': 'error'}, + 'alertmanager_labels': {'source': 'elastalert'}, + 'alertmanager_fields': {'msg': 'message', 'log': '@log_name'}, + 'alertmanager_proxy': 'http://proxy.url', + 'alert_subject_args': ['message', '@log_name'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'message': 'Quit 123', + '@log_name': 'mysqld.general' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = [ + { + 'annotations': + { + 'severity': 'error', + 'summary': 'Test Alertmanager Rule', + 'description': 'Test Alertmanager Rule\n\n' + + '@log_name: mysqld.general\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'message: Quit 123\nsomefield: foobarbaz\n' + }, + 'labels': { + 'source': 'elastalert', + 'msg': 'Quit 123', + 'log': 'mysqld.general', + 'alertname': 'Title', + 'elastalert_rule': 'Test Alertmanager Rule' + } + } + ] + + mock_post_request.assert_called_once_with( + 'http://alertmanager:9093/api/v1/alerts', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'}, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_alertmanager_timeout(): + rule = { + 'name': 
'Test Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': ['http://alertmanager:9093'], + 'alertmanager_alertname': 'Title', + 'alertmanager_annotations': {'severity': 'error'}, + 'alertmanager_labels': {'source': 'elastalert'}, + 'alertmanager_fields': {'msg': 'message', 'log': '@log_name'}, + 'alertmanager_timeout': 20, + 'alert_subject_args': ['message', '@log_name'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'message': 'Quit 123', + '@log_name': 'mysqld.general' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = [ + { + 'annotations': + { + 'severity': 'error', + 'summary': 'Test Alertmanager Rule', + 'description': 'Test Alertmanager Rule\n\n' + + '@log_name: mysqld.general\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'message: Quit 123\nsomefield: foobarbaz\n' + }, + 'labels': { + 'source': 'elastalert', + 'msg': 'Quit 123', + 'log': 'mysqld.general', + 'alertname': 'Title', + 'elastalert_rule': 'Test Alertmanager Rule' + } + } + ] + + mock_post_request.assert_called_once_with( + 'http://alertmanager:9093/api/v1/alerts', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=20 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +@pytest.mark.parametrize('ca_certs, ignore_ssl_errors, excpet_verify', [ + ('', '', True), + ('', True, False), + ('', False, True), + (True, '', True), + (True, True, True), + (True, False, True), + (False, '', True), + (False, True, False), + (False, False, True) +]) +def test_alertmanager_ca_certs(ca_certs, ignore_ssl_errors, excpet_verify): + rule = { + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': ['http://alertmanager:9093'], + 'alertmanager_alertname': 'Title', + 'alertmanager_annotations': {'severity': 'error'}, + 'alertmanager_labels': {'source': 'elastalert'}, + 'alertmanager_fields': {'msg': 'message', 'log': '@log_name'}, + 'alert_subject_args': ['message', '@log_name'], + 'alert': [] + } + if ca_certs: + rule['alertmanager_ca_certs'] = ca_certs + + if ignore_ssl_errors: + rule['alertmanager_ignore_ssl_errors'] = ignore_ssl_errors + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'message': 'Quit 123', + '@log_name': 'mysqld.general' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = [ + { + 'annotations': + { + 'severity': 'error', + 'summary': 'Test Alertmanager Rule', + 'description': 'Test Alertmanager Rule\n\n' + + '@log_name: mysqld.general\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'message: Quit 123\nsomefield: foobarbaz\n' + }, + 'labels': { + 'source': 'elastalert', + 'msg': 'Quit 123', + 'log': 'mysqld.general', + 'alertname': 'Title', + 'elastalert_rule': 'Test Alertmanager Rule' + } + } + ] + + mock_post_request.assert_called_once_with( + 'http://alertmanager:9093/api/v1/alerts', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=excpet_verify, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_alertmanager_ea_exception(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test 
Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': ['http://alertmanager:9093'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + assert 'Error posting to Alertmanager' in str(ea) + + +def test_alertmanager_getinfo(): + rule = { + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': 'http://alertmanager:9093', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + + expected_data = { + 'type': 'alertmanager' + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('alertmanager_hosts, expected_data', [ + ([], 'Missing required option(s): alertmanager_hosts'), + (['http://alertmanager:9093'], + { + 'type': 'alertmanager' + }), +]) +def test_alertmanager_required_error(alertmanager_hosts, expected_data): + try: + rule = { + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alert': [] + } + + if alertmanager_hosts: + rule['alertmanager_hosts'] = alertmanager_hosts + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception as ea: + print('ea %s' % str(ea)) + assert expected_data in str(ea) From ba397669dd2100b2c54f6399e2ee33c967357a90 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Mon, 25 Oct 2021 20:49:30 +0900 Subject: [PATCH 69/83] Update coverage, pylint, pytest, tox, sphinx MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit coverage 5.5 → 6.0.2 pylint<2.10 → pylint<2.12 pytest 6.2.4 → 6.2.5 tox 3.24.1 → 3.24.4 sphinx 4.1.2 → 4.2.0 --- requirements-dev.txt | 9 ++++----- tox.ini | 2 +- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 2b23894a9..207d21d33 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,10 +1,9 @@ -r requirements.txt -coverage==5.5 -docutils<0.17 +coverage==6.0.2 flake8 pre-commit -pylint<2.10 -pytest==6.2.4 +pylint<2.12 +pytest==6.2.5 setuptools sphinx_rtd_theme -tox==3.24.1 +tox==3.24.4 diff --git a/tox.ini b/tox.ini index 76e606b2b..120dd0f4f 100644 --- a/tox.ini +++ b/tox.ini @@ -25,6 +25,6 @@ norecursedirs = .* virtualenv_run docs build venv env [testenv:docs] deps = {[testenv]deps} - sphinx==4.1.2 + sphinx==4.2.0 changedir = docs commands = sphinx-build -b html -d build/doctrees -W source build/html From 34599e6c8c3d57266af6916f1f32d849581b4e10 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 30 Oct 2021 02:09:51 +0900 Subject: [PATCH 70/83] Update Dockerfile-test --- Dockerfile-test | 8 ++++---- requirements.txt | 2 +- tox.ini | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Dockerfile-test b/Dockerfile-test index fb8a78409..034c3da40 100644 --- a/Dockerfile-test +++ b/Dockerfile-test @@ -1,9 +1,9 @@ -FROM ubuntu:latest +FROM ubuntu:21.10 -RUN apt-get update && apt-get upgrade -y -RUN apt-get install software-properties-common -y +RUN apt update && apt upgrade -y +RUN apt install software-properties-common -y RUN add-apt-repository ppa:deadsnakes/ppa -RUN apt-get -y install build-essential python3.6 python3.6-dev 
python3-pip libssl-dev git +RUN apt -y install build-essential python3.9 python3.9-dev python3-pip libssl-dev git WORKDIR /home/elastalert diff --git a/requirements.txt b/requirements.txt index 55b0c1bba..ade213102 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,7 +12,7 @@ jsonschema>=3.0.2 prison>=0.1.2 py-zabbix==1.1.3 PyStaticConfiguration>=0.10.3 -python-dateutil>=2.6.0,<2.7.0 +python-dateutil>=2.6.0,<2.9.0 PyYAML>=5.1 requests>=2.10.0 stomp.py>=4.1.17 diff --git a/tox.ini b/tox.ini index 120dd0f4f..0ffed0c10 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] project = elastalert -envlist = py36,docs +envlist = py39,docs [testenv] deps = -rrequirements-dev.txt From 561bfbb67771b75d80f3a87902a7fea114b3cd93 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Mon, 15 Nov 2021 23:03:51 +0900 Subject: [PATCH 71/83] pin tzlocal==2.1 --- requirements.txt | 35 ++++++++++++++++++----------------- setup.py | 3 ++- 2 files changed, 20 insertions(+), 18 deletions(-) diff --git a/requirements.txt b/requirements.txt index ade213102..b5db239db 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,20 +1,21 @@ apscheduler>=3.3.0,<4.0 -aws-requests-auth>=0.3.0 -sortedcontainers>=2.2.2 -boto3>=1.4.4 -cffi>=1.11.5 -croniter>=0.3.16 +aws-requests-auth>=0.4.3 +sortedcontainers>=2.4.0 +boto3>=1.19.7 +cffi>=1.15.0 +croniter>=1.0.15 elasticsearch==7.0.0 envparse>=0.2.0 -exotel>=0.1.3 -jira>=2.0.0 -jsonschema>=3.0.2 -prison>=0.1.2 -py-zabbix==1.1.3 -PyStaticConfiguration>=0.10.3 -python-dateutil>=2.6.0,<2.9.0 -PyYAML>=5.1 -requests>=2.10.0 -stomp.py>=4.1.17 -texttable>=0.8.8 -twilio>=6.0.0,<6.58 \ No newline at end of file +exotel>=0.1.5 +jira>=3.0.1 +jsonschema>=4.1.2 +prison>=0.2.1 +py-zabbix==1.1.7 +PyStaticConfiguration>=0.10.5 +python-dateutil>=2.8.2 +PyYAML>=6.0 +requests>=2.26.0 +stomp.py>=7.0.0 +texttable>=1.6.4 +twilio>=6.0.0,<6.58 +tzlocal==2.1 \ No newline at end of file diff --git a/setup.py b/setup.py index cc4bb98e3..1aad3a602 100644 --- a/setup.py +++ b/setup.py @@ -49,6 +49,7 @@ 'stomp.py>=4.1.17', 'texttable>=0.8.8', 'twilio>=6.0.0,<6.58', - 'cffi>=1.11.5' + 'cffi>=1.11.5', + 'tzlocal==2.1' ] ) From 59593c7eb816313bee91536a731d64237906f3e8 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Tue, 16 Nov 2021 22:02:37 +0900 Subject: [PATCH 72/83] sphinx 4.3.0 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 0ffed0c10..46e9f822c 100644 --- a/tox.ini +++ b/tox.ini @@ -25,6 +25,6 @@ norecursedirs = .* virtualenv_run docs build venv env [testenv:docs] deps = {[testenv]deps} - sphinx==4.2.0 + sphinx==4.3.0 changedir = docs commands = sphinx-build -b html -d build/doctrees -W source build/html From e78432b653c1e7122d3b3d17638d71075056e1f3 Mon Sep 17 00:00:00 2001 From: Naoyuki Sano Date: Wed, 15 Dec 2021 01:34:25 +0900 Subject: [PATCH 73/83] sphinx 4.3.1 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 46e9f822c..902c1e1fd 100644 --- a/tox.ini +++ b/tox.ini @@ -25,6 +25,6 @@ norecursedirs = .* virtualenv_run docs build venv env [testenv:docs] deps = {[testenv]deps} - sphinx==4.3.0 + sphinx==4.3.1 changedir = docs commands = sphinx-build -b html -d build/doctrees -W source build/html From 15d327f9f967b8807067f9ec93f4aa18a646d8fb Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 26 Dec 2021 01:31:36 +0900 Subject: [PATCH 74/83] python 3.10 support --- Dockerfile-test | 2 +- requirements-dev.txt | 2 +- setup.py | 30 +++++++++++++++--------------- tox.ini | 4 ++-- 4 files 
changed, 19 insertions(+), 19 deletions(-) diff --git a/Dockerfile-test b/Dockerfile-test index 034c3da40..9e7165720 100644 --- a/Dockerfile-test +++ b/Dockerfile-test @@ -3,7 +3,7 @@ FROM ubuntu:21.10 RUN apt update && apt upgrade -y RUN apt install software-properties-common -y RUN add-apt-repository ppa:deadsnakes/ppa -RUN apt -y install build-essential python3.9 python3.9-dev python3-pip libssl-dev git +RUN apt -y install build-essential python3.10 python3.10-dev python3-pip libssl-dev git WORKDIR /home/elastalert diff --git a/requirements-dev.txt b/requirements-dev.txt index 207d21d33..cc9c813bc 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,5 @@ -r requirements.txt -coverage==6.0.2 +coverage==6.2 flake8 pre-commit pylint<2.12 diff --git a/setup.py b/setup.py index 1aad3a602..b320d05ea 100644 --- a/setup.py +++ b/setup.py @@ -31,23 +31,23 @@ package_data={'elastalert': ['schema.yaml', 'es_mappings/**/*.json']}, install_requires=[ 'apscheduler>=3.3.0,<4.0', - 'aws-requests-auth>=0.3.0', - 'sortedcontainers>=2.2.2', - 'boto3>=1.4.4', - 'croniter>=0.3.16', + 'aws-requests-auth>=0.4.3', + 'sortedcontainers>=2.4.0', + 'boto3>=1.19.7', + 'croniter>=1.0.15', 'elasticsearch==7.0.0', 'envparse>=0.2.0', - 'exotel>=0.1.3', - 'jira>=2.0.0', - 'jsonschema>=3.0.2', - 'prison>=0.1.2', - 'PyStaticConfiguration>=0.10.3', - 'python-dateutil>=2.6.0,<2.7.0', - 'PyYAML>=5.1', - 'py-zabbix==1.1.3', - 'requests>=2.10.0', - 'stomp.py>=4.1.17', - 'texttable>=0.8.8', + 'exotel>=0.1.5', + 'jira>=3.0.1', + 'jsonschema>=4.1.2', + 'prison>=0.2.1', + 'PyStaticConfiguration>=0.10.5', + 'python-dateutil>=2.8.2', + 'PyYAML>=6.0', + 'py-zabbix==1.1.7', + 'requests>=2.26.0', + 'stomp.py>=7.0.0', + 'texttable>=1.6.4', 'twilio>=6.0.0,<6.58', 'cffi>=1.11.5', 'tzlocal==2.1' diff --git a/tox.ini b/tox.ini index 46e9f822c..e55c3ec98 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] project = elastalert -envlist = py39,docs +envlist = py310,docs [testenv] deps = -rrequirements-dev.txt @@ -25,6 +25,6 @@ norecursedirs = .* virtualenv_run docs build venv env [testenv:docs] deps = {[testenv]deps} - sphinx==4.3.0 + sphinx==4.3.2 changedir = docs commands = sphinx-build -b html -d build/doctrees -W source build/html From ec5abd8fee55d5bb8a753aaa455c8ee56c0c3ee6 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 26 Dec 2021 01:35:00 +0900 Subject: [PATCH 75/83] fix --- tox.ini | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tox.ini b/tox.ini index 09c567c88..e55c3ec98 100644 --- a/tox.ini +++ b/tox.ini @@ -25,10 +25,6 @@ norecursedirs = .* virtualenv_run docs build venv env [testenv:docs] deps = {[testenv]deps} -<<<<<<< HEAD sphinx==4.3.2 -======= - sphinx==4.3.1 ->>>>>>> e78432b653c1e7122d3b3d17638d71075056e1f3 changedir = docs commands = sphinx-build -b html -d build/doctrees -W source build/html From 663b6528073501dd6f7c25ffa8da845cde0e30de Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 26 Dec 2021 01:45:09 +0900 Subject: [PATCH 76/83] Kibana Discover support kibana 7.16 --- docs/source/ruletypes.rst | 2 +- elastalert/kibana_discover.py | 2 +- elastalert/schema.yaml | 2 +- tests/kibana_discover_test.py | 3 ++- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 586eae543..22ca94891 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -553,7 +553,7 @@ The currently supported versions of Kibana Discover are: - `5.6` - `6.0`, `6.1`, `6.2`, `6.3`, `6.4`, `6.5`, `6.6`, `6.7`, `6.8` -- `7.0`, `7.1`, 
`7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10`, `7.11`, `7.12`, `7.13`, `7.14`, `7.15` +- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10`, `7.11`, `7.12`, `7.13`, `7.14`, `7.15`, `7.16` ``kibana_discover_version: '7.3'`` diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index 2ebc0f804..1099a6986 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -15,7 +15,7 @@ kibana_default_timedelta = datetime.timedelta(minutes=10) kibana5_kibana6_versions = frozenset(['5.6', '6.0', '6.1', '6.2', '6.3', '6.4', '6.5', '6.6', '6.7', '6.8']) -kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11', '7.12', '7.13', '7.14', '7.15']) +kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11', '7.12', '7.13', '7.14', '7.15', '7.16']) def generate_kibana_discover_url(rule, match): ''' Creates a link for a kibana discover app. ''' diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index f7d4f372c..1f64c3949 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -219,7 +219,7 @@ properties: ### Kibana Discover App Link generate_kibana_discover_url: {type: boolean} kibana_discover_app_url: {type: string, format: uri} - kibana_discover_version: {type: string, enum: ['7.15', '7.14', '7.13', '7.12', '7.11', '7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} + kibana_discover_version: {type: string, enum: ['7.16', '7.15', '7.14', '7.13', '7.12', '7.11', '7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} kibana_discover_index_pattern_id: {type: string, minLength: 1} kibana_discover_columns: {type: array, items: {type: string, minLength: 1}, minItems: 1} kibana_discover_from_timedelta: *timedelta diff --git a/tests/kibana_discover_test.py b/tests/kibana_discover_test.py index 837130c75..10b36667e 100644 --- a/tests/kibana_discover_test.py +++ b/tests/kibana_discover_test.py @@ -54,7 +54,8 @@ def test_generate_kibana_discover_url_with_kibana_5x_and_6x(kibana_version): '7.12', '7.13', '7.14', - '7.15' + '7.15', + '7.16' ]) def test_generate_kibana_discover_url_with_kibana_7x(kibana_version): url = generate_kibana_discover_url( From 442ab2f0ec240a7f67f7a882711bc69ac2419608 Mon Sep 17 00:00:00 2001 From: Naoyuki Sano Date: Wed, 5 Jan 2022 22:38:39 +0900 Subject: [PATCH 77/83] Setting size to 0 avoids executing the fetch phase of the search making the request more efficient --- elastalert/ruletypes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index e4dba44e6..e000b71b6 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -678,7 +678,7 @@ def get_all_terms(self, args): time_filter = {self.rules['timestamp_field']: {'lt': self.rules['dt_to_ts'](tmp_end), 'gte': self.rules['dt_to_ts'](tmp_start)}} query_template['filter'] = {'bool': {'must': [{'range': time_filter}]}} - query = {'aggs': {'filtered': query_template}} + query = {'aggs': {'filtered': query_template}, 'size': 0} if 'filter' in self.rules: for item in self.rules['filter']: From 63325e47db54e3dc742482368427fe7fa68f1cb8 Mon Sep 17 00:00:00 2001 From: Naoyuki Sano Date: Wed, 5 Jan 2022 22:43:08 +0900 Subject: [PATCH 78/83] Update setup.py --- setup.py 
| 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index b320d05ea..7b15575b0 100644 --- a/setup.py +++ b/setup.py @@ -19,6 +19,7 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', ], From 2cb0ec43ae0308272f35ab2ccb6c77c277aa06fe Mon Sep 17 00:00:00 2001 From: Naoyuki Sano Date: Thu, 13 Jan 2022 01:30:56 +0900 Subject: [PATCH 79/83] tox 3.24.5 and pylint 2.12.2 --- requirements-dev.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index cc9c813bc..b6cb95033 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,8 +2,8 @@ coverage==6.2 flake8 pre-commit -pylint<2.12 +pylint==2.12.2 pytest==6.2.5 setuptools sphinx_rtd_theme -tox==3.24.4 +tox==3.24.5 From e75ee30ae6a865059835c5c7e9484280a9d7f0aa Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Wed, 19 Jan 2022 23:09:48 +0900 Subject: [PATCH 80/83] sphinx 4.3.2 to 4.4.0 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index e55c3ec98..b5763f264 100644 --- a/tox.ini +++ b/tox.ini @@ -25,6 +25,6 @@ norecursedirs = .* virtualenv_run docs build venv env [testenv:docs] deps = {[testenv]deps} - sphinx==4.3.2 + sphinx==4.4.0 changedir = docs commands = sphinx-build -b html -d build/doctrees -W source build/html From 63a7d66e23ea979bfd1c3252dbeba39c58045b8d Mon Sep 17 00:00:00 2001 From: Naoyuki Sano Date: Tue, 1 Feb 2022 02:08:54 +0900 Subject: [PATCH 81/83] Added exotel_message_body to schema.yaml --- elastalert/schema.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 1f64c3949..4f3ca2780 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -344,6 +344,7 @@ properties: exotel_auth_token: {type: string} exotel_to_number: {type: string} exotel_from_number: {type: string} + exotel_message_body: {type: string} ### Twilio twilio_account_sid: {type: string} From 5d1e4438645c96c4759724eeb039369cd1029ac6 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 5 Feb 2022 00:31:09 +0900 Subject: [PATCH 82/83] Kibana Discover support kibana 7.17 --- docs/source/ruletypes.rst | 2 +- elastalert/kibana_discover.py | 2 +- elastalert/schema.yaml | 2 +- tests/kibana_discover_test.py | 3 ++- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 22ca94891..208fe4cf4 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -553,7 +553,7 @@ The currently supported versions of Kibana Discover are: - `5.6` - `6.0`, `6.1`, `6.2`, `6.3`, `6.4`, `6.5`, `6.6`, `6.7`, `6.8` -- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10`, `7.11`, `7.12`, `7.13`, `7.14`, `7.15`, `7.16` +- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10`, `7.11`, `7.12`, `7.13`, `7.14`, `7.15`, `7.16`, `7.17` ``kibana_discover_version: '7.3'`` diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index 1099a6986..78db3118b 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -15,7 +15,7 @@ kibana_default_timedelta = datetime.timedelta(minutes=10) kibana5_kibana6_versions = frozenset(['5.6', '6.0', '6.1', '6.2', '6.3', '6.4', '6.5', '6.6', '6.7', '6.8']) -kibana7_versions = frozenset(['7.0', '7.1', 
'7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11', '7.12', '7.13', '7.14', '7.15', '7.16']) +kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11', '7.12', '7.13', '7.14', '7.15', '7.16', '7.17']) def generate_kibana_discover_url(rule, match): ''' Creates a link for a kibana discover app. ''' diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 1f64c3949..d4acb2251 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -219,7 +219,7 @@ properties: ### Kibana Discover App Link generate_kibana_discover_url: {type: boolean} kibana_discover_app_url: {type: string, format: uri} - kibana_discover_version: {type: string, enum: ['7.16', '7.15', '7.14', '7.13', '7.12', '7.11', '7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} + kibana_discover_version: {type: string, enum: ['7.17', '7.16', '7.15', '7.14', '7.13', '7.12', '7.11', '7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} kibana_discover_index_pattern_id: {type: string, minLength: 1} kibana_discover_columns: {type: array, items: {type: string, minLength: 1}, minItems: 1} kibana_discover_from_timedelta: *timedelta diff --git a/tests/kibana_discover_test.py b/tests/kibana_discover_test.py index 10b36667e..635fbcadb 100644 --- a/tests/kibana_discover_test.py +++ b/tests/kibana_discover_test.py @@ -55,7 +55,8 @@ def test_generate_kibana_discover_url_with_kibana_5x_and_6x(kibana_version): '7.13', '7.14', '7.15', - '7.16' + '7.16', + '7.17' ]) def test_generate_kibana_discover_url_with_kibana_7x(kibana_version): url = generate_kibana_discover_url( From 7444552b13782ad793dc091cfe554dc7f74316bc Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 6 Feb 2022 02:45:32 +0900 Subject: [PATCH 83/83] Pytest 6.2.5 to 7.0.0 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index b6cb95033..2adb92368 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,7 +3,7 @@ coverage==6.2 flake8 pre-commit pylint==2.12.2 -pytest==6.2.5 +pytest==7.0.0 setuptools sphinx_rtd_theme tox==3.24.5