From 0fd1f524f34a14d29a58b4a36db5f9660aeec176 Mon Sep 17 00:00:00 2001 From: dylanjf Date: Fri, 11 Jan 2019 18:34:06 -0500 Subject: [PATCH 0001/1065] corrected spike aggregation support for avg,min,max --- elastalert/ruletypes.py | 53 ++++++++++++++++++++++++++++++++--------- 1 file changed, 42 insertions(+), 11 deletions(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index e25ef148b..9164379bf 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -350,6 +350,20 @@ def mean(self): else: return None + def min(self): + """ The minimum of the value_field in the window. """ + if len(self.data) > 0: + return min([x[1] for x in self.data]) + else: + return None + + def max(self): + """ The maximum of the value_field in the window. """ + if len(self.data) > 0: + return max([x[1] for x in self.data]) + else: + return None + def __iter__(self): return iter(self.data) @@ -421,17 +435,34 @@ def add_data(self, data): if qk is None: qk = 'other' if self.field_value is not None: - count = lookup_es_key(event, self.field_value) - if count is not None: - try: - count = int(count) - except ValueError: - elastalert_logger.warn('{} is not a number: {}'.format(self.field_value, count)) - else: - self.handle_event(event, count, qk) + if self.field_value in event: + count = lookup_es_key(event, self.field_value) + if count is not None: + try: + count = int(count) + except ValueError: + elastalert_logger.warn('{} is not a number: {}'.format(self.field_value, count)) + else: + self.handle_event(event, count, qk) else: self.handle_event(event, 1, qk) + def get_spike_values(self, qk): + """ + extending ref/cur value retrieval logic for spike aggregations + """ + spike_check_type = self.rules.get('metric_agg_type') + if spike_check_type in [None, 'sum', 'value_count']: + # default count logic is appropriate in all these cases + return self.ref_windows[qk].count(), self.cur_windows[qk].count() + elif spike_check_type == 'avg': + return self.ref_windows[qk].mean(), self.cur_windows[qk].mean() + elif spike_check_type == 'min': + return self.ref_windows[qk].min(), self.cur_windows[qk].min() + elif spike_check_type == 'max': + return self.ref_windows[qk].max(), self.cur_windows[qk].max() + + def clear_windows(self, qk, event): # Reset the state and prevent alerts until windows filled again self.ref_windows[qk].clear() @@ -469,7 +500,8 @@ def handle_event(self, event, count, qk='all'): self.add_match(match, qk) self.clear_windows(qk, match) else: - if self.find_matches(self.ref_windows[qk].count(), self.cur_windows[qk].count()): + ref, cur = self.get_spike_values(qk) + if self.find_matches(ref, cur): # skip over placeholder events which have count=0 for match, count in self.cur_windows[qk].data: if count: @@ -481,8 +513,7 @@ def handle_event(self, event, count, qk='all'): def add_match(self, match, qk): extra_info = {} if self.field_value is None: - spike_count = self.cur_windows[qk].count() - reference_count = self.ref_windows[qk].count() + spike_count, reference_count = self.get_spike_values(qk) else: spike_count = self.cur_windows[qk].mean() reference_count = self.ref_windows[qk].mean() From 49d9b053fc9d2f5e7f8548f7533c145a5127283f Mon Sep 17 00:00:00 2001 From: dylanjf Date: Mon, 14 Jan 2019 11:37:23 -0500 Subject: [PATCH 0002/1065] correct order of variables for spike alert text --- elastalert/ruletypes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 9164379bf..ca508614a 100644 --- a/elastalert/ruletypes.py 
+++ b/elastalert/ruletypes.py @@ -513,7 +513,7 @@ def handle_event(self, event, count, qk='all'): def add_match(self, match, qk): extra_info = {} if self.field_value is None: - spike_count, reference_count = self.get_spike_values(qk) + reference_count, spike_count = self.get_spike_values(qk) else: spike_count = self.cur_windows[qk].mean() reference_count = self.ref_windows[qk].mean() From 6849cda20e741ee67c0ceabc9d8aaeaa7fc71377 Mon Sep 17 00:00:00 2001 From: dylanjf Date: Mon, 14 Jan 2019 11:39:05 -0500 Subject: [PATCH 0003/1065] linebreak fix for travis CI --- elastalert/ruletypes.py | 1 - 1 file changed, 1 deletion(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index ca508614a..c1a526d17 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -462,7 +462,6 @@ def get_spike_values(self, qk): elif spike_check_type == 'max': return self.ref_windows[qk].max(), self.cur_windows[qk].max() - def clear_windows(self, qk, event): # Reset the state and prevent alerts until windows filled again self.ref_windows[qk].clear() From ebd3f9eae4ffd1d847f4befa8134d7eef01871fc Mon Sep 17 00:00:00 2001 From: dylanjf Date: Tue, 23 Apr 2019 13:15:05 -0400 Subject: [PATCH 0004/1065] force elasticsearch 6.3 on build --- elastalert/alerts.py | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 27e03ac91..fe3bd1e24 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1118,7 +1118,7 @@ def __init__(self, rule): self.slack_channel_override = [self.slack_channel_override] self.slack_title_link = self.rule.get('slack_title_link', '') self.slack_title = self.rule.get('slack_title', '') - self.slack_emoji_override = self.rule.get('slack_emoji_override', ':ghost:') + self.slack_emoji_override = self.rule.get('slack_emoji_override', ':dylan:') self.slack_icon_url_override = self.rule.get('slack_icon_url_override', '') self.slack_msg_color = self.rule.get('slack_msg_color', 'danger') self.slack_parse_override = self.rule.get('slack_parse_override', 'none') diff --git a/requirements.txt b/requirements.txt index 4f23f2ec1..5e0c4e47a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ blist>=1.3.6 boto3>=1.4.4 configparser>=3.5.0 croniter>=0.3.16 -elasticsearch +elasticsearch==6.3.1 envparse>=0.2.0 exotel>=0.1.3 jira>=1.0.10,<1.0.15 From 885896c55216980d013b7061444b60351ec2408a Mon Sep 17 00:00:00 2001 From: Abhishek Jaisingh Date: Mon, 19 Aug 2019 15:37:14 +0530 Subject: [PATCH 0005/1065] Add Docs: Elasticsearch Security Privileges Add new page in documentation specifying the Elasticsearch Security Privileges required for ElastAlert --- .../elasticsearch_security_privileges.rst | 35 +++++++++++++++++++ docs/source/index.rst | 1 + 2 files changed, 36 insertions(+) create mode 100644 docs/source/elasticsearch_security_privileges.rst diff --git a/docs/source/elasticsearch_security_privileges.rst b/docs/source/elasticsearch_security_privileges.rst new file mode 100644 index 000000000..d74497f7c --- /dev/null +++ b/docs/source/elasticsearch_security_privileges.rst @@ -0,0 +1,35 @@ +Elasticsearch Security Privileges +********************************* + +While ElastAlert works out-of-the-box against unsecured Elasticsearch, a secured Elasticsearch requires a user with a certain set of permissions that allow it to read documents, check the cluster status, etc.
+ +SearchGuard Permissions +======================= + +The permissions in Elasticsearch are specific to the plugin being used for RBAC. However, the permissions mentioned here can be mapped easily to plugins other than SearchGuard. + +Details about SearchGuard Action Groups: https://docs.search-guard.com/latest/action-groups + + +Writeback Permissions +--------------------------- + +For the global config (which writes to the writeback index), you would need to give all permissions on the writeback indices. +In addition, some permissions related to Cluster Monitor Access are required. + +``Cluster Permissions``: CLUSTER_MONITOR, indices:data/read/scroll* + +``Index Permissions`` (Over Writeback Indices): INDICES_ALL + + +Per Rule Permissions +-------------------------- + +For per rule Elasticsearch config, you would need at least read permissions on the index you want to query. +Detailed SearchGuard Permissions: + +``Cluster Permissions``: CLUSTER_COMPOSITE_OPS_RO + +``Index Permissions`` (Over the index the rule is querying on): READ, indices:data/read/scroll* + + diff --git a/docs/source/index.rst b/docs/source/index.rst index 4219bf13e..cf6d439c6 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -15,6 +15,7 @@ Contents: running_elastalert ruletypes elastalert_status + elasticsearch_security_privileges recipes/adding_rules recipes/adding_alerts recipes/writing_filters From 5f03e489c0747ee94dfb872df3f81a9645821047 Mon Sep 17 00:00:00 2001 From: Alvaro Olmedo Date: Tue, 10 Dec 2019 17:22:29 +0100 Subject: [PATCH 0006/1065] Disable ssl warnings --- elastalert/util.py | 1 + 1 file changed, 1 insertion(+) diff --git a/elastalert/util.py b/elastalert/util.py index bbb0600ff..5cf0360a4 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -348,6 +348,7 @@ def build_es_conn_config(conf): parsed_conf['es_url_prefix'] = '' parsed_conf['es_conn_timeout'] = conf.get('es_conn_timeout', 20) parsed_conf['send_get_body_as'] = conf.get('es_send_get_body_as', 'GET') + parsed_conf['ssl_show_warn'] = conf.get('ssl_show_warn', True) if os.environ.get('ES_USERNAME'): parsed_conf['es_username'] = os.environ.get('ES_USERNAME') From 216244c3b8ac955448ce5a39f253bed31bd6ad3b Mon Sep 17 00:00:00 2001 From: Alvaro Olmedo Date: Wed, 11 Dec 2019 00:37:25 +0100 Subject: [PATCH 0007/1065] Get ssl_show_warn option from config file and pass it to elasticsearch's client --- elastalert/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/elastalert/__init__.py b/elastalert/__init__.py index 55bfdb32f..2242706e2 100644 --- a/elastalert/__init__.py +++ b/elastalert/__init__.py @@ -22,6 +22,7 @@ def __init__(self, conf): use_ssl=conf['use_ssl'], verify_certs=conf['verify_certs'], ca_certs=conf['ca_certs'], + ssl_show_warn=conf['ssl_show_warn'], connection_class=RequestsHttpConnection, http_auth=conf['http_auth'], timeout=conf['es_conn_timeout'], From 5326ac947b91b17ea43c74a6650344559dbda437 Mon Sep 17 00:00:00 2001 From: Flavio Pompermaier Date: Wed, 11 Dec 2019 10:08:28 +0100 Subject: [PATCH 0008/1065] Enable embedding images into the email alerter You just need to specify the images dir and the mapping between image keys and values. Example: assets_dir: "/opt/elastalert/email_images" email_image_keys: ["img1"] email_image_values: ["my_logo.png"] alert_text_args: - "winlog.event_data.TargetDomainName" - "winlog.event_data.TargetUserName" - "starttime" alert_text: |

User {0}\{1} has exceeded the maximum number of login attempts per minute (check-time: {2}).



--- elastalert/alerts.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index f5ca22070..9d3dfe87b 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -11,6 +11,8 @@ import uuid import warnings from email.mime.text import MIMEText +from email.mime.multipart import MIMEMultipart +from email.mime.image import MIMEImage from email.utils import formatdate from html.parser import HTMLParser from smtplib import SMTP @@ -410,6 +412,8 @@ class EmailAlerter(Alerter): def __init__(self, *args): super(EmailAlerter, self).__init__(*args) + self.assets_dir = self.rule.get('assets_dir', '/tmp') + self.images_dictionary = dict(zip( self.rule.get('email_image_keys', []), self.rule.get('email_image_values', []))); self.smtp_host = self.rule.get('smtp_host', 'localhost') self.smtp_ssl = self.rule.get('smtp_ssl', False) self.from_addr = self.rule.get('from_addr', 'ElastAlert') @@ -454,7 +458,17 @@ def alert(self, matches): if 'email_add_domain' in self.rule: to_addr = [name + self.rule['email_add_domain'] for name in to_addr] if self.rule.get('email_format') == 'html': - email_msg = MIMEText(body, 'html', _charset='UTF-8') + #email_msg = MIMEText(body, 'html', _charset='UTF-8') # old way + email_msg = MIMEMultipart() + msgText = MIMEText(body, 'html', _charset='UTF-8') + email_msg.attach(msgText) # Added, and edited the previous line + + for image_key in self.images_dictionary: + fp = open(os.path.join(self.assets_dir, self.images_dictionary[image_key]), 'rb') + img = MIMEImage(fp.read()) + fp.close() + img.add_header('Content-ID', '<{}>'.format(image_key)) + email_msg.attach(img) else: email_msg = MIMEText(body, _charset='UTF-8') email_msg['Subject'] = self.create_title(matches) From 51fb0753fb2d327f4faa78f672b4d39609a803fa Mon Sep 17 00:00:00 2001 From: Alvaro Olmedo Date: Tue, 28 Jan 2020 16:58:21 +0100 Subject: [PATCH 0009/1065] Dummy commit From f04c08ac581b1b6bac280c0d447e2ca7f2fe9134 Mon Sep 17 00:00:00 2001 From: Alvaro Olmedo Date: Tue, 28 Jan 2020 17:02:45 +0100 Subject: [PATCH 0010/1065] Upgrading elasticsearch lib --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index ce392cb18..ac50dd6e4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ boto3>=1.4.4 cffi>=1.11.5 configparser>=3.5.0 croniter>=0.3.16 -elasticsearch>=7.0.0 +elasticsearch>=7.5.0 envparse>=0.2.0 exotel>=0.1.3 jira>=1.0.10,<1.0.15 From 97a81a1b385aee9d987c49c3ec9f44eca7256f85 Mon Sep 17 00:00:00 2001 From: Alvaro Olmedo Date: Thu, 30 Jan 2020 11:37:36 +0100 Subject: [PATCH 0011/1065] Upgrading elasticsearch lib in package --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index ac6506ae2..6ac2b5181 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ 'boto3>=1.4.4', 'configparser>=3.5.0', 'croniter>=0.3.16', - 'elasticsearch>=7.0.0', + 'elasticsearch>=7.5.0', 'envparse>=0.2.0', 'exotel>=0.1.3', 'jira>=2.0.0', From fbce0fb0ef9a65f57d15188f6d28f33be03fac22 Mon Sep 17 00:00:00 2001 From: Moises Cruz Date: Tue, 21 Apr 2020 20:07:44 +0200 Subject: [PATCH 0012/1065] agg_type percentiles fixes 2713 --- elastalert/ruletypes.py | 36 +++++++++++++++++++++++++++++------- elastalert/schema.yaml | 6 ++++-- 2 files changed, 33 insertions(+), 9 deletions(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 2f1d2f82c..85c08a4af 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ 
-1026,6 +1026,7 @@ class MetricAggregationRule(BaseAggregationRule): """ A rule that matches when there is a low number of events given a timeframe. """ required_options = frozenset(['metric_agg_key', 'metric_agg_type']) allowed_aggregations = frozenset(['min', 'max', 'avg', 'sum', 'cardinality', 'value_count']) + allowed_percent_aggregations = frozenset(['percentiles']) def __init__(self, *args): super(MetricAggregationRule, self).__init__(*args) @@ -1035,8 +1036,10 @@ def __init__(self, *args): self.metric_key = 'metric_' + self.rules['metric_agg_key'] + '_' + self.rules['metric_agg_type'] - if not self.rules['metric_agg_type'] in self.allowed_aggregations: + if not self.rules['metric_agg_type'] in self.allowed_aggregations.union(self.allowed_percent_aggregations): raise EAException("metric_agg_type must be one of %s" % (str(self.allowed_aggregations))) + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations and self.rules['percentile_range'] is None: + raise EAException("percentile_range must be specified for percentiles aggregation") self.rules['aggregation_query_element'] = self.generate_aggregation_query() @@ -1051,14 +1054,20 @@ def get_match_str(self, match): return message def generate_aggregation_query(self): - return {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + query = {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + query[self.metric_key][self.rules['metric_agg_type']]['percents'] = [self.rules['percentile_range']] + return query def check_matches(self, timestamp, query_key, aggregation_data): if "compound_query_key" in self.rules: self.check_matches_recursive(timestamp, query_key, aggregation_data, self.rules['compound_query_key'], dict()) else: - metric_val = aggregation_data[self.metric_key]['value'] + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + metric_val = list(aggregation_data[self.metric_key]['values'].values())[0] + else: + metric_val = aggregation_data[self.metric_key]['value'] if self.crossed_thresholds(metric_val): match = {self.rules['timestamp_field']: timestamp, self.metric_key: metric_val} @@ -1106,6 +1115,7 @@ class SpikeMetricAggregationRule(BaseAggregationRule, SpikeRule): """ A rule that matches when there is a spike in an aggregated event compared to its reference point """ required_options = frozenset(['metric_agg_key', 'metric_agg_type', 'spike_height', 'spike_type']) allowed_aggregations = frozenset(['min', 'max', 'avg', 'sum', 'cardinality', 'value_count']) + allowed_percent_aggregations = frozenset(['percentiles']) def __init__(self, *args): # We inherit everything from BaseAggregation and Spike, overwrite only what we need in functions below @@ -1113,8 +1123,11 @@ def __init__(self, *args): # MetricAgg alert things self.metric_key = 'metric_' + self.rules['metric_agg_key'] + '_' + self.rules['metric_agg_type'] - if not self.rules['metric_agg_type'] in self.allowed_aggregations: + + if not self.rules['metric_agg_type'] in self.allowed_aggregations.union(self.allowed_percent_aggregations): raise EAException("metric_agg_type must be one of %s" % (str(self.allowed_aggregations))) + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations and self.rules['percentile_range'] is None: + raise EAException("percentile_range must be specified for percentiles aggregation") # Disabling bucket intervals (doesn't make sense in context of spike to 
split up your time period) if self.rules.get('bucket_interval'): @@ -1126,7 +1139,10 @@ def generate_aggregation_query(self): """Lifted from MetricAggregationRule, added support for scripted fields""" if self.rules.get('metric_agg_script'): return {self.metric_key: {self.rules['metric_agg_type']: self.rules['metric_agg_script']}} - return {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + query = {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + query[self.metric_key][self.rules['metric_agg_type']]['percents'] = [self.rules['percentile_range']] + return query def add_aggregation_data(self, payload): """ @@ -1140,7 +1156,10 @@ def add_aggregation_data(self, payload): else: # no time / term split, just focus on the agg event = {self.ts_field: timestamp} - agg_value = payload_data[self.metric_key]['value'] + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + agg_value = list(payload_data[self.metric_key]['values'].values())[0] + else: + agg_value = payload_data[self.metric_key]['value'] self.handle_event(event, agg_value, 'all') return @@ -1160,7 +1179,10 @@ def unwrap_term_buckets(self, timestamp, term_buckets, qk=[]): continue qk_str = ','.join(qk) - agg_value = term_data[self.metric_key]['value'] + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + agg_value = list(term_data[self.metric_key]['values'].values())[0] + else: + agg_value = term_data[self.metric_key]['value'] event = {self.ts_field: timestamp, self.rules['query_key']: qk_str} # pass to SpikeRule's tracker diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index cc5d52395..a5b00a5fa 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -110,7 +110,7 @@ oneOf: type: {enum: [spike_aggregation]} spike_height: {type: number} spike_type: {enum: ["up", "down", "both"]} - metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count"]} + metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count", "percentiles"]} timeframe: *timeframe use_count_query: {type: boolean} doc_type: {type: string} @@ -120,6 +120,7 @@ oneOf: threshold_ref: {type: number} threshold_cur: {type: number} min_doc_count: {type: integer} + percentile_range: {type: integer} - title: Flatline required: [threshold, timeframe] @@ -153,8 +154,9 @@ oneOf: required: [metric_agg_key,metric_agg_type] properties: type: {enum: [metric_aggregation]} - metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count"]} + metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count", "percentiles"]} #timeframe: *timeframe + percentile_range: {type: integer} - title: Percentage Match required: [match_bucket_filter] From b9963d9e1bd2498bdf76fd2f10e7e71a0181e468 Mon Sep 17 00:00:00 2001 From: Minogiannis Grigoris Date: Fri, 24 Apr 2020 20:17:10 +0300 Subject: [PATCH 0013/1065] Adjusting elastalert/ruletypes.py so that the functions 'append' and 'append_middle' take into account the scenario whereby an event is None --- elastalert/ruletypes.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 2f1d2f82c..1ccb85d8d 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -317,6 +317,8 @@ def append(self, event): """ Add an event to the window. Event should be of the form (dict, count). 
This will also pop the oldest events and call onRemoved on them until the window size is less than timeframe. """ + if not event or not event[1]: + return self self.data.add(event) self.running_count += event[1] @@ -357,6 +359,8 @@ def __iter__(self): def append_middle(self, event): """ Attempt to place the event in the correct location in our deque. Returns True if successful, otherwise False. """ + if not event or not event[1]: + return self rotation = 0 ts = self.get_ts(event) From 50bcd948460438dc111b309e231a340248b5701d Mon Sep 17 00:00:00 2001 From: Minogiannis Grigoris Date: Fri, 24 Apr 2020 20:47:38 +0300 Subject: [PATCH 0014/1065] Adjusting elastalert/ruletypes.py so that the functions 'append' and 'append_middle' take into account the scenario whereby an event is None --- elastalert/ruletypes.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 1ccb85d8d..00ac4c621 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -317,15 +317,15 @@ def append(self, event): """ Add an event to the window. Event should be of the form (dict, count). This will also pop the oldest events and call onRemoved on them until the window size is less than timeframe. """ - if not event or not event[1]: - return self self.data.add(event) - self.running_count += event[1] + if event and event[1]: + self.running_count += event[1] while self.duration() >= self.timeframe: oldest = self.data[0] self.data.remove(oldest) - self.running_count -= oldest[1] + if oldest and oldest[1]: + self.running_count -= oldest[1] self.onRemoved and self.onRemoved(oldest) def duration(self): @@ -359,15 +359,14 @@ def __iter__(self): def append_middle(self, event): """ Attempt to place the event in the correct location in our deque. Returns True if successful, otherwise False. 
""" - if not event or not event[1]: - return self rotation = 0 ts = self.get_ts(event) # Append left if ts is earlier than first event if self.get_ts(self.data[0]) > ts: self.data.appendleft(event) - self.running_count += event[1] + if event and event[1]: + self.running_count += event[1] return # Rotate window until we can insert event @@ -378,7 +377,8 @@ def append_middle(self, event): # This should never happen return self.data.append(event) - self.running_count += event[1] + if event and event[1]: + self.running_count += event[1] self.data.rotate(-rotation) From cde4ac325c0991619b46f03be9f400104b0a6a85 Mon Sep 17 00:00:00 2001 From: Ryan Goggin Date: Mon, 27 Apr 2020 15:36:02 -0400 Subject: [PATCH 0015/1065] Add ca certs and ignore ssl to HTTP Post Added options: - http_post_ca_certs - http_post_ignore_ssl_errors --- elastalert/alerts.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index a453081fb..9bd882684 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1956,6 +1956,8 @@ def __init__(self, rule): self.post_static_payload = self.rule.get('http_post_static_payload', {}) self.post_all_values = self.rule.get('http_post_all_values', not self.post_payload) self.post_http_headers = self.rule.get('http_post_headers', {}) + self.post_ca_certs = self.rule.get('http_post_ca_certs') + self.post_ignore_ssl_errors = self.rule.get('http_post_ignore_ssl_errors', False) self.timeout = self.rule.get('http_post_timeout', 10) def alert(self, matches): @@ -1969,12 +1971,18 @@ def alert(self, matches): "Content-Type": "application/json", "Accept": "application/json;charset=utf-8" } + if self.post_ca_certs: + verify = self.post_ca_certs + else: + verify = not self.post_ignore_ssl_errors + headers.update(self.post_http_headers) proxies = {'https': self.post_proxy} if self.post_proxy else None for url in self.post_url: try: response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), - headers=headers, proxies=proxies, timeout=self.timeout) + headers=headers, proxies=proxies, timeout=self.timeout, + verify=verify) response.raise_for_status() except RequestException as e: raise EAException("Error posting HTTP Post alert: %s" % e) From ff899b93f6f75f014871f18af253f2c3c7bc631d Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Tue, 5 May 2020 16:24:43 +0900 Subject: [PATCH 0016/1065] Update Docs for Zabbix --- docs/source/ruletypes.rst | 2 +- elastalert/schema.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index a947a77b7..e2420a363 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2242,4 +2242,4 @@ Required: ``zbx_sender_host``: The address where zabbix server is running. ``zbx_sender_port``: The port where zabbix server is listenning. ``zbx_host``: This field setup the host in zabbix that receives the value sent by Elastalert. -``zbx_item``: This field setup the item in the host that receives the value sent by Elastalert. +``zbx_key``: This field setup the key in the host that receives the value sent by Elastalert. 
diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index cc5d52395..3fd147447 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -386,4 +386,4 @@ properties: zbx_sender_host: {type: string} zbx_sender_port: {type: integer} zbx_host: {type: string} - zbx_item: {type: string} + zbx_key: {type: string} From 24442676e48df17451b71fe749878076374cbf06 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Tue, 5 May 2020 16:57:34 +0900 Subject: [PATCH 0017/1065] Add LineNotify to alerts mapping --- elastalert/loaders.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 771194768..08f06f648 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -77,7 +77,8 @@ class RulesLoader(object): 'servicenow': alerts.ServiceNowAlerter, 'alerta': alerts.AlertaAlerter, 'post': alerts.HTTPPostAlerter, - 'hivealerter': alerts.HiveAlerter + 'hivealerter': alerts.HiveAlerter, + 'linenotify': alerts.LineNotifyAlerter } # A partial ordering of alert types. Relative order will be preserved in the resulting alerts list From f6f57a5bbfae8306fcd29f3b78f382f31038121f Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 10 May 2020 06:47:49 +0900 Subject: [PATCH 0018/1065] Fix SNS Program & Docs --- docs/source/ruletypes.rst | 37 +++++++++++++++++++++++++++++---- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index a947a77b7..a198abe63 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1670,13 +1670,42 @@ SNS requires one option: Optional: -``aws_access_key``: An access key to connect to SNS with. +``sns_aws_access_key_id``: An access key to connect to SNS with. -``aws_secret_key``: The secret key associated with the access key. +``sns_aws_secret_access_key``: The secret key associated with the access key. -``aws_region``: The AWS region in which the SNS resource is located. Default is us-east-1 +``sns_aws_region``: The AWS region in which the SNS resource is located. Default is us-east-1 -``profile``: The AWS profile to use. If none specified, the default will be used. +``sns_aws_profile``: The AWS profile to use. If none specified, the default will be used. + +Example usage when not using aws_profile:: + + alert: + - sns + sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic' + sns_aws_access_key_id: 'XXXXXXXXXXXXXXXXXX' + sns_aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY' + sns_aws_region: 'us-east-1' # You must nest aws_region within your alert configuration so it is not used to sign AWS requests. + +Example usage when using aws_profile:: + + # Create ~/.aws/credentials + + [default] + aws_access_key_id = xxxxxxxxxxxxxxxxxxxx + aws_secret_access_key = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy + + # Create ~/.aws/config + + [default] + region = us-east-1 + + # alert rule setting + + alert: + - sns + sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic' + sns_aws_profile: 'default' HipChat ~~~~~~~ From c77c9951edbea7386b82f50f6b0c515b48bace7a Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 10 May 2020 06:50:51 +0900 Subject: [PATCH 0019/1065] Fix SNS Program & Docs_2 --- elastalert/alerts.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index a453081fb..2a04b4130 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -935,11 +935,11 @@ class SnsAlerter(Alerter): def __init__(self, *args): super(SnsAlerter, self).__init__(*args) self.sns_topic_arn = self.rule.get('sns_topic_arn', '') - self.aws_access_key_id = self.rule.get('aws_access_key_id') - self.aws_secret_access_key = self.rule.get('aws_secret_access_key') - self.aws_region = self.rule.get('aws_region', 'us-east-1') + self.sns_aws_access_key_id = self.rule.get('sns_aws_access_key_id') + self.sns_aws_secret_access_key = self.rule.get('sns_aws_secret_access_key') + self.sns_aws_region = self.rule.get('sns_aws_region', 'us-east-1') self.profile = self.rule.get('boto_profile', None) # Deprecated - self.profile = self.rule.get('aws_profile', None) + self.profile = self.rule.get('sns_aws_profile', None) def create_default_title(self, matches): subject = 'ElastAlert: %s' % (self.rule['name']) @@ -948,12 +948,15 @@ def create_default_title(self, matches): def alert(self, matches): body = self.create_alert_body(matches) - session = boto3.Session( - aws_access_key_id=self.aws_access_key_id, - aws_secret_access_key=self.aws_secret_access_key, - region_name=self.aws_region, - profile_name=self.profile - ) + if self.profile is None: + session = boto3.Session( + aws_access_key_id=self.sns_aws_access_key_id, + aws_secret_access_key=self.sns_aws_secret_access_key, + region_name=self.sns_aws_region + ) + else: + session = boto3.Session(profile_name=self.profile) + sns_client = session.client('sns') sns_client.publish( TopicArn=self.sns_topic_arn, From 72709790c3dff1748467e108a44bb92dc1bcdb36 Mon Sep 17 00:00:00 2001 From: Jasper Jürgensen Date: Sun, 10 May 2020 17:25:28 +0200 Subject: [PATCH 0020/1065] Adds --silence_qk_value option to elastalert This enables the user to silence a rule only for a specific query_key value and not only the whole rule. See Issue #2777 --- elastalert/elastalert.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 24b10ced9..fd1eeb5f8 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -83,6 +83,11 @@ def parse_args(self, args): parser.add_argument('--rule', dest='rule', help='Run only a specific rule (by filename, must still be in rules folder)') parser.add_argument('--silence', dest='silence', help='Silence rule for a time period. Must be used with --rule. Usage: ' '--silence <units>=<number>, eg. --silence hours=2') + parser.add_argument( + "--silence_qk_value", + dest="silence_qk_value", + help="Silence the rule only for this specific query key value.", + ) parser.add_argument('--start', dest='start', help='YYYY-MM-DDTHH:MM:SS Start querying from this timestamp. ' 'Use "NOW" to start from current time.
(Default: present)') parser.add_argument('--end', dest='end', help='YYYY-MM-DDTHH:MM:SS Query to this timestamp. (Default: present)') @@ -1853,7 +1858,10 @@ def silence(self, silence_cache_key=None): # With --rule, self.rules will only contain that specific rule if not silence_cache_key: - silence_cache_key = self.rules[0]['name'] + "._silence" + if self.args.silence_qk_value: + silence_cache_key = self.rules[0]['name'] + "." + self.args.silence_qk_value + else: + silence_cache_key = self.rules[0]['name'] + "._silence" try: silence_ts = parse_deadline(self.args.silence) From 496a04fb89e5e99021bc894537c35a514c2f6414 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 23 May 2020 00:50:32 +0900 Subject: [PATCH 0021/1065] Sync requirements.txt and setup.py & update py-zabbix --- requirements.txt | 6 +++--- setup.py | 5 +++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/requirements.txt b/requirements.txt index c66ca8d79..f5fb0b36f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,12 +12,12 @@ jira>=1.0.10,<1.0.15 jsonschema>=3.0.2 mock>=2.0.0 prison>=0.1.2 -py-zabbix==1.1.3 +py-zabbix>=1.1.3 PyStaticConfiguration>=0.10.3 python-dateutil>=2.6.0,<2.7.0 python-magic>=0.4.15 PyYAML>=5.1 -requests>=2.0.0 +requests>=2.10.0 stomp.py>=4.1.17 texttable>=0.8.8 -twilio==6.0.0 +twilio>=6.0.0,<6.1 diff --git a/setup.py b/setup.py index 30ef9495f..048eb5fa2 100644 --- a/setup.py +++ b/setup.py @@ -42,12 +42,13 @@ 'prison>=0.1.2', 'PyStaticConfiguration>=0.10.3', 'python-dateutil>=2.6.0,<2.7.0', - 'PyYAML>=3.12', + 'PyYAML>=5.1', 'requests>=2.10.0', 'stomp.py>=4.1.17', 'texttable>=0.8.8', 'twilio>=6.0.0,<6.1', 'python-magic>=0.4.15', - 'cffi>=1.11.5' + 'cffi>=1.11.5', + 'py-zabbix>=1.1.3' ] ) From f8b0415bff4f78e27ae1cc13502196c7512355bd Mon Sep 17 00:00:00 2001 From: Raghu Date: Wed, 10 Jun 2020 09:17:36 +0530 Subject: [PATCH 0022/1065] Added Squadcast http post alert config --- docs/source/ruletypes.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index a947a77b7..9f364811c 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2164,6 +2164,20 @@ Example usage:: http_post_headers: authorization: Basic 123dr3234 +Squadcast +~~~~~~~~ + +Alerts can be sent to Squadcast using the `http post` method described above and Squadcast will process it and send Phone, SMS, Email and Push notifications to the relevant person(s) and let them take actions. + +Configuration variables in rules YAML file:: + + alert: post + http_post_url: + http_post_static_payload: + Title: + http_post_all_values: true + +For more details, you can refer the `Squadcast documentation `_. Alerter ~~~~~~~ From fa5a16b7808e49489ce6bbe6ac35f65f25b2f4a0 Mon Sep 17 00:00:00 2001 From: Raghu Date: Wed, 10 Jun 2020 09:20:41 +0530 Subject: [PATCH 0023/1065] Adjusted the space in the YAML config --- docs/source/ruletypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 9f364811c..fb6ff8137 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2174,7 +2174,7 @@ Configuration variables in rules YAML file:: alert: post http_post_url: http_post_static_payload: - Title: + Title: http_post_all_values: true For more details, you can refer the `Squadcast documentation `_. 
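The Squadcast recipe above rides on the generic ``post`` alerter, so its request, including the ``http_post_ca_certs`` / ``http_post_ignore_ssl_errors`` options introduced in PATCH 0015, boils down to a call like this minimal sketch (the URL and payload values are hypothetical placeholders)::

    import json
    import requests

    url = 'https://example.com/webhook'  # stands in for http_post_url
    ca_certs = None                      # http_post_ca_certs, if configured
    ignore_ssl_errors = False            # http_post_ignore_ssl_errors

    # Mirrors the verify logic from PATCH 0015: a CA bundle path wins,
    # otherwise verification is simply toggled by the ignore flag.
    verify = ca_certs if ca_certs else not ignore_ssl_errors

    payload = {'Title': 'example incident'}  # http_post_static_payload plus match fields
    response = requests.post(url, data=json.dumps(payload),
                             headers={'Content-Type': 'application/json'},
                             timeout=10, verify=verify)
    response.raise_for_status()

Passing a CA bundle path as ``verify`` is standard requests behaviour, which is why a configured ``http_post_ca_certs`` takes precedence over the boolean toggle.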
From c15364ab92f71183b987816d1815f615a4c0e8ad Mon Sep 17 00:00:00 2001 From: Raghu Date: Wed, 10 Jun 2020 09:23:00 +0530 Subject: [PATCH 0024/1065] Replaced tabs with spaces --- docs/source/ruletypes.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index fb6ff8137..6ce23ec17 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2171,11 +2171,11 @@ Alerts can be sent to Squadcast using the `http post` method described above and Configuration variables in rules YAML file:: - alert: post - http_post_url: - http_post_static_payload: - Title: - http_post_all_values: true + alert: post + http_post_url: + http_post_static_payload: + Title: + http_post_all_values: true For more details, you can refer the `Squadcast documentation `_. From 8a3fa307da188536da8572f3f88734e718aa7bc8 Mon Sep 17 00:00:00 2001 From: Raghu Date: Tue, 16 Jun 2020 10:15:52 +0530 Subject: [PATCH 0025/1065] Fixed the build error by matching the number of ~ to the length of the heading. --- docs/source/ruletypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 6ce23ec17..c493c39af 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2165,7 +2165,7 @@ Example usage:: authorization: Basic 123dr3234 Squadcast -~~~~~~~~ +~~~~~~~~~ Alerts can be sent to Squadcast using the `http post` method described above and Squadcast will process it and send Phone, SMS, Email and Push notifications to the relevant person(s) and let them take actions. From a107491800ffd3e666d2dd570aefa952e197d280 Mon Sep 17 00:00:00 2001 From: Swapnil Suryawanshi Date: Tue, 23 Jun 2020 15:20:45 +0530 Subject: [PATCH 0026/1065] fix attribute error is raised when query ran for future --- elastalert/elastalert.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 24b10ced9..f99a3229b 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -870,15 +870,16 @@ def run_rule(self, rule, endtime, starttime=None): rule['original_starttime'] = rule['starttime'] rule['scrolling_cycle'] = 0 + self.thread_data.num_hits = 0 + self.thread_data.num_dupes = 0 + self.thread_data.cumulative_hits = 0 + # Don't run if starttime was set to the future if ts_now() <= rule['starttime']: logging.warning("Attempted to use query start time in the future (%s), sleeping instead" % (starttime)) return 0 # Run the rule. 
If querying over a large time period, split it up into segments - self.thread_data.num_hits = 0 - self.thread_data.num_dupes = 0 - self.thread_data.cumulative_hits = 0 segment_size = self.get_segment_size(rule) tmp_endtime = rule['starttime'] From 6b04f9e12c076bb49a2b37082d779916b1666d4e Mon Sep 17 00:00:00 2001 From: Ryan Goggin Date: Thu, 9 Jul 2020 08:23:23 -0400 Subject: [PATCH 0027/1065] Fix tests to include new kwarg verify to requests.post --- tests/alerts_test.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 5cd61ae75..d5ac49ff2 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -1672,7 +1672,8 @@ def test_http_alerter_with_payload(): data=mock.ANY, headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - timeout=10 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1707,7 +1708,8 @@ def test_http_alerter_with_payload_all_values(): data=mock.ANY, headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - timeout=10 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1739,7 +1741,8 @@ def test_http_alerter_without_payload(): data=mock.ANY, headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - timeout=10 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) From a7144d156016830cb383ed96deb622a9128d0fb3 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 25 Jul 2020 13:57:00 +0900 Subject: [PATCH 0028/1065] Remove Stride integration The Stride service was shutdown on February 15th, 2019. --- docs/source/ruletypes.rst | 20 --- elastalert/alerts.py | 93 ------------- elastalert/loaders.py | 1 - elastalert/schema.yaml | 6 - tests/alerts_test.py | 278 -------------------------------------- 5 files changed, 398 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index a947a77b7..b5ad1f610 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1715,26 +1715,6 @@ If set, it will mention the users, no matter if the original message format is s Valid values: list of strings. Defaults to ``[]``. - -Stride -~~~~~~~ - -Stride alerter will send a notification to a predefined Stride room. The body of the notification is formatted the same as with other alerters. -Simple HTML such as and tags will be parsed into a format that Stride can consume. - -The alerter requires the following two options: - -``stride_access_token``: The randomly generated notification token created by Stride. - -``stride_cloud_id``: The site_id associated with the Stride site you want to send the alert to. - -``stride_conversation_id``: The conversation_id associated with the Stride conversation you want to send the alert to. - -``stride_ignore_ssl_errors``: Ignore TLS errors (self-signed certificates, etc.). Default is false. - -``stride_proxy``: By default ElastAlert will not use a network proxy to send notifications to Stride. Set this option using ``hostname:port`` if you need to use a proxy. 
- - MS Teams ~~~~~~~~ diff --git a/elastalert/alerts.py b/elastalert/alerts.py index d3fa7518f..48c834e84 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1985,99 +1985,6 @@ def get_info(self): 'http_post_webhook_url': self.post_url} -class StrideHTMLParser(HTMLParser): - """Parse html into stride's fabric structure""" - - def __init__(self): - """ - Define a couple markup place holders. - """ - self.content = [] - self.mark = None - HTMLParser.__init__(self) - - def handle_starttag(self, tag, attrs): - """Identify and verify starting tag is fabric compatible.""" - if tag == 'b' or tag == 'strong': - self.mark = dict(type='strong') - if tag == 'u': - self.mark = dict(type='underline') - if tag == 'a': - self.mark = dict(type='link', attrs=dict(attrs)) - - def handle_endtag(self, tag): - """Clear mark on endtag.""" - self.mark = None - - def handle_data(self, data): - """Construct data node for our data.""" - node = dict(type='text', text=data) - if self.mark: - node['marks'] = [self.mark] - self.content.append(node) - - -class StrideAlerter(Alerter): - """ Creates a Stride conversation message for each alert """ - - required_options = frozenset( - ['stride_access_token', 'stride_cloud_id', 'stride_conversation_id']) - - def __init__(self, rule): - super(StrideAlerter, self).__init__(rule) - - self.stride_access_token = self.rule['stride_access_token'] - self.stride_cloud_id = self.rule['stride_cloud_id'] - self.stride_conversation_id = self.rule['stride_conversation_id'] - self.stride_ignore_ssl_errors = self.rule.get('stride_ignore_ssl_errors', False) - self.stride_proxy = self.rule.get('stride_proxy', None) - self.url = 'https://api.atlassian.com/site/%s/conversation/%s/message' % ( - self.stride_cloud_id, self.stride_conversation_id) - - def alert(self, matches): - body = self.create_alert_body(matches).strip() - - # parse body with StrideHTMLParser - parser = StrideHTMLParser() - parser.feed(body) - - # Post to Stride - headers = { - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(self.stride_access_token) - } - - # set https proxy, if it was provided - proxies = {'https': self.stride_proxy} if self.stride_proxy else None - - # build stride json payload - # https://developer.atlassian.com/cloud/stride/apis/document/structure/ - payload = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': parser.content} - ]} - ]}} - - try: - if self.stride_ignore_ssl_errors: - requests.packages.urllib3.disable_warnings() - response = requests.post( - self.url, data=json.dumps(payload, cls=DateTimeEncoder), - headers=headers, verify=not self.stride_ignore_ssl_errors, - proxies=proxies) - warnings.resetwarnings() - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to Stride: %s" % e) - elastalert_logger.info( - "Alert sent to Stride conversation %s" % self.stride_conversation_id) - - def get_info(self): - return {'type': 'stride', - 'stride_cloud_id': self.stride_cloud_id, - 'stride_converstation_id': self.stride_converstation_id} - - class LineNotifyAlerter(Alerter): """ Created a Line Notify for each alert """ required_option = frozenset(["linenotify_access_token"]) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 771194768..053c68ace 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -63,7 +63,6 @@ class RulesLoader(object): 'command': alerts.CommandAlerter, 'sns': alerts.SnsAlerter, 
'hipchat': alerts.HipChatAlerter, - 'stride': alerts.StrideAlerter, 'ms_teams': alerts.MsTeamsAlerter, 'slack': alerts.SlackAlerter, 'mattermost': alerts.MattermostAlerter, diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index cc5d52395..7a5426866 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -270,12 +270,6 @@ properties: hipchat_from: {type: string} hipchat_mentions: {type: array, items: {type: string}} - ### Stride - stride_access_token: {type: string} - stride_cloud_id: {type: string} - stride_conversation_id: {type: string} - stride_ignore_ssl_errors: {type: boolean} - ### Slack slack_webhook_url: *arrayOfString slack_username_override: {type: string} diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 5cd61ae75..59ee5746d 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -20,7 +20,6 @@ from elastalert.alerts import MsTeamsAlerter from elastalert.alerts import PagerDutyAlerter from elastalert.alerts import SlackAlerter -from elastalert.alerts import StrideAlerter from elastalert.loaders import FileRulesLoader from elastalert.opsgenie import OpsGenieAlerter from elastalert.util import ts_add @@ -2086,283 +2085,6 @@ def test_resolving_rule_references(ea): assert 'the_owner' == alert.rule['nested_dict']['nested_owner'] -def test_stride_plain_text(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - body = "{0}\n\n@timestamp: {1}\nsomefield: {2}".format( - rule['name'], match['@timestamp'], match['somefield'] - ) - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - -def test_stride_underline_text(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Underline Text', - 'alert_text_type': 'alert_text_only', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - body = "Underline Text" - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'underline'} - ]} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer 
{}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - -def test_stride_bold_text(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Bold Text', - 'alert_text_type': 'alert_text_only', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - body = "Bold Text" - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'strong'} - ]} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - -def test_stride_strong_text(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Bold Text', - 'alert_text_type': 'alert_text_only', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - body = "Bold Text" - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'strong'} - ]} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - -def test_stride_hyperlink(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Link', - 'alert_text_type': 'alert_text_only', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - body = "Link" - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'link', 'attrs': {'href': 'http://stride.com'}} - ]} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 
'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - -def test_stride_html(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Alert: we found something. Link', - 'alert_text_type': 'alert_text_only', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': 'Alert', 'marks': [ - {'type': 'strong'} - ]}, - {'type': 'text', 'text': ': we found something. '}, - {'type': 'text', 'text': 'Link', 'marks': [ - {'type': 'link', 'attrs': {'href': 'http://stride.com'}} - ]} - ]} - ]} - ]}} - - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) - - def test_hipchat_body_size_limit_text(): rule = { 'name': 'Test Rule', From ef8484c87e2568f4197feda222b19ed144313e92 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 25 Jul 2020 14:12:12 +0900 Subject: [PATCH 0029/1065] Remove import HTMLParser --- elastalert/alerts.py | 1 - 1 file changed, 1 deletion(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 48c834e84..0130063bf 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -12,7 +12,6 @@ import warnings from email.mime.text import MIMEText from email.utils import formatdate -from html.parser import HTMLParser from smtplib import SMTP from smtplib import SMTP_SSL from smtplib import SMTPAuthenticationError From d2cb0ed42c06f473fa77d7926b273534818da380 Mon Sep 17 00:00:00 2001 From: Flavio Pompermaier Date: Mon, 27 Jul 2020 10:47:36 +0200 Subject: [PATCH 0030/1065] Fixed checkstyle errors --- elastalert/alerts.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 9d3dfe87b..7a964a583 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -413,7 +413,7 @@ def __init__(self, *args): super(EmailAlerter, self).__init__(*args) self.assets_dir = self.rule.get('assets_dir', '/tmp') - self.images_dictionary = dict(zip( self.rule.get('email_image_keys', []), self.rule.get('email_image_values', []))); + self.images_dictionary = dict(zip(self.rule.get('email_image_keys', []), self.rule.get('email_image_values', []))) self.smtp_host = self.rule.get('smtp_host', 'localhost') self.smtp_ssl = self.rule.get('smtp_ssl', False) self.from_addr = self.rule.get('from_addr', 'ElastAlert') @@ -458,7 +458,7 @@ def alert(self, matches): if 'email_add_domain' in self.rule: to_addr = [name + self.rule['email_add_domain'] for name in to_addr] if self.rule.get('email_format') == 'html': - #email_msg = MIMEText(body, 'html', _charset='UTF-8') # old way + # email_msg = MIMEText(body, 'html', _charset='UTF-8') # old way email_msg 
= MIMEMultipart() msgText = MIMEText(body, 'html', _charset='UTF-8') email_msg.attach(msgText) # Added, and edited the previous line From b739aaa6d57e2232429c77a15fad4034e3bd6abb Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 13 Aug 2020 01:44:46 +0900 Subject: [PATCH 0031/1065] kibana discover 7.4/7.5/7.6/7.7/7.8 support --- docs/source/ruletypes.rst | 4 ++-- elastalert/kibana_discover.py | 2 +- elastalert/schema.yaml | 2 +- tests/kibana_discover_test.py | 12 ++++++------ 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index a947a77b7..e2ecfb672 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -553,9 +553,9 @@ The currently supported versions of Kibana Discover are: - `5.6` - `6.0`, `6.1`, `6.2`, `6.3`, `6.4`, `6.5`, `6.6`, `6.7`, `6.8` -- `7.0`, `7.1`, `7.2`, `7.3` +- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8` -``kibana_discover_version: '7.3'`` +``kibana_discover_version: '7.8'`` kibana_discover_index_pattern_id ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index 7e4dbb5d1..3fb252144 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -14,7 +14,7 @@ kibana_default_timedelta = datetime.timedelta(minutes=10) kibana5_kibana6_versions = frozenset(['5.6', '6.0', '6.1', '6.2', '6.3', '6.4', '6.5', '6.6', '6.7', '6.8']) -kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3']) +kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8']) def generate_kibana_discover_url(rule, match): ''' Creates a link for a kibana discover app. ''' diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index cc5d52395..5946d9c0c 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -217,7 +217,7 @@ properties: ### Kibana Discover App Link generate_kibana_discover_url: {type: boolean} kibana_discover_app_url: {type: string, format: uri} - kibana_discover_version: {type: string, enum: ['7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} + kibana_discover_version: {type: string, enum: ['7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} kibana_discover_index_pattern_id: {type: string, minLength: 1} kibana_discover_columns: {type: array, items: {type: string, minLength: 1}, minItems: 1} kibana_discover_from_timedelta: *timedelta diff --git a/tests/kibana_discover_test.py b/tests/kibana_discover_test.py index f06fe4e0c..70bd43776 100644 --- a/tests/kibana_discover_test.py +++ b/tests/kibana_discover_test.py @@ -38,7 +38,7 @@ def test_generate_kibana_discover_url_with_kibana_5x_and_6x(kibana_version): assert url == expectedUrl -@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3']) +@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8']) def test_generate_kibana_discover_url_with_kibana_7x(kibana_version): url = generate_kibana_discover_url( rule={ @@ -171,7 +171,7 @@ def test_generate_kibana_discover_url_with_from_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.3', + 'kibana_discover_version': '7.8', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timestamp_field': 
'timestamp' @@ -204,7 +204,7 @@ def test_generate_kibana_discover_url_with_from_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.3', + 'kibana_discover_version': '7.8', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -238,7 +238,7 @@ def test_generate_kibana_discover_url_with_to_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.3', + 'kibana_discover_version': '7.8', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_to_timedelta': timedelta(hours=1), 'timestamp_field': 'timestamp' @@ -271,7 +271,7 @@ def test_generate_kibana_discover_url_with_to_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.3', + 'kibana_discover_version': '7.8', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_to_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -305,7 +305,7 @@ def test_generate_kibana_discover_url_with_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.3', + 'kibana_discover_version': '7.8', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'timeframe': timedelta(minutes=20), 'timestamp_field': 'timestamp' From 39a04aa9b038dde24d6a5373fe6000137a927edb Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 22 Aug 2020 21:25:17 +0900 Subject: [PATCH 0032/1065] Kibana 7.9 --- elastalert/kibana_discover.py | 2 +- elastalert/schema.yaml | 2 +- tests/kibana_discover_test.py | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index 3fb252144..b332facce 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -14,7 +14,7 @@ kibana_default_timedelta = datetime.timedelta(minutes=10) kibana5_kibana6_versions = frozenset(['5.6', '6.0', '6.1', '6.2', '6.3', '6.4', '6.5', '6.6', '6.7', '6.8']) -kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8']) +kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9']) def generate_kibana_discover_url(rule, match): ''' Creates a link for a kibana discover app. 
''' diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 5946d9c0c..ba265a731 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -217,7 +217,7 @@ properties: ### Kibana Discover App Link generate_kibana_discover_url: {type: boolean} kibana_discover_app_url: {type: string, format: uri} - kibana_discover_version: {type: string, enum: ['7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} + kibana_discover_version: {type: string, enum: ['7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} kibana_discover_index_pattern_id: {type: string, minLength: 1} kibana_discover_columns: {type: array, items: {type: string, minLength: 1}, minItems: 1} kibana_discover_from_timedelta: *timedelta diff --git a/tests/kibana_discover_test.py b/tests/kibana_discover_test.py index 70bd43776..ae5c8bca1 100644 --- a/tests/kibana_discover_test.py +++ b/tests/kibana_discover_test.py @@ -38,7 +38,7 @@ def test_generate_kibana_discover_url_with_kibana_5x_and_6x(kibana_version): assert url == expectedUrl -@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8']) +@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9']) def test_generate_kibana_discover_url_with_kibana_7x(kibana_version): url = generate_kibana_discover_url( rule={ @@ -171,7 +171,7 @@ def test_generate_kibana_discover_url_with_from_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.8', + 'kibana_discover_version': '7.9', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timestamp_field': 'timestamp' @@ -204,7 +204,7 @@ def test_generate_kibana_discover_url_with_from_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.8', + 'kibana_discover_version': '7.9', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -238,7 +238,7 @@ def test_generate_kibana_discover_url_with_to_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.8', + 'kibana_discover_version': '7.9', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_to_timedelta': timedelta(hours=1), 'timestamp_field': 'timestamp' @@ -271,7 +271,7 @@ def test_generate_kibana_discover_url_with_to_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.8', + 'kibana_discover_version': '7.9', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_to_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -305,7 +305,7 @@ def test_generate_kibana_discover_url_with_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.8', + 'kibana_discover_version': '7.9', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 
'timeframe': timedelta(minutes=20), 'timestamp_field': 'timestamp' From 87ba1d1ffc7d4c36bb2fd6817e5d7f487eaf209b Mon Sep 17 00:00:00 2001 From: Josh Brower Date: Wed, 2 Sep 2020 06:30:50 -0400 Subject: [PATCH 0033/1065] TheHive alerter: Allow severity and tlp to be set by rule --- elastalert/alerts.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index d3fa7518f..b881ec2ea 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2154,7 +2154,10 @@ def alert(self, matches): n += 1 custom_fields[cf_key] = cf elif isinstance(alert_config_value, str): - alert_config[alert_config_field] = alert_config_value.format(**context) + alert_value = alert_config_value.format(**context) + if alert_config_field in ['severity', 'tlp']: + alert_value = int(alert_value) + alert_config[alert_config_field] = alert_value elif isinstance(alert_config_value, (list, tuple)): formatted_list = [] for element in alert_config_value: From 59ad4ac53c838aa76f85671c1252d28787fcc452 Mon Sep 17 00:00:00 2001 From: EC2 Default User Date: Sat, 12 Sep 2020 06:46:10 +0000 Subject: [PATCH 0034/1065] fix a configuration options of docs --- docs/source/ruletypes.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index ff3763712..4426c00aa 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -140,7 +140,7 @@ Rule Configuration Cheat Sheet +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ | ``ignore_null`` (boolean, no default) | | | Req | Req | | | | | | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ -| ``query_key`` (string, no default) | Opt | | | Req | Opt | Opt | Opt | Req | Opt | +| ``query_key`` (string or list, no default) | Opt | | | Req | Opt | Opt | Opt | Req | Opt | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ | ``aggregation_key`` (string, no default) | Opt | | | | | | | | | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ @@ -160,7 +160,7 @@ Rule Configuration Cheat Sheet | | | | | | | | | | | |``doc_type`` (string, no default) | | | | | | | | | | | | | | | | | | | | | -|``query_key`` (string, no default) | | | | | | | | | | +|``query_key`` (string or list, no default) | | | | | | | | | | | | | | | | | | | | | |``terms_size`` (int, default 50) | | | | | | | | | | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ From 62924c4b2e7ecf071bb8cde46a726768df7d3ab0 Mon Sep 17 00:00:00 2001 From: Kevin Rose Date: Thu, 1 Oct 2020 17:06:32 -0400 Subject: [PATCH 0035/1065] Add support for custom_details in the PagerDuty alerter v2 module --- docs/source/ruletypes.rst | 5 +++++ elastalert/alerts.py | 13 ++++++++++--- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index ff3763712..0823c82eb 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1915,6 +1915,11 @@ See https://v2.developer.pagerduty.com/docs/send-an-event-events-api-v2 ``pagerduty_v2_payload_source_args``: If set, 
and ``pagerduty_v2_payload_source`` is a formattable string, Elastalert will format the source based on the provided array of fields from the rule or match. +``pagerduty_v2_payload_custom_details``: List of keys:values to use as the content of the custom_details payload. Example - ip:clientip will map the value from the clientip index of Elasticsearch to JSON key named ip. + +``pagerduty_v2_payload_include_all_info``: If True, this will include the entire Elasticsearch document as a custom detail field called "information" in the PagerDuty alert. + + PagerTree ~~~~~~~~~ diff --git a/elastalert/alerts.py b/elastalert/alerts.py index d3fa7518f..cca59b0d5 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1357,6 +1357,8 @@ def __init__(self, rule): self.pagerduty_v2_payload_severity = self.rule.get('pagerduty_v2_payload_severity', 'critical') self.pagerduty_v2_payload_source = self.rule.get('pagerduty_v2_payload_source', 'ElastAlert') self.pagerduty_v2_payload_source_args = self.rule.get('pagerduty_v2_payload_source_args', None) + self.pagerduty_v2_payload_custom_details = self.rule.get('pagerduty_v2_payload_custom_details', {}) + self.pagerduty_v2_payload_include_all_info = self.rule.get('pagerduty_v2_payload_include_all_info', True) if self.pagerduty_api_version == 'v2': self.url = 'https://events.pagerduty.com/v2/enqueue' @@ -1369,6 +1371,13 @@ def alert(self, matches): # post to pagerduty headers = {'content-type': 'application/json'} if self.pagerduty_api_version == 'v2': + + custom_details_payload = {'information': body} if self.pagerduty_v2_payload_include_all_info else {} + if self.pagerduty_v2_payload_custom_details: + for match in matches: + for custom_details_key, es_key in list(self.pagerduty_v2_payload_custom_details.items()): + custom_details_payload[custom_details_key] = lookup_es_key(match, es_key) + payload = { 'routing_key': self.pagerduty_service_key, 'event_action': self.pagerduty_event_type, @@ -1389,9 +1398,7 @@ def alert(self, matches): self.pagerduty_v2_payload_source_args, matches), 'summary': self.create_title(matches), - 'custom_details': { - 'information': body, - }, + 'custom_details': custom_details_payload, }, } match_timestamp = lookup_es_key(matches[0], self.rule.get('timestamp_field', '@timestamp')) From d818bfa81d29731198ca8248811778e6cc85c551 Mon Sep 17 00:00:00 2001 From: Daichi Kimura Date: Tue, 20 Oct 2020 01:16:06 +0900 Subject: [PATCH 0036/1065] Fix initializing self.thread_data.alerts_sent for running elastalert-test-rule --- elastalert/elastalert.py | 1 + 1 file changed, 1 insertion(+) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index b078c86db..24c9f884f 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -159,6 +159,7 @@ def __init__(self, args): self.starttime = self.args.start self.disabled_rules = [] self.replace_dots_in_field_names = self.conf.get('replace_dots_in_field_names', False) + self.thread_data.alerts_sent = 0 self.thread_data.num_hits = 0 self.thread_data.num_dupes = 0 self.scheduler = BackgroundScheduler() From 1a42831b776d3156e1b2deaecd0e5b96ea177b40 Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Tue, 20 Oct 2020 21:10:52 +0300 Subject: [PATCH 0037/1065] adding support for multiple imports and statsd metrics --- config.yaml.example | 6 ++++++ docs/source/ruletypes.rst | 15 +++++++++++++++ docs/source/running_elastalert.rst | 4 ++++ elastalert/config.py | 4 +++- elastalert/elastalert.py | 18 ++++++++++++++++++ elastalert/loaders.py | 29 +++++++++++++++++++---------- 
elastalert/schema.yaml | 7 ++++++- requirements.txt | 2 ++ 8 files changed, 73 insertions(+), 12 deletions(-) diff --git a/config.yaml.example b/config.yaml.example index 9d9176382..70423de96 100644 --- a/config.yaml.example +++ b/config.yaml.example @@ -30,6 +30,12 @@ es_port: 9200 # Optional URL prefix for Elasticsearch #es_url_prefix: elasticsearch +# Optional prefix for statsd metrics +#statsd_metrics_prefix: elastalert + +# Optional statsd host +#statsd_host: dogstatsd + # Connect with TLS to Elasticsearch #use_ssl: True diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index ff3763712..f1aab460e 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -40,6 +40,10 @@ Rule Configuration Cheat Sheet +--------------------------------------------------------------+ | | ``es_url_prefix`` (string, no default) | | +--------------------------------------------------------------+ | +| ``statsd_metrics_prefix`` (string, no default) | | ++--------------------------------------------------------------+ | +| ``statsd_host`` (string, no default) | | ++--------------------------------------------------------------+ | | ``es_send_get_body_as`` (string, default "GET") | | +--------------------------------------------------------------+ | | ``aggregation`` (time, no default) | | @@ -289,6 +293,17 @@ es_url_prefix ``es_url_prefix``: URL prefix for the Elasticsearch endpoint. (Optional, string, no default) +statsd_metrics_prefix +^^^^^^^^^^^^^ + +``statsd_metrics_prefix``: prefix for statsd metrics. (Optional, string, no default) + + +statsd_host +^^^^^^^^^^^^^ + +``statsd_host``: statsd host. (Optional, string, no default) + es_send_get_body_as ^^^^^^^^^^^^^^^^^^^ diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index 7fdf1eeba..3f937c401 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -66,6 +66,10 @@ Next, open up config.yaml.example. In it, you will find several configuration op ``es_url_prefix``: Optional; URL prefix for the Elasticsearch endpoint. +``statsd_metrics_prefix``: Optional; prefix for statsd metrics. + +``statsd_host``: Optional; statsd host. + ``es_send_get_body_as``: Optional; Method for querying Elasticsearch - ``GET``, ``POST`` or ``source``. The default is ``GET`` ``writeback_index`` is the name of the index in which ElastAlert will store data. We will create this index later. 
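Before the wiring in config.py and elastalert.py below, it may help to see the end state these options drive. The following is a minimal sketch, not the patch itself: it assumes a tag-capable statsd client such as the `statsd-tags` fork this series eventually settles on (the stock `statsd` package's `gauge()` accepts no `tags` keyword), and the host, prefix, and counter values are illustrative.

```python
# Sketch of per-rule metric emission with a tag-capable statsd client.
# Assumes the `statsd-tags` fork; vanilla `statsd` has no tags support.
import statsd

# Port 8125 matches the port hardcoded by the patch below.
client = statsd.StatsClient(host='dogstatsd', port=8125, prefix='elastalert')

def report_rule_metrics(rule_name, num_hits, num_dupes, num_matches, alerts_sent):
    """Emit one gauge per counter, tagged with the rule that produced it."""
    tags = {'rule_name': rule_name}
    client.gauge('query.hits', num_hits, tags=tags)
    client.gauge('already_seen.hits', num_dupes, tags=tags)
    client.gauge('query.matches', num_matches, tags=tags)
    client.gauge('query.alerts_sent', alerts_sent, tags=tags)

# Example call with made-up counters for one rule execution:
report_rule_metrics('example_frequency', num_hits=120, num_dupes=3,
                    num_matches=2, alerts_sent=1)
```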
diff --git a/elastalert/config.py b/elastalert/config.py index 5ae9a26e6..fe26fb391 100644 --- a/elastalert/config.py +++ b/elastalert/config.py @@ -20,7 +20,9 @@ 'ES_USERNAME': 'es_username', 'ES_HOST': 'es_host', 'ES_PORT': 'es_port', - 'ES_URL_PREFIX': 'es_url_prefix'} + 'ES_URL_PREFIX': 'es_url_prefix', + 'STATSD_METRICS_PREFIX': 'statsd_metrics_prefix', + 'STATSD_HOST': 'statsd_host'} env = Env(ES_USE_SSL=bool) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index b078c86db..6eb160d1f 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -16,6 +16,8 @@ from smtplib import SMTP from smtplib import SMTPException from socket import error +import statsd + import dateutil.tz import pytz @@ -163,6 +165,14 @@ def __init__(self, args): self.thread_data.num_dupes = 0 self.scheduler = BackgroundScheduler() self.string_multi_field_name = self.conf.get('string_multi_field_name', False) + self.statsd_prefix = os.environ.get('statsd_metrics_prefix', '') + self.statsd_host = os.environ.get('statsd_host', '') + if self.statsd_host and len(self.statsd_host) > 0: + self.statsd = statsd.StatsClient(host=self.statsd_host, + port=8125, + prefix=self.statsd_prefix) + else: + self.statsd = None self.add_metadata_alert = self.conf.get('add_metadata_alert', False) self.show_disabled_rules = self.conf.get('show_disabled_rules', True) @@ -1279,6 +1289,14 @@ def handle_rule_execution(self, rule): " %s alerts sent" % (rule['name'], old_starttime, pretty_ts(endtime, rule.get('use_local_time')), self.thread_data.num_hits, self.thread_data.num_dupes, num_matches, self.thread_data.alerts_sent)) + rule_duration = seconds(endtime - rule.get('original_starttime')) + elastalert_logger.info("%s range %s" % (rule['name'], rule_duration)) + if self.statsd: + self.statsd.gauge('query.hits', self.thread_data.num_hits, tags={"rule_name": rule['name']}) + self.statsd.gauge('already_seen.hits', self.thread_data.num_dupes,tags={"rule_name": rule['name']}) + self.statsd.gauge('query.matches', num_matches, tags={"rule_name": rule['name']}) + self.statsd.gauge('query.alerts_sent', self.thread_data.alerts_sent, tags={"rule_name": rule['name']}) + self.thread_data.alerts_sent = 0 if next_run < datetime.datetime.utcnow(): diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 771194768..24a1cca61 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -193,6 +193,7 @@ def load_yaml(self, filename): } self.import_rules.pop(filename, None) # clear `filename` dependency + files_to_import = [] while True: loaded = self.get_yaml(filename) @@ -203,14 +204,16 @@ def load_yaml(self, filename): loaded.update(rule) rule = loaded if 'import' in rule: - # Find the path of the next file. - import_filename = self.get_import_rule(rule) - # set dependencies + # add all of the files to load into the load queue + files_to_import += self.get_import_rule(rule) + del (rule['import']) # or we could go on forever! + if len(files_to_import) > 0: + # set the next file to load + next_file_to_import = files_to_import.pop() rules = self.import_rules.get(filename, []) - rules.append(import_filename) + rules.append(next_file_to_import) self.import_rules[filename] = rules - filename = import_filename - del (rule['import']) # or we could go on forever! 
+ filename = next_file_to_import else: break @@ -534,10 +537,16 @@ def get_import_rule(self, rule): :return: Path the import rule :rtype: str """ - if os.path.isabs(rule['import']): - return rule['import'] - else: - return os.path.join(os.path.dirname(rule['rule_file']), rule['import']) + rule_imports = rule['import'] + if type(rule_imports) is str: + rule_imports = [rule_imports] + expanded_imports = [] + for rule_import in rule_imports: + if os.path.isabs(rule_import): + expanded_imports.append(rule_import) + else: + expanded_imports.append(os.path.join(os.path.dirname(rule['rule_file']), rule_import)) + return expanded_imports def get_rule_file_hash(self, rule_file): rule_file_hash = '' diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 1241315dc..c9f765211 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -181,7 +181,12 @@ properties: use_strftime_index: {type: boolean} # Optional Settings - import: {type: string} + import: + anyOf: + - type: array + items: + type: string + - type: string aggregation: *timeframe realert: *timeframe exponential_realert: *timeframe diff --git a/requirements.txt b/requirements.txt index 9c32052d0..c3a03f041 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,3 +20,5 @@ requests>=2.0.0 stomp.py>=4.1.17 texttable>=0.8.8 twilio==6.0.0 +statsd==3.3.0 +statsd-tags==3.2.1 From 648f1996d47204167626d5763a6cc8e0ec44780a Mon Sep 17 00:00:00 2001 From: Sherif Abdel-Naby Date: Wed, 21 Oct 2020 06:07:38 +0200 Subject: [PATCH 0038/1065] Add Jinja Requirement to setup.py --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 2845836a7..e2906230d 100644 --- a/setup.py +++ b/setup.py @@ -37,6 +37,7 @@ 'envparse>=0.2.0', 'exotel>=0.1.3', 'jira>=2.0.0', + 'Jinja2==2.10.1', 'jsonschema>=3.0.2', 'mock>=2.0.0', 'prison>=0.1.2', From 1b4875b26d32f70cc392e5d3cb6e2917a39df329 Mon Sep 17 00:00:00 2001 From: Sherif Abdel-Naby Date: Sat, 17 Oct 2020 20:09:25 +0200 Subject: [PATCH 0039/1065] Add Jinja2 Template option to `alert_text_type` --- elastalert/alerts.py | 10 ++++++++-- elastalert/loaders.py | 6 ++++++ elastalert/schema.yaml | 2 +- requirements.txt | 1 + 4 files changed, 16 insertions(+), 3 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index d3fa7518f..8dacfcfca 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -63,7 +63,13 @@ def _ensure_new_line(self): def _add_custom_alert_text(self): missing = self.rule.get('alert_missing_value', '') alert_text = str(self.rule.get('alert_text', '')) - if 'alert_text_args' in self.rule: + if 'alert_text_jinja' == self.rule.get('alert_text_type'): + # Top fields are accessible via `{{field_name}}` or `{{jinja_root_name['field_name']}}` + # `jinja_root_name` dict is useful when accessing *fields with dots in their keys*, + # as Jinja treat dot as a nested field. 
+ alert_text = self.rule.get("jinja_template").render(**self.match, + **{self.rule['jinja_root_name']: self.match}) + elif 'alert_text_args' in self.rule: alert_text_args = self.rule.get('alert_text_args') alert_text_values = [lookup_es_key(self.match, arg) for arg in alert_text_args] @@ -142,7 +148,7 @@ def __str__(self): self._add_custom_alert_text() self._ensure_new_line() - if self.rule.get('alert_text_type') != 'alert_text_only': + if self.rule.get('alert_text_type') != 'alert_text_only' and self.rule.get('alert_text_type') != 'alert_text_jinja': self._add_rule_text() self._ensure_new_line() if self.rule.get('top_count_keys'): diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 771194768..5729c4ed3 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -9,6 +9,7 @@ import jsonschema import yaml import yaml.scanner +from jinja2 import Template from staticconf.loader import yaml_loader from . import alerts @@ -279,6 +280,7 @@ def load_options(self, rule, conf, filename, args=None): rule.setdefault('_source_enabled', True) rule.setdefault('use_local_time', True) rule.setdefault('description', "") + rule.setdefault('jinja_root_name', "_data") # Set timestamp_type conversion function, used when generating queries and processing hits rule['timestamp_type'] = rule['timestamp_type'].strip().lower() @@ -401,6 +403,10 @@ def _dt_to_ts_with_format(dt): if rule.get('scan_entire_timeframe') and not rule.get('timeframe'): raise EAException('scan_entire_timeframe can only be used if there is a timeframe specified') + # Compile Jinja Template + if rule.get('alert_text_type') == 'alert_text_jinja': + rule["jinja_template"] = Template(str(rule.get('alert_text', ''))) + def load_modules(self, rule, args=None): """ Loads things that could be modules. Enhancements, alerts and rule type. """ # Set match enhancements diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 1241315dc..852702a83 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -227,7 +227,7 @@ properties: alert_text: {type: string} # Python format string alert_text_args: {type: array, items: {type: string}} alert_text_kw: {type: object} - alert_text_type: {enum: [alert_text_only, exclude_fields, aggregation_summary_only]} + alert_text_type: {enum: [alert_text_only, alert_text_jinja, exclude_fields, aggregation_summary_only]} alert_missing_value: {type: string} timestamp_field: {type: string} field: {} diff --git a/requirements.txt b/requirements.txt index 9c32052d0..089a11125 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,6 +8,7 @@ croniter>=0.3.16 elasticsearch>=7.0.0 envparse>=0.2.0 exotel>=0.1.3 +Jinja2==2.10.1 jira>=1.0.10,<1.0.15 jsonschema>=3.0.2 mock>=2.0.0 From 8f0b5d974baa56eb5ff5a4acfc95fed8982b746d Mon Sep 17 00:00:00 2001 From: Sherif Abdel-Naby Date: Sat, 17 Oct 2020 20:16:14 +0200 Subject: [PATCH 0040/1065] Update Docs about using Jinja2 Template in `alert_text`. --- README.md | 19 ++++++++++++++++++- docs/source/elastalert.rst | 2 ++ docs/source/ruletypes.rst | 35 ++++++++++++++++++++++++++++++++--- 3 files changed, 52 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 99acc02e7..ca626a9f7 100644 --- a/README.md +++ b/README.md @@ -230,7 +230,24 @@ The data for when an alert will fire again is stored in Elasticsearch in the ``e ### How can I change what's in the alert? -You can use the field ``alert_text`` to add custom text to an alert. By setting ``alert_text_type: alert_text_only``, it will be the entirety of the alert. 
You can also add different fields from the alert by using Python style string formatting and ``alert_text_args``. For example +You can use the field ``alert_text`` to add custom text to an alert. By setting ``alert_text_type: alert_text_only`` Or ``alert_text_type: alert_text_jinja``, it will be the entirety of the alert. You can also add different fields from the alert: + +With ``alert_text_type: alert_text_jinja`` by using [Jinja2](https://pypi.org/project/Jinja2/) Template. + +``` +alert_text_type: alert_text_jinja + +alert_text: | + Alert triggered! *({{num_hits}} Matches!)* + Something happened with {{username}} ({{email}}) + {{description|truncate}} + +``` + +> Top fields are accessible via `{{field_name}}` or `{{_data['field_name']}}`, `_data` is useful when accessing *fields with dots in their keys*, as Jinja treat dot as a nested field. +> If `_data` conflicts with your top level data, use ``jinja_root_name`` to change its name. + +With ``alert_text_type: alert_text_only`` by using Python style string formatting and ``alert_text_args``. For example ``` alert_text: "Something happened with {0} at {1}" diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index b1008c3c4..f7fc59c22 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -203,6 +203,8 @@ The default value is ``.raw`` for Elasticsearch 2 and ``.keyword`` for Elasticse ``skip_invalid``: If ``True``, skip invalid files instead of exiting. +``jinja_root_name``: When using a Jinja template, specify the name of the root field name in the template. The default is ``_data``. + Logging ------- diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index ff3763712..d961cb26b 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1361,9 +1361,30 @@ There are several ways to format the body text of the various types of events. I top_counts = top_counts_header, LF, top_counts_value field_values = Field, ": ", Value -Similarly to ``alert_subject``, ``alert_text`` can be further formatted using standard Python formatting syntax. +Similarly to ``alert_subject``, ``alert_text`` can be further formatted using Jinja2 Templates or Standard Python Formatting Syntax + +1. Jinja Template + +By setting ``alert_text_type: alert_text_jinja`` you can use jinja2 templates in ``alert_text``. :: + + alert_text_type: alert_text_jinja + + alert_text: | + Alert triggered! *({{num_hits}} Matches!)* + Something happened with {{username}} ({{email}}) + {{description|truncate}} + +Top fields are accessible via `{{field_name}}` or `{{_data['field_name']}}`, `_data` is useful when accessing *fields with dots in their keys*, as Jinja treat dot as a nested field. +If `_data` conflicts with your top level data, use ``jinja_root_name`` to change its name. + +2. Standard Python Formatting Syntax + The field names whose values will be used as the arguments can be passed with ``alert_text_args`` or ``alert_text_kw``. -You may also refer to any top-level rule property in the ``alert_subject_args``, ``alert_text_args``, ``alert_missing_value``, and ``alert_text_kw fields``. However, if the matched document has a key with the same name, that will take preference over the rule property. +You may also refer to any top-level rule property in the ``alert_subject_args``, ``alert_text_args``, ``alert_missing_value``, and ``alert_text_kw fields``. However, if the matched document has a key with the same name, that will take preference over the rule property. 
:: + + alert_text: "Something happened with {0} at {1}" + alert_text_type: alert_text_only + alert_text_args: ["username", "@timestamp"] By default:: @@ -1383,6 +1404,14 @@ With ``alert_text_type: alert_text_only``:: alert_text + +With ``alert_text_type: alert_text_jinja``:: + + body = rule_name + + alert_text + + With ``alert_text_type: exclude_fields``:: body = rule_name @@ -1501,7 +1530,7 @@ by the smtp server. ``bcc``: This adds the BCC emails to the list of recipients but does not show up in the email message. By default, this is left empty. ``email_format``: If set to ``html``, the email's MIME type will be set to HTML, and HTML content should correctly render. If you use this, -you need to put your own HTML into ``alert_text`` and use ``alert_text_type: alert_text_only``. +you need to put your own HTML into ``alert_text`` and use ``alert_text_type: alert_text_jinja`` Or ``alert_text_type: alert_text_only``. Jira ~~~~ From 8f9cf3035e56012f7a0c2cde9fe89b18f28d7f0a Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Wed, 21 Oct 2020 11:16:12 +0300 Subject: [PATCH 0041/1065] fixing statsd reporting --- config.yaml.example | 2 +- docs/source/ruletypes.rst | 6 +++--- docs/source/running_elastalert.rst | 2 +- elastalert/config.py | 2 +- elastalert/elastalert.py | 16 +++++++--------- 5 files changed, 13 insertions(+), 15 deletions(-) diff --git a/config.yaml.example b/config.yaml.example index 70423de96..958e40a8f 100644 --- a/config.yaml.example +++ b/config.yaml.example @@ -31,7 +31,7 @@ es_port: 9200 #es_url_prefix: elasticsearch # Optional prefix for statsd metrics -#statsd_metrics_prefix: elastalert +#statsd_instance_tag: elastalert # Optional statsd host #statsd_host: dogstatsd diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index f1aab460e..bb3a58f6b 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -40,7 +40,7 @@ Rule Configuration Cheat Sheet +--------------------------------------------------------------+ | | ``es_url_prefix`` (string, no default) | | +--------------------------------------------------------------+ | -| ``statsd_metrics_prefix`` (string, no default) | | +| ``statsd_instance_tag`` (string, no default) | | +--------------------------------------------------------------+ | | ``statsd_host`` (string, no default) | | +--------------------------------------------------------------+ | @@ -293,10 +293,10 @@ es_url_prefix ``es_url_prefix``: URL prefix for the Elasticsearch endpoint. (Optional, string, no default) -statsd_metrics_prefix +statsd_instance_tag ^^^^^^^^^^^^^ -``statsd_metrics_prefix``: prefix for statsd metrics. (Optional, string, no default) +``statsd_instance_tag``: prefix for statsd metrics. (Optional, string, no default) statsd_host diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index 3f937c401..8c87d8759 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -66,7 +66,7 @@ Next, open up config.yaml.example. In it, you will find several configuration op ``es_url_prefix``: Optional; URL prefix for the Elasticsearch endpoint. -``statsd_metrics_prefix``: Optional; prefix for statsd metrics. +``statsd_instance_tag``: Optional; prefix for statsd metrics. ``statsd_host``: Optional; statsd host. 
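Read together, the two options belong in the global config file. A minimal, hypothetical excerpt — values are placeholders, and reporting stays disabled unless `statsd_host` is set:

```yaml
# Hypothetical excerpt of the global config.yaml.
statsd_host: dogstatsd           # statsd daemon to send gauges to; UDP port 8125 is hardcoded
statsd_instance_tag: elastalert  # attached as the `elastalert_instance` tag on every metric
```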
diff --git a/elastalert/config.py b/elastalert/config.py index fe26fb391..87c517778 100644 --- a/elastalert/config.py +++ b/elastalert/config.py @@ -21,7 +21,7 @@ 'ES_HOST': 'es_host', 'ES_PORT': 'es_port', 'ES_URL_PREFIX': 'es_url_prefix', - 'STATSD_METRICS_PREFIX': 'statsd_metrics_prefix', + 'STATSD_INSTANCE_TAG': 'statsd_instance_tag', 'STATSD_HOST': 'statsd_host'} env = Env(ES_USE_SSL=bool) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 6eb160d1f..541df181f 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -165,12 +165,10 @@ def __init__(self, args): self.thread_data.num_dupes = 0 self.scheduler = BackgroundScheduler() self.string_multi_field_name = self.conf.get('string_multi_field_name', False) - self.statsd_prefix = os.environ.get('statsd_metrics_prefix', '') - self.statsd_host = os.environ.get('statsd_host', '') + self.statsd_instance_tag = self.conf.get('statsd_instance_tag', '') + self.statsd_host = self.conf.get('statsd_host', '') if self.statsd_host and len(self.statsd_host) > 0: - self.statsd = statsd.StatsClient(host=self.statsd_host, - port=8125, - prefix=self.statsd_prefix) + self.statsd = statsd.StatsClient(host=self.statsd_host, port=8125) else: self.statsd = None self.add_metadata_alert = self.conf.get('add_metadata_alert', False) @@ -1292,10 +1290,10 @@ def handle_rule_execution(self, rule): rule_duration = seconds(endtime - rule.get('original_starttime')) elastalert_logger.info("%s range %s" % (rule['name'], rule_duration)) if self.statsd: - self.statsd.gauge('query.hits', self.thread_data.num_hits, tags={"rule_name": rule['name']}) - self.statsd.gauge('already_seen.hits', self.thread_data.num_dupes,tags={"rule_name": rule['name']}) - self.statsd.gauge('query.matches', num_matches, tags={"rule_name": rule['name']}) - self.statsd.gauge('query.alerts_sent', self.thread_data.alerts_sent, tags={"rule_name": rule['name']}) + self.statsd.gauge('query.hits', self.thread_data.num_hits, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge('already_seen.hits', self.thread_data.num_dupes,tags={"elastalert_instance": self.statsd_instance_tag"rule_name": rule['name']}) + self.statsd.gauge('query.matches', num_matches, tags={"elastalert_instance": self.statsd_instance_tag"rule_name": rule['name']}) + self.statsd.gauge('query.alerts_sent', self.thread_data.alerts_sent, tags={"elastalert_instance": self.statsd_instance_tag"rule_name": rule['name']}) self.thread_data.alerts_sent = 0 From a7a0993a22b305a5235a0f734247f08473fdfe8c Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Wed, 21 Oct 2020 11:22:04 +0300 Subject: [PATCH 0042/1065] fixing statsd reporting --- elastalert/elastalert.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 541df181f..950ed5032 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -1291,9 +1291,9 @@ def handle_rule_execution(self, rule): elastalert_logger.info("%s range %s" % (rule['name'], rule_duration)) if self.statsd: self.statsd.gauge('query.hits', self.thread_data.num_hits, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) - self.statsd.gauge('already_seen.hits', self.thread_data.num_dupes,tags={"elastalert_instance": self.statsd_instance_tag"rule_name": rule['name']}) - self.statsd.gauge('query.matches', num_matches, tags={"elastalert_instance": self.statsd_instance_tag"rule_name": rule['name']}) - 
self.statsd.gauge('query.alerts_sent', self.thread_data.alerts_sent, tags={"elastalert_instance": self.statsd_instance_tag"rule_name": rule['name']}) + self.statsd.gauge('already_seen.hits', self.thread_data.num_dupes,tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge('query.matches', num_matches, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge('query.alerts_sent', self.thread_data.alerts_sent, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) self.thread_data.alerts_sent = 0 From be1ee8b1b51130e7c4b51a6c1685faf3855daf28 Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Wed, 21 Oct 2020 11:34:38 +0300 Subject: [PATCH 0043/1065] fixing statsd reporting --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index c3a03f041..3d4f788aa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -21,4 +21,4 @@ stomp.py>=4.1.17 texttable>=0.8.8 twilio==6.0.0 statsd==3.3.0 -statsd-tags==3.2.1 +statsd-telegraf==3.2.1 From efcbbc39b74bc767662ac97089ced2e0b1bb21f5 Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Wed, 21 Oct 2020 11:39:50 +0300 Subject: [PATCH 0044/1065] fixing statsd reporting --- requirements.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 3d4f788aa..78d085803 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,5 +20,4 @@ requests>=2.0.0 stomp.py>=4.1.17 texttable>=0.8.8 twilio==6.0.0 -statsd==3.3.0 -statsd-telegraf==3.2.1 +statsd-telegraf==3.2.1.post1 From c69f9adcf57a49f9b7aad36d04cfa471d5e3972c Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Wed, 21 Oct 2020 11:41:41 +0300 Subject: [PATCH 0045/1065] fixing statsd reporting --- elastalert/elastalert.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 950ed5032..efec46524 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -1290,10 +1290,13 @@ def handle_rule_execution(self, rule): rule_duration = seconds(endtime - rule.get('original_starttime')) elastalert_logger.info("%s range %s" % (rule['name'], rule_duration)) if self.statsd: - self.statsd.gauge('query.hits', self.thread_data.num_hits, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) - self.statsd.gauge('already_seen.hits', self.thread_data.num_dupes,tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) - self.statsd.gauge('query.matches', num_matches, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) - self.statsd.gauge('query.alerts_sent', self.thread_data.alerts_sent, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + try: + self.statsd.gauge('query.hits', self.thread_data.num_hits, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge('already_seen.hits', self.thread_data.num_dupes,tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge('query.matches', num_matches, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge('query.alerts_sent', self.thread_data.alerts_sent, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + except BaseException as e: + elastalert_logger.error("unable to send 
metrics:\n%s" % str(e)) self.thread_data.alerts_sent = 0 From a972cb4ae57f48367e3d5bf43a789ee2816cca25 Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Wed, 21 Oct 2020 12:01:44 +0300 Subject: [PATCH 0046/1065] fixing statsd reporting --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 78d085803..54978e020 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,4 +20,4 @@ requests>=2.0.0 stomp.py>=4.1.17 texttable>=0.8.8 twilio==6.0.0 -statsd-telegraf==3.2.1.post1 +statsd-tags==3.2.1.post1 From 6388d6768dd6942aaf07925b25491e9db14a790e Mon Sep 17 00:00:00 2001 From: Cheng Liangyu Date: Thu, 22 Oct 2020 21:47:40 +0800 Subject: [PATCH 0047/1065] Setting size to 0 avoids executing the fetch phase of the search making the request more efficient https://www.elastic.co/guide/en/elasticsearch/reference/current/returning-only-agg-results.html --- elastalert/ruletypes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 2f1d2f82c..740e74ea7 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -674,7 +674,7 @@ def get_all_terms(self, args): time_filter = {self.rules['timestamp_field']: {'lt': self.rules['dt_to_ts'](tmp_end), 'gte': self.rules['dt_to_ts'](tmp_start)}} query_template['filter'] = {'bool': {'must': [{'range': time_filter}]}} - query = {'aggs': {'filtered': query_template}} + query = {'aggs': {'filtered': query_template}, 'size': 0} if 'filter' in self.rules: for item in self.rules['filter']: From a9f0d1d8488b02e9e2d96a65d49d70fc12733f0a Mon Sep 17 00:00:00 2001 From: Dennis Boone Date: Wed, 28 Oct 2020 15:19:49 -0400 Subject: [PATCH 0048/1065] Fix for the mapping error reported in #2899. --- elastalert/es_mappings/6/elastalert.json | 1 + 1 file changed, 1 insertion(+) diff --git a/elastalert/es_mappings/6/elastalert.json b/elastalert/es_mappings/6/elastalert.json index 645a67762..2cc97bcfb 100644 --- a/elastalert/es_mappings/6/elastalert.json +++ b/elastalert/es_mappings/6/elastalert.json @@ -29,6 +29,7 @@ "format": "dateOptionalTime" }, "match_body": { + "enabled": "false", "type": "object" }, "aggregate_id": { From 873f41bac13ea6a2264121c2bcd2e047ecb1a4e5 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Wed, 4 Nov 2020 22:44:39 +0900 Subject: [PATCH 0049/1065] Fix Stomp --- elastalert/alerts.py | 1 - 1 file changed, 1 deletion(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index d3fa7518f..d7d8cfced 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -371,7 +371,6 @@ def alert(self, matches): conn = stomp.Connection([(self.stomp_hostname, self.stomp_hostport)], use_ssl=self.stomp_ssl) - conn.start() conn.connect(self.stomp_login, self.stomp_password) # Ensures that the CONNECTED frame is received otherwise, the disconnect call will fail. time.sleep(1) From 66d5d224801793c18e981123363f1237d9f35d1d Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 7 Nov 2020 19:31:26 +0900 Subject: [PATCH 0050/1065] Del the forgotten code of new_style_string_format --- elastalert/alerts.py | 4 ---- tests/alerts_test.py | 26 -------------------------- 2 files changed, 30 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index d3fa7518f..fe261b5aa 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -896,10 +896,6 @@ def __init__(self, *args): logging.warning('Warning! 
You could be vulnerable to shell injection!') self.rule['command'] = [self.rule['command']] - self.new_style_string_format = False - if 'new_style_string_format' in self.rule and self.rule['new_style_string_format']: - self.new_style_string_format = True - def alert(self, matches): # Format the command and arguments try: diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 5cd61ae75..894de728b 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -1072,32 +1072,6 @@ def test_command(): alert.alert([match]) assert mock_popen.called_with('/bin/test/foo.sh', stdin=subprocess.PIPE, shell=True) - # Test command as string with formatted arg (new-style string format) - rule = {'command': '/bin/test/ --arg {match[somefield]}', 'new_style_string_format': True} - alert = CommandAlerter(rule) - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: - alert.alert([match]) - assert mock_popen.called_with('/bin/test --arg foobarbaz', stdin=subprocess.PIPE, shell=False) - - rule = {'command': '/bin/test/ --arg {match[nested][field]}', 'new_style_string_format': True} - alert = CommandAlerter(rule) - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: - alert.alert([match]) - assert mock_popen.called_with('/bin/test --arg 1', stdin=subprocess.PIPE, shell=False) - - # Test command as string without formatted arg (new-style string format) - rule = {'command': '/bin/test/foo.sh', 'new_style_string_format': True} - alert = CommandAlerter(rule) - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: - alert.alert([match]) - assert mock_popen.called_with('/bin/test/foo.sh', stdin=subprocess.PIPE, shell=True) - - rule = {'command': '/bin/test/foo.sh {{bar}}', 'new_style_string_format': True} - alert = CommandAlerter(rule) - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: - alert.alert([match]) - assert mock_popen.called_with('/bin/test/foo.sh {bar}', stdin=subprocess.PIPE, shell=True) - # Test command with pipe_match_json rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], 'pipe_match_json': True} From 814b415de4381e07aed1d148a7ad6d1a9e1e1a62 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 7 Nov 2020 21:44:11 +0900 Subject: [PATCH 0051/1065] document SNS to AWS SNS --- docs/source/elastalert.rst | 2 +- docs/source/ruletypes.rst | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index b1008c3c4..3bd400d0c 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -35,7 +35,7 @@ Currently, we have support built in for these alert types: - Email - JIRA - OpsGenie -- SNS +- AWS SNS - HipChat - Slack - Telegram diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index a198abe63..3fb818a47 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1657,14 +1657,14 @@ Example usage:: Environment: '$VAR' # environment variable Message: { field: message } # field in the first match -SNS -~~~ +AWS SNS +~~~~~~~ -The SNS alerter will send an SNS notification. The body of the notification is formatted the same as with other alerters. -The SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or +The AWS SNS alerter will send an AWS SNS notification. The body of the notification is formatted the same as with other alerters. 
+The AWS SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or via environment variables. See http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html for details. -SNS requires one option: +AWS SNS requires one option: ``sns_topic_arn``: The SNS topic's ARN. For example, ``arn:aws:sns:us-east-1:123456789:somesnstopic`` From c62c1a732b85deb295cd7ac2f9d87fbecf2e5719 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 22 Nov 2020 20:42:26 +0900 Subject: [PATCH 0052/1065] Fix elasticsearch-py versionup test_rule.py error --- elastalert/test_rule.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/elastalert/test_rule.py b/elastalert/test_rule.py index 06100aa0f..af1eaa497 100644 --- a/elastalert/test_rule.py +++ b/elastalert/test_rule.py @@ -83,7 +83,7 @@ def test_file(self, conf, args): # Get one document for schema try: - res = es_client.search(index, size=1, body=query, ignore_unavailable=True) + res = es_client.search(index=index, size=1, body=query, ignore_unavailable=True) except Exception as e: print("Error running your filter:", file=sys.stderr) print(repr(e)[:2048], file=sys.stderr) @@ -109,7 +109,7 @@ def test_file(self, conf, args): five=conf['five'] ) try: - res = es_client.count(index, doc_type=doc_type, body=count_query, ignore_unavailable=True) + res = es_client.count(index=index, doc_type=doc_type, body=count_query, ignore_unavailable=True) except Exception as e: print("Error querying Elasticsearch:", file=sys.stderr) print(repr(e)[:2048], file=sys.stderr) @@ -153,7 +153,7 @@ def test_file(self, conf, args): # Download up to max_query_size (defaults to 10,000) documents to save if (args.save or args.formatted_output) and not args.count: try: - res = es_client.search(index, size=args.max_query_size, body=query, ignore_unavailable=True) + res = es_client.search(index=index, size=args.max_query_size, body=query, ignore_unavailable=True) except Exception as e: print("Error running your filter:", file=sys.stderr) print(repr(e)[:2048], file=sys.stderr) From af6e71affd1caad8385ee35b2c9806ddf310c607 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Wed, 25 Nov 2020 08:25:17 -0500 Subject: [PATCH 0053/1065] Switch out abandoned blist lib for sortedcontainers lib --- elastalert/ruletypes.py | 2 +- requirements.txt | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 2f1d2f82c..7a889e80a 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -3,7 +3,7 @@ import datetime import sys -from blist import sortedlist +from sortedcontainers import SortedKeyList as sortedlist from .util import add_raw_postfix from .util import dt_to_ts diff --git a/requirements.txt b/requirements.txt index c66ca8d79..6f6d6f7f8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ apscheduler>=3.3.0 aws-requests-auth>=0.3.0 -blist>=1.3.6 +sortedcontainers>=2.2.2 boto3>=1.4.4 cffi>=1.11.5 configparser>=3.5.0 diff --git a/setup.py b/setup.py index 30ef9495f..c8f221021 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ install_requires=[ 'apscheduler>=3.3.0', 'aws-requests-auth>=0.3.0', - 'blist>=1.3.6', + 'sortedcontainers>=2.2.2', 'boto3>=1.4.4', 'configparser>=3.5.0', 'croniter>=0.3.16', From ff65ee004ea17bb7963261e9eb79ec2f797f9401 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 10 Dec 2020 00:26:02 +0900 Subject: [PATCH 0054/1065] Kibana 7.10 --- docs/source/ruletypes.rst | 4 ++-- 
elastalert/kibana_discover.py | 2 +- elastalert/schema.yaml | 2 +- tests/kibana_discover_test.py | 12 ++++++------ 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index e2ecfb672..fc4cf0b8e 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -553,9 +553,9 @@ The currently supported versions of Kibana Discover are: - `5.6` - `6.0`, `6.1`, `6.2`, `6.3`, `6.4`, `6.5`, `6.6`, `6.7`, `6.8` -- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8` +- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10` -``kibana_discover_version: '7.8'`` +``kibana_discover_version: '7.10'`` kibana_discover_index_pattern_id ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index b332facce..f846ac6c4 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -14,7 +14,7 @@ kibana_default_timedelta = datetime.timedelta(minutes=10) kibana5_kibana6_versions = frozenset(['5.6', '6.0', '6.1', '6.2', '6.3', '6.4', '6.5', '6.6', '6.7', '6.8']) -kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9']) +kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10']) def generate_kibana_discover_url(rule, match): ''' Creates a link for a kibana discover app. ''' diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index ba265a731..b8bcf8e41 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -217,7 +217,7 @@ properties: ### Kibana Discover App Link generate_kibana_discover_url: {type: boolean} kibana_discover_app_url: {type: string, format: uri} - kibana_discover_version: {type: string, enum: ['7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} + kibana_discover_version: {type: string, enum: ['7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} kibana_discover_index_pattern_id: {type: string, minLength: 1} kibana_discover_columns: {type: array, items: {type: string, minLength: 1}, minItems: 1} kibana_discover_from_timedelta: *timedelta diff --git a/tests/kibana_discover_test.py b/tests/kibana_discover_test.py index ae5c8bca1..a508c25d9 100644 --- a/tests/kibana_discover_test.py +++ b/tests/kibana_discover_test.py @@ -38,7 +38,7 @@ def test_generate_kibana_discover_url_with_kibana_5x_and_6x(kibana_version): assert url == expectedUrl -@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9']) +@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10']) def test_generate_kibana_discover_url_with_kibana_7x(kibana_version): url = generate_kibana_discover_url( rule={ @@ -171,7 +171,7 @@ def test_generate_kibana_discover_url_with_from_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.9', + 'kibana_discover_version': '7.10', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timestamp_field': 'timestamp' @@ -204,7 +204,7 @@ def test_generate_kibana_discover_url_with_from_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 
'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.9', + 'kibana_discover_version': '7.10', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -238,7 +238,7 @@ def test_generate_kibana_discover_url_with_to_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.9', + 'kibana_discover_version': '7.10', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_to_timedelta': timedelta(hours=1), 'timestamp_field': 'timestamp' @@ -271,7 +271,7 @@ def test_generate_kibana_discover_url_with_to_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.9', + 'kibana_discover_version': '7.10', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_to_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -305,7 +305,7 @@ def test_generate_kibana_discover_url_with_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.9', + 'kibana_discover_version': '7.10', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'timeframe': timedelta(minutes=20), 'timestamp_field': 'timestamp' From b7dc75d1160d50c5e0c64e47f80c787279f25121 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Tue, 29 Dec 2020 15:12:09 +0900 Subject: [PATCH 0055/1065] Fix docker test --- Dockerfile-test | 2 ++ requirements-dev.txt | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/Dockerfile-test b/Dockerfile-test index 3c153e644..fb8a78409 100644 --- a/Dockerfile-test +++ b/Dockerfile-test @@ -1,6 +1,8 @@ FROM ubuntu:latest RUN apt-get update && apt-get upgrade -y +RUN apt-get install software-properties-common -y +RUN add-apt-repository ppa:deadsnakes/ppa RUN apt-get -y install build-essential python3.6 python3.6-dev python3-pip libssl-dev git WORKDIR /home/elastalert diff --git a/requirements-dev.txt b/requirements-dev.txt index 558761d9e..d15887c01 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,4 +6,4 @@ pylint<1.4 pytest<3.3.0 setuptools sphinx_rtd_theme -tox<2.0 +tox==3.20.1 From 474c69cd4084cd8a304afda9fa6a0af8d4adfffc Mon Sep 17 00:00:00 2001 From: Naoyuki Sano Date: Mon, 4 Jan 2021 21:23:43 +0900 Subject: [PATCH 0056/1065] Update loaders.py --- elastalert/loaders.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 08f06f648..f0751946c 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -77,8 +77,10 @@ class RulesLoader(object): 'servicenow': alerts.ServiceNowAlerter, 'alerta': alerts.AlertaAlerter, 'post': alerts.HTTPPostAlerter, - 'hivealerter': alerts.HiveAlerter, - 'linenotify': alerts.LineNotifyAlerter + 'pagertree': alerts.PagerTreeAlerter, + 'linenotify': alerts.LineNotifyAlerter, + 'hivealerter': alerts.HiveAlerter + } # A partial ordering of alert types. 
Relative order will be preserved in the resulting alerts list From d38ebcd0184e2622532a6b8e92108ed5e1ec0fb5 Mon Sep 17 00:00:00 2001 From: Naoyuki Sano Date: Tue, 12 Jan 2021 08:10:47 +0900 Subject: [PATCH 0057/1065] Update loaders.py --- elastalert/loaders.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index f0751946c..01c178e3b 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -79,8 +79,7 @@ class RulesLoader(object): 'post': alerts.HTTPPostAlerter, 'pagertree': alerts.PagerTreeAlerter, 'linenotify': alerts.LineNotifyAlerter, - 'hivealerter': alerts.HiveAlerter - + 'hivealerter': alerts.HiveAlerter } # A partial ordering of alert types. Relative order will be preserved in the resulting alerts list From f77b746f3416b92d189675325714954e3413ca9a Mon Sep 17 00:00:00 2001 From: Naoyuki Sano Date: Tue, 12 Jan 2021 08:26:20 +0900 Subject: [PATCH 0058/1065] Update requirements.txt --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 91acfd3fb..f2c11da5b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,10 +5,10 @@ boto3>=1.4.4 cffi>=1.11.5 configparser>=3.5.0 croniter>=0.3.16 -elasticsearch>=7.0.0 +elasticsearch==7.0.0 envparse>=0.2.0 exotel>=0.1.3 -jira>=1.0.10,<1.0.15 +jira>=2.0.0 jsonschema>=3.0.2 mock>=2.0.0 prison>=0.1.2 From d22e3e531d49b46b4840e674ce9c45cee7d137d2 Mon Sep 17 00:00:00 2001 From: Naoyuki Sano Date: Tue, 12 Jan 2021 08:28:55 +0900 Subject: [PATCH 0059/1065] Update setup.py --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 184d09bc1..4bc0be730 100644 --- a/setup.py +++ b/setup.py @@ -40,6 +40,7 @@ 'jsonschema>=3.0.2', 'mock>=2.0.0', 'prison>=0.1.2', + 'py-zabbix>=1.1.3', 'PyStaticConfiguration>=0.10.3', 'python-dateutil>=2.6.0,<2.7.0', 'PyYAML>=5.1', @@ -47,7 +48,6 @@ 'stomp.py>=4.1.17', 'texttable>=0.8.8', 'twilio>=6.0.0,<6.1', - 'cffi>=1.11.5', - 'py-zabbix>=1.1.3' + 'cffi>=1.11.5' ] ) From 84c93c88197910e6fa97eba8b1a22d6b0a6d5042 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Wed, 13 Jan 2021 01:19:29 +0900 Subject: [PATCH 0060/1065] Fix documents --- README.md | 13 ++++-- docs/source/elastalert.rst | 15 ++++++- docs/source/ruletypes.rst | 81 +++++++++++++++++++++++++++----------- 3 files changed, 81 insertions(+), 28 deletions(-) diff --git a/README.md b/README.md index 99acc02e7..cbaec79dd 100644 --- a/README.md +++ b/README.md @@ -42,20 +42,27 @@ Currently, we have built-in support for the following alert types: - Email - JIRA - OpsGenie -- Commands +- Command - HipChat - MS Teams - Slack +- Mattermost - Telegram - GoogleChat - AWS SNS -- VictorOps +- Splunk On-Call (Formerly VictorOps) - PagerDuty - PagerTree - Exotel - Twilio - Gitter +- ServiceNow +- Debug +- Stomp +- Alerta +- HTTP POST - Line Notify +- TheHive - Zabbix Additional rule types and alerts can be easily imported or written. 
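That last sentence is concrete: a custom alerter is a small subclass of the `Alerter` base class in elastalert/alerts.py. A minimal sketch follows — the class name, module path, and option name are illustrative, not part of the project:

```python
# Toy custom alerter: prints each match to stdout.
# Illustrative only; the class and option names are made up for this sketch.
from elastalert.alerts import Alerter, BasicMatchString

class PrintAlerter(Alerter):
    # Rule options that must be present for this alerter to load.
    required_options = frozenset(['print_prefix'])

    def alert(self, matches):
        # `matches` is the list of matching documents for this alert.
        for match in matches:
            print(self.rule['print_prefix'], str(BasicMatchString(self.rule, match)))

    def get_info(self):
        # Recorded in the writeback index alongside the alert.
        return {'type': 'print', 'print_prefix': self.rule['print_prefix']}
```

A rule would then reference it by module path in its `alert:` list (e.g. `alert: ["my_alerters.PrintAlerter"]`), the same list the built-in names above are drawn from.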
@@ -115,7 +122,7 @@ A [Dockerized version](https://github.com/bitsensor/elastalert) of ElastAlert in ```bash git clone https://github.com/bitsensor/elastalert.git; cd elastalert -docker run -d -p 3030:3030 \ +docker run -d -p 3030:3030 -p 3333:3333 \ -v `pwd`/config/elastalert.yaml:/opt/elastalert/config.yaml \ -v `pwd`/config/config.json:/opt/elastalert-server/config/config.json \ -v `pwd`/rules:/opt/elastalert/rules \ diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index b1008c3c4..95e049b88 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -35,14 +35,27 @@ Currently, we have support built in for these alert types: - Email - JIRA - OpsGenie -- SNS +- AWS SNS +- MS Teams - HipChat - Slack +- Mattermost - Telegram - GoogleChat +- Splunk On-Call (Formerly VictorOps) +- PagerDuty +- PagerTree +- Exotel +- Twilio +- Gitter +- ServiceNow - Debug - Stomp +- Alerta +- HTTP POST +- Line Notify - TheHive +- Zabbix Additional rule types and alerts can be easily imported or written. (See :ref:`Writing rule types ` and :ref:`Writing alerts `) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index ff3763712..200df5f85 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1620,7 +1620,7 @@ OpsGenie alerter will create an alert which can be used to notify Operations peo integration must be created in order to acquire the necessary ``opsgenie_key`` rule variable. Currently the OpsGenieAlerter only creates an alert, however it could be extended to update or close existing alerts. -It is necessary for the user to create an OpsGenie Rest HTTPS API `integration page `_ in order to create alerts. +It is necessary for the user to create an OpsGenie Rest HTTPS API `integration page `_ in order to create alerts. The OpsGenie alert requires one option: @@ -1632,7 +1632,7 @@ Optional: ``opsgenie_recipients``: A list OpsGenie recipients who will be notified by the alert. ``opsgenie_recipients_args``: Map of arguments used to format opsgenie_recipients. -``opsgenie_default_recipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccesful. +``opsgenie_default_receipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccesful. ``opsgenie_teams``: A list of OpsGenie teams to notify (useful for schedules with escalation). ``opsgenie_teams_args``: Map of arguments used to format opsgenie_teams (useful for assigning the alerts to teams based on some data) ``opsgenie_default_teams``: List of default teams to notify when the formatting of opsgenie_teams is unsuccesful. @@ -1650,6 +1650,8 @@ Optional: ``opsgenie_details``: Map of custom key/value pairs to include in the alert's details. The value can sourced from either fields in the first match, environment variables, or a constant value. +``opsgenie_proxy``: By default ElastAlert will not use a network proxy to send notifications to OpsGenie. Set this option using ``hostname:port`` if you need to use a proxy. + Example usage:: opsgenie_details: @@ -1657,11 +1659,11 @@ Example usage:: Environment: '$VAR' # environment variable Message: { field: message } # field in the first match -SNS -~~~ +AWS SNS +~~~~~~~ -The SNS alerter will send an SNS notification. The body of the notification is formatted the same as with other alerters. -The SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or +The AWS SNS alerter will send an SNS notification. 
The body of the notification is formatted the same as with other alerters. +The AWS SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or via environment variables. See http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html for details. SNS requires one option: @@ -1670,13 +1672,22 @@ SNS requires one option: Optional: -``aws_access_key``: An access key to connect to SNS with. +``aws_access_key_id``: An access key to connect to SNS with. -``aws_secret_key``: The secret key associated with the access key. +``aws_secret_access_key``: The secret key associated with the access key. ``aws_region``: The AWS region in which the SNS resource is located. Default is us-east-1 -``profile``: The AWS profile to use. If none specified, the default will be used. +``aws_profile``: The AWS profile to use. If none specified, the default will be used. + +Example usage:: + + alert: + - sns: + aws_region: 'us-east-1' # You must nest aws_region within your alert configuration so it is not used to sign AWS requests. + sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic' + aws_access_key_id: 'XXXXXXXXXXXXXXXXXX'' + aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY' HipChat ~~~~~~~ @@ -1776,7 +1787,7 @@ Optional: ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored. ``slack_icon_url_override``: By default ElastAlert will use the :ghost: emoji when posting to the channel. You can provide icon_url to use custom image. -Provide absolute address of the pciture, for example: http://some.address.com/image.jpg . +Provide absolute address of the pciture. ``slack_msg_color``: By default the alert will be posted with the 'danger' color. You can also use 'good' or 'warning' colors. @@ -1784,6 +1795,8 @@ Provide absolute address of the pciture, for example: http://some.address.com/im ``slack_alert_fields``: You can add additional fields to your slack alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`. +``slack_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to False if you want to ignore SSL errors. + ``slack_title``: Sets a title for the message, this shows up as a blue text at the start of the message ``slack_title_link``: You can add a link in your Slack notification by setting this to a valid URL. Requires slack_title to be set. @@ -1796,6 +1809,8 @@ Provide absolute address of the pciture, for example: http://some.address.com/im ``slack_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. +``slack_ca_certs``: path to a CA cert bundle to use to verify SSL connections. + Mattermost ~~~~~~~~~~ @@ -1816,7 +1831,7 @@ Optional: ``mattermost_channel_override``: Incoming webhooks have a default channel, but it can be overridden. A public channel can be specified "#other-channel", and a Direct Message with "@username". ``mattermost_icon_url_override``: By default ElastAlert will use the default webhook icon when posting to the channel. You can provide icon_url to use custom image. -Provide absolute address of the picture (for example: http://some.address.com/image.jpg) or Base64 data url. +Provide absolute address of the picture or Base64 data url. 
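Taken together, a hedged sketch of a minimal Mattermost alert section might read as follows (the webhook URL and icon address are illustrative placeholders; only options named in this documentation are used):

```yaml
# Illustrative sketch; URLs are placeholders.
alert:
- mattermost
mattermost_webhook_url: "https://mattermost.example.com/hooks/xxxxxxxx"
mattermost_username_override: "elastalert"
mattermost_channel_override: "#alerts"
mattermost_icon_url_override: "https://example.com/icons/alert.png"   # absolute picture address
```

The remaining ``mattermost_*`` options described below slot into the same block.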
``mattermost_msg_pretext``: You can set the message attachment pretext using this option. @@ -1832,7 +1847,7 @@ Telegram alerter will send a notification to a predefined Telegram username or c The alerter requires the following two options: -``telegram_bot_token``: The token is a string along the lines of ``110201543:AAHdqTcvCH1vGWJxfSeofSAs0K5PALDsaw`` that will be required to authorize the bot and send requests to the Bot API. You can learn about obtaining tokens and generating new ones in this document https://core.telegram.org/bots#botfather +``telegram_bot_token``: The token is a string along the lines of ``110201543:AAHdqTcvCH1vGWJxfSeofSAs0K5PALDsaw`` that will be required to authorize the bot and send requests to the Bot API. You can learn about obtaining tokens and generating new ones in this document https://core.telegram.org/bots#6-botfather ``telegram_room_id``: Unique identifier for the target chat or username of the target channel using telegram chat_id (in the format "-xxxxxxxx") @@ -1842,6 +1857,10 @@ Optional: ``telegram_proxy``: By default ElastAlert will not use a network proxy to send notifications to Telegram. Set this option using ``hostname:port`` if you need to use a proxy. +``telegram_proxy_login``: The Telegram proxy auth username. + +``telegram_proxy_pass``: The Telegram proxy auth password. + GoogleChat ~~~~~~~~~~ GoogleChat alerter will send a notification to a predefined GoogleChat channel. The body of the notification is formatted the same as with other alerters. @@ -1893,7 +1912,7 @@ V2 API Options (Optional): These options are specific to the PagerDuty V2 API -See https://v2.developer.pagerduty.com/docs/send-an-event-events-api-v2 +See https://developer.pagerduty.com/docs/events-api-v2/trigger-events/ ``pagerduty_api_version``: Defaults to `v1`. Set to `v2` to enable the PagerDuty V2 Event API. @@ -1924,6 +1943,8 @@ The alerter requires the following options: ``pagertree_integration_url``: URL generated by PagerTree for the integration. +``pagertree_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerTree. Set this option using hostname:port if you need to use a proxy. + Exotel ~~~~~~ @@ -1935,7 +1956,7 @@ The alerter requires the following option: ``exotel_auth_token``: Auth token assosiated with your Exotel account. -If you don't know how to find your accound sid and auth token, refer - http://support.exotel.in/support/solutions/articles/3000023019-how-to-find-my-exotel-token-and-exotel-sid- +If you don't know how to find your accound sid and auth token, refer - https://support.exotel.com/support/solutions/articles/3000023019-how-to-find-my-exotel-token-and-exotel-sid ``exotel_to_number``: The phone number where you would like send the notification. @@ -1962,26 +1983,26 @@ The alerter requires the following option: ``twilio_from_number``: Your twilio phone number from which message will be sent. -VictorOps -~~~~~~~~~ +Splunk On-Call (Formerly VictorOps) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -VictorOps alerter will trigger an incident to a predefined VictorOps routing key. The body of the notification is formatted the same as with other alerters. +Splunk On-Call (Formerly VictorOps) alerter will trigger an incident to a predefined Splunk On-Call (Formerly VictorOps) routing key. The body of the notification is formatted the same as with other alerters. The alerter requires the following options: ``victorops_api_key``: API key generated under the 'REST Endpoint' in the Integrations settings. 
-``victorops_routing_key``: VictorOps routing key to route the alert to. +``victorops_routing_key``: Splunk On-Call (Formerly VictorOps) routing key to route the alert to. -``victorops_message_type``: VictorOps field to specify severity level. Must be one of the following: INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY +``victorops_message_type``: Splunk On-Call (Formerly VictorOps) field to specify severity level. Must be one of the following: INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY Optional: -``victorops_entity_id``: The identity of the incident used by VictorOps to correlate incidents throughout the alert lifecycle. If not defined, VictorOps will assign a random string to each alert. +``victorops_entity_id``: The identity of the incident used by Splunk On-Call (Formerly VictorOps) to correlate incidents throughout the alert lifecycle. If not defined, Splunk On-Call (Formerly VictorOps) will assign a random string to each alert. ``victorops_entity_display_name``: Human-readable name of alerting entity to summarize incidents without affecting the life-cycle workflow. -``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to VictorOps. Set this option using ``hostname:port`` if you need to use a proxy. +``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to Splunk On-Call (Formerly VictorOps). Set this option using ``hostname:port`` if you need to use a proxy. Gitter ~~~~~~ @@ -2006,7 +2027,7 @@ The ServiceNow alerter will create a ne Incident in ServiceNow. The body of the The alerter requires the following options: -``servicenow_rest_url``: The ServiceNow RestApi url, this will look like https://instancename.service-now.com/api/now/v1/table/incident +``servicenow_rest_url``: The ServiceNow RestApi url, this will look like https://developer.servicenow.com/dev.do#!/reference/api/orlando/rest/c_TableAPI#r_TableAPI-POST ``username``: The ServiceNow Username to access the api. @@ -2043,12 +2064,20 @@ Stomp This alert type will use the STOMP protocol in order to push a message to a broker like ActiveMQ or RabbitMQ. The message body is a JSON string containing the alert details. The default values will work with a pristine ActiveMQ installation. -Optional: +The alerter requires the following options: ``stomp_hostname``: The STOMP host to use, defaults to localhost. + ``stomp_hostport``: The STOMP port to use, defaults to 61613. + ``stomp_login``: The STOMP login to use, defaults to admin. + ``stomp_password``: The STOMP password to use, defaults to admin. + +Optional: + +``stomp_ssl``: Connect the STOMP host using TLS, defaults to False. + ``stomp_destination``: The STOMP destination to use, defaults to /queue/ALERT The stomp_destination field depends on the broker, the /queue/ALERT example is the nomenclature used by ActiveMQ. Each broker has its own logic. @@ -2057,7 +2086,7 @@ Alerta ~~~~~~ Alerta alerter will post an alert in the Alerta server instance through the alert API endpoint. -See http://alerta.readthedocs.io/en/latest/api/alert.html for more details on the Alerta JSON format. +See https://docs.alerta.io/en/latest/api/alert.html for more details on the Alerta JSON format. For Alerta 5.0 @@ -2073,6 +2102,8 @@ Optional: ``alerta_use_match_timestamp``: If true, it will use the timestamp of the first match as the ``createTime`` of the alert. otherwise, the current server time is used. +``alerta_api_skip_ssl``: Defaults to False. 
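To make the new flag concrete, a minimal Alerta alert section might look like the following sketch (the endpoint, key, and severity are illustrative placeholders; only ``alerta_api_skip_ssl`` comes from this change):

```yaml
# Illustrative sketch; endpoint and key are placeholders.
alert:
- alerta
alerta_api_url: "https://alerta.example.com/api/alert"
alerta_api_key: "0123456789abcdef"
alerta_api_skip_ssl: true        # e.g. for a self-signed certificate on the Alerta endpoint
alerta_severity: "warning"
```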
+ ``alert_missing_value``: Text to replace any match field not found when formating strings. Defaults to ````. The following options dictate the values of the API JSON payload: @@ -2201,6 +2232,8 @@ Optional: ``hive_proxies``: Proxy configuration. +``hive_verify``: Wether or not to enable SSL certificate validation. Defaults to False. + ``hive_observable_data_mapping``: If needed, matched data fields can be mapped to TheHive observable types using python string formatting. Example usage:: From 9eaa68ef92d54718ab245b4b6525dac266c213d0 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Wed, 13 Jan 2021 21:43:57 +0900 Subject: [PATCH 0061/1065] Undo readme.md and elastalert.rst --- README.md | 13 +++---------- docs/source/elastalert.rst | 15 +-------------- docs/source/ruletypes.rst | 23 +++++++---------------- 3 files changed, 11 insertions(+), 40 deletions(-) diff --git a/README.md b/README.md index cbaec79dd..99acc02e7 100644 --- a/README.md +++ b/README.md @@ -42,27 +42,20 @@ Currently, we have built-in support for the following alert types: - Email - JIRA - OpsGenie -- Command +- Commands - HipChat - MS Teams - Slack -- Mattermost - Telegram - GoogleChat - AWS SNS -- Splunk On-Call (Formerly VictorOps) +- VictorOps - PagerDuty - PagerTree - Exotel - Twilio - Gitter -- ServiceNow -- Debug -- Stomp -- Alerta -- HTTP POST - Line Notify -- TheHive - Zabbix Additional rule types and alerts can be easily imported or written. @@ -122,7 +115,7 @@ A [Dockerized version](https://github.com/bitsensor/elastalert) of ElastAlert in ```bash git clone https://github.com/bitsensor/elastalert.git; cd elastalert -docker run -d -p 3030:3030 -p 3333:3333 \ +docker run -d -p 3030:3030 \ -v `pwd`/config/elastalert.yaml:/opt/elastalert/config.yaml \ -v `pwd`/config/config.json:/opt/elastalert-server/config/config.json \ -v `pwd`/rules:/opt/elastalert/rules \ diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 95e049b88..b1008c3c4 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -35,27 +35,14 @@ Currently, we have support built in for these alert types: - Email - JIRA - OpsGenie -- AWS SNS -- MS Teams +- SNS - HipChat - Slack -- Mattermost - Telegram - GoogleChat -- Splunk On-Call (Formerly VictorOps) -- PagerDuty -- PagerTree -- Exotel -- Twilio -- Gitter -- ServiceNow - Debug - Stomp -- Alerta -- HTTP POST -- Line Notify - TheHive -- Zabbix Additional rule types and alerts can be easily imported or written. (See :ref:`Writing rule types ` and :ref:`Writing alerts `) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 200df5f85..7d37f226b 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1659,11 +1659,11 @@ Example usage:: Environment: '$VAR' # environment variable Message: { field: message } # field in the first match -AWS SNS -~~~~~~~ +SNS +~~~ -The AWS SNS alerter will send an SNS notification. The body of the notification is formatted the same as with other alerters. -The AWS SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or +The SNS alerter will send an SNS notification. The body of the notification is formatted the same as with other alerters. +The SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or via environment variables. See http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html for details. 
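For orientation, a minimal SNS alert section consistent with the option names documented just below might look like this sketch (the topic ARN and region are illustrative placeholders):

```yaml
# Illustrative sketch; the ARN is a placeholder.
alert:
- sns
sns_topic_arn: "arn:aws:sns:us-east-1:123456789012:example-topic"
aws_region: "us-east-1"
```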
SNS requires one option: @@ -1672,22 +1672,13 @@ SNS requires one option: Optional: -``aws_access_key_id``: An access key to connect to SNS with. +``aws_access_key``: An access key to connect to SNS with. -``aws_secret_access_key``: The secret key associated with the access key. +``aws_secret_key``: The secret key associated with the access key. ``aws_region``: The AWS region in which the SNS resource is located. Default is us-east-1 -``aws_profile``: The AWS profile to use. If none specified, the default will be used. - -Example usage:: - - alert: - - sns: - aws_region: 'us-east-1' # You must nest aws_region within your alert configuration so it is not used to sign AWS requests. - sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic' - aws_access_key_id: 'XXXXXXXXXXXXXXXXXX'' - aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY' +``profile``: The AWS profile to use. If none specified, the default will be used. HipChat ~~~~~~~ From 29ea671cd05e6ad6807a26a52bc24af6e2ddd261 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 14 Jan 2021 00:22:40 +0900 Subject: [PATCH 0062/1065] Remove hipchat & Fix loaders.py --- README.md | 1 - docs/source/elastalert.rst | 1 - docs/source/ruletypes.rst | 37 ---------------- elastalert/alerts.py | 86 -------------------------------------- elastalert/loaders.py | 10 +---- elastalert/schema.yaml | 9 ---- tests/alerts_test.py | 58 ------------------------- 7 files changed, 1 insertion(+), 201 deletions(-) diff --git a/README.md b/README.md index 99acc02e7..6d91859ef 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,6 @@ Currently, we have built-in support for the following alert types: - JIRA - OpsGenie - Commands -- HipChat - MS Teams - Slack - Telegram diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 3bd400d0c..2d962f1d0 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -36,7 +36,6 @@ Currently, we have support built in for these alert types: - JIRA - OpsGenie - AWS SNS -- HipChat - Slack - Telegram - GoogleChat diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 27e1eefbd..7341e7a84 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1709,43 +1709,6 @@ Example When to use aws_profile usage:: sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic' sns_aws_profile: 'default' -HipChat -~~~~~~~ - -HipChat alerter will send a notification to a predefined HipChat room. The body of the notification is formatted the same as with other alerters. - -The alerter requires the following two options: - -``hipchat_auth_token``: The randomly generated notification token created by HipChat. Go to https://XXXXX.hipchat.com/account/api and use -'Create new token' section, choosing 'Send notification' in Scopes list. - -``hipchat_room_id``: The id associated with the HipChat room you want to send the alert to. Go to https://XXXXX.hipchat.com/rooms and choose -the room you want to post to. The room ID will be the numeric part of the URL. - -``hipchat_msg_color``: The color of the message background that is sent to HipChat. May be set to green, yellow or red. Default is red. - -``hipchat_domain``: The custom domain in case you have HipChat own server deployment. Default is api.hipchat.com. - -``hipchat_ignore_ssl_errors``: Ignore TLS errors (self-signed certificates, etc.). Default is false. - -``hipchat_proxy``: By default ElastAlert will not use a network proxy to send notifications to HipChat. Set this option using ``hostname:port`` if you need to use a proxy. 
-
-``hipchat_notify``: When set to true, triggers a hipchat bell as if it were a user. Default is true.
-
-``hipchat_from``: When humans report to hipchat, a timestamp appears next to their name. For bots, the name is the name of the token. The from, instead of a timestamp, defaults to empty unless set, which you can do here. This is optional.
-
-``hipchat_message_format``: Determines how the message is treated by HipChat and rendered inside HipChat applications
-html - Message is rendered as HTML and receives no special treatment. Must be valid HTML and entities must be escaped (e.g.: '&amp;' instead of '&'). May contain basic tags: a, b, i, strong, em, br, img, pre, code, lists, tables.
-text - Message is treated just like a message sent by a user. Can include @mentions, emoticons, pastes, and auto-detected URLs (Twitter, YouTube, images, etc).
-Valid values: html, text.
-Defaults to 'html'.
-
-``hipchat_mentions``: When using a ``html`` message format, it's not possible to mentions specific users using the ``@user`` syntax.
-In that case, you can set ``hipchat_mentions`` to a list of users which will be first mentioned using a single text message, then the normal ElastAlert message will be sent to Hipchat.
-If set, it will mention the users, no matter if the original message format is set to HTML or text.
-Valid values: list of strings.
-Defaults to ``[]``.
-
 MS Teams
 ~~~~~~~~
 
diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index 9ef852948..294417016 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -966,92 +966,6 @@ def alert(self, matches):
         elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn))
 
 
-class HipChatAlerter(Alerter):
-    """ Creates a HipChat room notification for each alert """
-    required_options = frozenset(['hipchat_auth_token', 'hipchat_room_id'])
-
-    def __init__(self, rule):
-        super(HipChatAlerter, self).__init__(rule)
-        self.hipchat_msg_color = self.rule.get('hipchat_msg_color', 'red')
-        self.hipchat_message_format = self.rule.get('hipchat_message_format', 'html')
-        self.hipchat_auth_token = self.rule['hipchat_auth_token']
-        self.hipchat_room_id = self.rule['hipchat_room_id']
-        self.hipchat_domain = self.rule.get('hipchat_domain', 'api.hipchat.com')
-        self.hipchat_ignore_ssl_errors = self.rule.get('hipchat_ignore_ssl_errors', False)
-        self.hipchat_notify = self.rule.get('hipchat_notify', True)
-        self.hipchat_from = self.rule.get('hipchat_from', '')
-        self.url = 'https://%s/v2/room/%s/notification?auth_token=%s' % (
-            self.hipchat_domain, self.hipchat_room_id, self.hipchat_auth_token)
-        self.hipchat_proxy = self.rule.get('hipchat_proxy', None)
-
-    def create_alert_body(self, matches):
-        body = super(HipChatAlerter, self).create_alert_body(matches)
-
-        # HipChat sends 400 bad request on messages longer than 10000 characters
-        if self.hipchat_message_format == 'html':
-            # Use appropriate line ending for text/html
-            br = '<br/>'
-            body = body.replace('\n', br)
-
-            truncated_message = '<br/> ...(truncated)'
-            truncate_to = 10000 - len(truncated_message)
-        else:
-            truncated_message = '..(truncated)'
-            truncate_to = 10000 - len(truncated_message)
-
-        if (len(body) > 9999):
-            body = body[:truncate_to] + truncated_message
-
-        return body
-
-    def alert(self, matches):
-        body = self.create_alert_body(matches)
-
-        # Post to HipChat
-        headers = {'content-type': 'application/json'}
-        # set https proxy, if it was provided
-        proxies = {'https': self.hipchat_proxy} if self.hipchat_proxy else None
-        payload = {
-            'color': self.hipchat_msg_color,
-            'message': body,
-            'message_format': self.hipchat_message_format,
-            'notify': self.hipchat_notify,
-            'from': self.hipchat_from
-        }
-
-        try:
-            if self.hipchat_ignore_ssl_errors:
-                requests.packages.urllib3.disable_warnings()
-
-            if self.rule.get('hipchat_mentions', []):
-                ping_users = self.rule.get('hipchat_mentions', [])
-                ping_msg = payload.copy()
-                ping_msg['message'] = "ping {}".format(
-                    ", ".join("@{}".format(user) for user in ping_users)
-                )
-                ping_msg['message_format'] = "text"
-
-                response = requests.post(
-                    self.url,
-                    data=json.dumps(ping_msg, cls=DateTimeEncoder),
-                    headers=headers,
-                    verify=not self.hipchat_ignore_ssl_errors,
-                    proxies=proxies)
-
-            response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers,
-                                     verify=not self.hipchat_ignore_ssl_errors,
-                                     proxies=proxies)
-            warnings.resetwarnings()
-            response.raise_for_status()
-        except RequestException as e:
-            raise EAException("Error posting to HipChat: %s" % e)
-        elastalert_logger.info("Alert sent to HipChat room %s" % self.hipchat_room_id)
-
-    def get_info(self):
-        return {'type': 'hipchat',
-                'hipchat_room_id': self.hipchat_room_id}
-
-
 class MsTeamsAlerter(Alerter):
     """ Creates a Microsoft Teams Conversation Message for each alert """
     required_options = frozenset(['ms_teams_webhook_url', 'ms_teams_alert_summary'])
diff --git a/elastalert/loaders.py b/elastalert/loaders.py
index 4ed8ccc7f..0c3b13d71 100644
--- a/elastalert/loaders.py
+++ b/elastalert/loaders.py
@@ -62,7 +62,6 @@ class RulesLoader(object):
         'debug': alerts.DebugAlerter,
         'command': alerts.CommandAlerter,
         'sns': alerts.SnsAlerter,
-        'hipchat': alerts.HipChatAlerter,
         'ms_teams': alerts.MsTeamsAlerter,
         'slack': alerts.SlackAlerter,
         'mattermost': alerts.MattermostAlerter,
@@ -78,7 +77,7 @@ class RulesLoader(object):
         'post': alerts.HTTPPostAlerter,
         'pagertree': alerts.PagerTreeAlerter,
         'linenotify': alerts.LineNotifyAlerter,
-        'hivealerter': alerts.HiveAlerter
+        'hivealerter': alerts.HiveAlerter
     }
 
     # A partial ordering of alert types.
Relative order will be preserved in the resulting alerts list @@ -316,13 +315,6 @@ def _dt_to_ts_with_format(dt): rule.setdefault('client_cert', conf.get('client_cert')) rule.setdefault('client_key', conf.get('client_key')) - # Set HipChat options from global config - rule.setdefault('hipchat_msg_color', 'red') - rule.setdefault('hipchat_domain', 'api.hipchat.com') - rule.setdefault('hipchat_notify', True) - rule.setdefault('hipchat_from', '') - rule.setdefault('hipchat_ignore_ssl_errors', False) - # Make sure we have required options if self.required_locals - frozenset(list(rule.keys())): raise EAException('Missing required option(s): %s' % (', '.join(self.required_locals - frozenset(list(rule.keys()))))) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index eaff142f9..eff1bbac5 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -261,15 +261,6 @@ properties: jira_max_age: {type: number} jira_watchers: *arrayOfString - ### HipChat - hipchat_auth_token: {type: string} - hipchat_room_id: {type: [string, integer]} - hipchat_domain: {type: string} - hipchat_ignore_ssl_errors: {type: boolean} - hipchat_notify: {type: boolean} - hipchat_from: {type: string} - hipchat_mentions: {type: array, items: {type: string}} - ### Slack slack_webhook_url: *arrayOfString slack_username_override: {type: string} diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 59ee5746d..e823d0965 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -13,7 +13,6 @@ from elastalert.alerts import BasicMatchString from elastalert.alerts import CommandAlerter from elastalert.alerts import EmailAlerter -from elastalert.alerts import HipChatAlerter from elastalert.alerts import HTTPPostAlerter from elastalert.alerts import JiraAlerter from elastalert.alerts import JiraFormattedMatchString @@ -2085,63 +2084,6 @@ def test_resolving_rule_references(ea): assert 'the_owner' == alert.rule['nested_dict']['nested_owner'] -def test_hipchat_body_size_limit_text(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'hipchat_auth_token': 'token', - 'hipchat_room_id': 'room_id', - 'hipchat_message_format': 'text', - 'alert_subject': 'Cool subject', - 'alert_text': 'Alert: we found something.\n\n{message}', - 'alert_text_type': 'alert_text_only', - 'alert': [], - 'alert_text_kw': { - '@timestamp': 'time', - 'message': 'message', - }, - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HipChatAlerter(rule) - match = { - '@timestamp': '2018-01-01T00:00:00', - 'message': 'foo bar\n' * 5000, - } - body = alert.create_alert_body([match]) - - assert len(body) <= 10000 - - -def test_hipchat_body_size_limit_html(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'hipchat_auth_token': 'token', - 'hipchat_room_id': 'room_id', - 'hipchat_message_format': 'html', - 'alert_subject': 'Cool subject', - 'alert_text': 'Alert: we found something.\n\n{message}', - 'alert_text_type': 'alert_text_only', - 'alert': [], - 'alert_text_kw': { - '@timestamp': 'time', - 'message': 'message', - }, - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HipChatAlerter(rule) - match = { - '@timestamp': '2018-01-01T00:00:00', - 'message': 'foo bar\n' * 5000, - } - - body = alert.create_alert_body([match]) - - assert len(body) <= 10000 - - def test_alerta_no_auth(ea): rule = { 'name': 'Test Alerta rule!', From b30a58ee10f7a3ca07dd23d3869618f32478b2a6 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 14 Jan 2021 02:48:30 +0900 Subject: [PATCH 0063/1065] 
Typo in example_rules/ssh.yaml --- example_rules/ssh.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example_rules/ssh.yaml b/example_rules/ssh.yaml index 7af890784..a7147217b 100644 --- a/example_rules/ssh.yaml +++ b/example_rules/ssh.yaml @@ -1,5 +1,5 @@ # Rule name, must be unique - name: SSH abuse (ElastAlert 3.0.1) - 2 +name: SSH abuse (ElastAlert 3.0.1) - 2 # Alert on x events in y seconds type: frequency From a9a417c54d9a34de62ff8df5c2f217d5a26489b2 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 14 Jan 2021 02:50:20 +0900 Subject: [PATCH 0064/1065] Remove duplicate property in example config file --- config.yaml.example | 1 - 1 file changed, 1 deletion(-) diff --git a/config.yaml.example b/config.yaml.example index 9d9176382..c4921120a 100644 --- a/config.yaml.example +++ b/config.yaml.example @@ -48,7 +48,6 @@ es_port: 9200 # Use SSL authentication with client certificates client_cert must be # a pem file containing both cert and key for client -#verify_certs: True #ca_certs: /path/to/cacert.pem #client_cert: /path/to/client_cert.pem #client_key: /path/to/client_key.key From 4b928d763825900f7a1e50d1b8c72ddd9d69d591 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 14 Jan 2021 02:53:35 +0900 Subject: [PATCH 0065/1065] Fixed the logging property in config.yaml.example --- config.yaml.example | 60 ++++++++++++++++++++++----------------------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/config.yaml.example b/config.yaml.example index c4921120a..89db954be 100644 --- a/config.yaml.example +++ b/config.yaml.example @@ -77,38 +77,38 @@ alert_time_limit: # logline: # format: '%(asctime)s %(levelname)+8s %(name)+20s %(message)s' # -# handlers: -# console: -# class: logging.StreamHandler -# formatter: logline -# level: DEBUG -# stream: ext://sys.stderr +# handlers: +# console: +# class: logging.StreamHandler +# formatter: logline +# level: DEBUG +# stream: ext://sys.stderr # -# file: -# class : logging.FileHandler -# formatter: logline -# level: DEBUG -# filename: elastalert.log +# file: +# class : logging.FileHandler +# formatter: logline +# level: DEBUG +# filename: elastalert.log # -# loggers: -# elastalert: -# level: WARN -# handlers: [] -# propagate: true +# loggers: +# elastalert: +# level: WARN +# handlers: [] +# propagate: true # -# elasticsearch: -# level: WARN -# handlers: [] -# propagate: true +# elasticsearch: +# level: WARN +# handlers: [] +# propagate: true # -# elasticsearch.trace: -# level: WARN -# handlers: [] -# propagate: true +# elasticsearch.trace: +# level: WARN +# handlers: [] +# propagate: true # -# '': # root logger -# level: WARN -# handlers: -# - console -# - file -# propagate: false +# '': # root logger +# level: WARN +# handlers: +# - console +# - file +# propagate: false From 5a5c49a40f1a59f9755a9b39b845b75963204917 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 14 Jan 2021 23:23:33 +0900 Subject: [PATCH 0066/1065] add tzlocal<3.0 --- requirements.txt | 1 + setup.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e70415b28..4f23de5b8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,3 +20,4 @@ requests>=2.10.0 stomp.py>=4.1.17 texttable>=0.8.8 twilio>=6.0.0,<6.1 +tzlocal<3.0 diff --git a/setup.py b/setup.py index b7ca722fe..4591f27d9 100644 --- a/setup.py +++ b/setup.py @@ -48,6 +48,7 @@ 'stomp.py>=4.1.17', 'texttable>=0.8.8', 'twilio>=6.0.0,<6.1', - 'cffi>=1.11.5' + 'cffi>=1.11.5', + 'tzlocal<3.0' ] ) From 
3bc7297a1c8add1fa9f2a8b3ea12bfdd4cafa0e7 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 14 Jan 2021 23:49:03 +0900 Subject: [PATCH 0067/1065] Update running_elastalert.rst --- docs/source/running_elastalert.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index 7fdf1eeba..91b8f8624 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -10,7 +10,10 @@ Requirements - ISO8601 or Unix timestamped data - Python 3.6 - pip, see requirements.txt -- Packages on Ubuntu 14.x: python-pip python-dev libffi-dev libssl-dev +- Packages on Ubuntu 18.x: build-essential python3-pip python3.6 python3.6-dev libffi-dev libssl-dev +- Packages on Ubuntu 20.x: build-essential python3-pip python3.6 python3.6-dev libffi-dev libssl-dev +- Packages on CentOS 7.x: 'Development Tools' python3-pip python36 python3-devel python3-libs python3-setuptools libffi-devel openssl-devel +- Packages on CentOS 8.x: 'Development Tools' python3-pip python36 python3-devel python3-setuptools python3-libs libffi-devel openssl-devel Downloading and Configuring --------------------------- From 26cc8a6e9c439d12a50aaa4499af6bb804996744 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 15 Jan 2021 00:00:04 +0900 Subject: [PATCH 0068/1065] add opsgenie_addr to docs --- docs/source/ruletypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 7341e7a84..d818af8ec 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1629,7 +1629,7 @@ The OpsGenie alert requires one option: Optional: ``opsgenie_account``: The OpsGenie account to integrate with. - +``opsgenie_addr``: The OpsGenie URL to to connect against, default is ``https://api.opsgenie.com/v2/alerts`` ``opsgenie_recipients``: A list OpsGenie recipients who will be notified by the alert. ``opsgenie_recipients_args``: Map of arguments used to format opsgenie_recipients. ``opsgenie_default_receipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccesful. From bb4631430e33874b3b974f80295f6da1fa098186 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 15 Jan 2021 00:09:51 +0900 Subject: [PATCH 0069/1065] fix ruletypes.rst typo --- docs/source/ruletypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 7341e7a84..ab8ecbd79 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -938,7 +938,7 @@ Optional: ``field_value``: When set, uses the value of the field in the document and not the number of matching documents. This is useful to monitor for example a temperature sensor and raise an alarm if the temperature grows too fast. Note that the means of the field on the reference and current windows are used to determine if the ``spike_height`` value is reached. -Note also that the threshold parameters are ignored in this smode. +Note also that the threshold parameters are ignored in this mode. ``threshold_ref``: The minimum number of events that must exist in the reference window for an alert to trigger. 
For example, if From 9edf294c8f2f6d4e9614e2c557feb5d452572f35 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 15 Jan 2021 00:18:31 +0900 Subject: [PATCH 0070/1065] Remove Duplicate Key in Schema YAML --- elastalert/schema.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index eff1bbac5..e9aca22d7 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -347,7 +347,6 @@ properties: alerta_origin: {type: string} # Python format string alerta_group: {type: string} # Python format string alerta_service: {type: array, items: {type: string}} # Python format string - alerta_service: {type: array, items: {type: string}} # Python format string alerta_correlate: {type: array, items: {type: string}} # Python format string alerta_tags: {type: array, items: {type: string}} # Python format string alerta_event: {type: string} # Python format string From 62efda09cc2794c3f9e54aed2d323ccd7891f69f Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 15 Jan 2021 00:41:05 +0900 Subject: [PATCH 0071/1065] fix zabbix --- elastalert/loaders.py | 4 ++- elastalert/zabbix.py | 60 ++++++++++++++++++++++++++++--------------- 2 files changed, 43 insertions(+), 21 deletions(-) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 0c3b13d71..a03610759 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -25,6 +25,7 @@ from .util import ts_to_dt_with_format from .util import unix_to_dt from .util import unixms_to_dt +from .zabbix import ZabbixAlerter class RulesLoader(object): @@ -77,7 +78,8 @@ class RulesLoader(object): 'post': alerts.HTTPPostAlerter, 'pagertree': alerts.PagerTreeAlerter, 'linenotify': alerts.LineNotifyAlerter, - 'hivealerter': alerts.HiveAlerter + 'hivealerter': alerts.HiveAlerter, + 'zabbix': ZabbixAlerter } # A partial ordering of alert types. 
Relative order will be preserved in the resulting alerts list diff --git a/elastalert/zabbix.py b/elastalert/zabbix.py index e3f13aa03..e2b5f1ed6 100644 --- a/elastalert/zabbix.py +++ b/elastalert/zabbix.py @@ -1,46 +1,47 @@ -from alerts import Alerter # , BasicMatchString -import logging -from pyzabbix.api import ZabbixAPI -from pyzabbix import ZabbixSender, ZabbixMetric from datetime import datetime +from pyzabbix import ZabbixSender, ZabbixMetric, ZabbixAPI + +from .alerts import Alerter +from .util import elastalert_logger, EAException + class ZabbixClient(ZabbixAPI): - def __init__(self, url='http://localhost', use_authenticate=False, user='Admin', password='zabbix', sender_host='localhost', - sender_port=10051): + def __init__(self, url='http://localhost', use_authenticate=False, user='Admin', password='zabbix', + sender_host='localhost', sender_port=10051): self.url = url self.use_authenticate = use_authenticate self.sender_host = sender_host self.sender_port = sender_port self.metrics_chunk_size = 200 self.aggregated_metrics = [] - self.logger = logging.getLogger(self.__class__.__name__) - super(ZabbixClient, self).__init__(url=self.url, use_authenticate=self.use_authenticate, user=user, password=password) + + super(ZabbixClient, self).__init__(url=self.url, + use_authenticate=self.use_authenticate, + user=user, + password=password) def send_metric(self, hostname, key, data): zm = ZabbixMetric(hostname, key, data) if self.send_aggregated_metrics: - self.aggregated_metrics.append(zm) if len(self.aggregated_metrics) > self.metrics_chunk_size: - self.logger.info("Sending: %s metrics" % (len(self.aggregated_metrics))) + elastalert_logger.info("Sending: %s metrics" % (len(self.aggregated_metrics))) try: - ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port).send(self.aggregated_metrics) + ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port) \ + .send(self.aggregated_metrics) self.aggregated_metrics = [] except Exception as e: - self.logger.exception(e) - pass + elastalert_logger.exception(e) else: try: - ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port).send(zm) + ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port).send([zm]) except Exception as e: - self.logger.exception(e) - pass + elastalert_logger.exception(e) class ZabbixAlerter(Alerter): - # By setting required_options to a set of strings # You can ensure that the rule config file specifies all # of the options. 
Otherwise, ElastAlert will throw an exception @@ -54,6 +55,9 @@ def __init__(self, *args): self.zbx_sender_port = self.rule.get('zbx_sender_port', 10051) self.zbx_host = self.rule.get('zbx_host') self.zbx_key = self.rule.get('zbx_key') + self.timestamp_field = self.rule.get('timestamp_field', '@timestamp') + self.timestamp_type = self.rule.get('timestamp_type', 'iso') + self.timestamp_strptime = self.rule.get('timestamp_strptime', '%Y-%m-%dT%H:%M:%S.%fZ') # Alert is called def alert(self, matches): @@ -63,10 +67,26 @@ def alert(self, matches): # the aggregation option set zm = [] for match in matches: - ts_epoch = int(datetime.strptime(match['@timestamp'], "%Y-%m-%dT%H:%M:%S.%fZ").strftime('%s')) - zm.append(ZabbixMetric(host=self.zbx_host, key=self.zbx_key, value=1, clock=ts_epoch)) + if ':' not in match[self.timestamp_field] or '-' not in match[self.timestamp_field]: + ts_epoch = int(match[self.timestamp_field]) + else: + try: + ts_epoch = int(datetime.strptime(match[self.timestamp_field], self.timestamp_strptime) + .strftime('%s')) + except ValueError: + ts_epoch = int(datetime.strptime(match[self.timestamp_field], '%Y-%m-%dT%H:%M:%SZ') + .strftime('%s')) + zm.append(ZabbixMetric(host=self.zbx_host, key=self.zbx_key, value='1', clock=ts_epoch)) - ZabbixSender(zabbix_server=self.zbx_sender_host, zabbix_port=self.zbx_sender_port).send(zm) + try: + response = ZabbixSender(zabbix_server=self.zbx_sender_host, zabbix_port=self.zbx_sender_port).send(zm) + if response.failed: + elastalert_logger.warning("Missing zabbix host '%s' or host's item '%s', alert will be discarded" + % (self.zbx_host, self.zbx_key)) + else: + elastalert_logger.info("Alert sent to Zabbix") + except Exception as e: + raise EAException("Error sending alert to Zabbix: %s" % e) # get_info is called after an alert is sent to get data that is written back # to Elasticsearch in the field "alert_info" From d45a83f54007d4a6a75459fe5e5ef15f4819573b Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 15 Jan 2021 01:22:44 +0900 Subject: [PATCH 0072/1065] update alert types and Third Party Tools --- README.md | 33 +++++++++++++++++++++++++++++---- docs/source/elastalert.rst | 13 +++++++++++++ 2 files changed, 42 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 604cc081b..fc2461bf4 100644 --- a/README.md +++ b/README.md @@ -39,22 +39,29 @@ Several rule types with common monitoring paradigms are included with ElastAlert Currently, we have built-in support for the following alert types: +- Command - Email - JIRA - OpsGenie -- Commands +- AWS SNS - MS Teams - Slack +- Mattermost - Telegram - GoogleChat -- AWS SNS -- VictorOps - PagerDuty - PagerTree - Exotel - Twilio +- Splunk On-Call (Formerly VictorOps) - Gitter +- ServiceNow +- Debug +- Stomp +- Alerta +- HTTP POST - Line Notify +- TheHive - Zabbix Additional rule types and alerts can be easily imported or written. @@ -114,7 +121,7 @@ A [Dockerized version](https://github.com/bitsensor/elastalert) of ElastAlert in ```bash git clone https://github.com/bitsensor/elastalert.git; cd elastalert -docker run -d -p 3030:3030 \ +docker run -d -p 3030:3030 -p 3333:3333 \ -v `pwd`/config/elastalert.yaml:/opt/elastalert/config.yaml \ -v `pwd`/config/config.json:/opt/elastalert-server/config/config.json \ -v `pwd`/rules:/opt/elastalert/rules \ @@ -123,6 +130,24 @@ docker run -d -p 3030:3030 \ --name elastalert bitsensor/elastalert:latest ``` +### ElastAlert-Docker + +This Dockerfile will build a Docker image for Elastalert. 
This image is compatible with the accompanying Helm chart for Kubernetes. + +https://github.com/jertel/elastalert-docker + +### ElastAlert uses ElastAlert Helm Chart almost as it is *Use original Docker Image to fix bugs + +[ElastAlert Server Helm Chart](https://github.com/daichi703n/elastalert-helm) + +[Praeco Helm Chart](https://github.com/daichi703n/praeco-helm) + +[Installing Praeco (ElastAlert GUI) into Kubernetes with Helm](https://en-designetwork.daichi703n.com/entry/2020/02/24/praeco-helm-kubernetes) + +### Praeco +![Praeco screenshot](https://user-images.githubusercontent.com/611996/47752071-7c4a9080-dc61-11e8-8ccf-2196f13429b2.png) +[Praeco](https://github.com/johnsusek/praeco) is a free open source GUI for ElastAlert. + ## Documentation Read the documentation at [Read the Docs](http://elastalert.readthedocs.org). diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index d6654b850..71c1b8cb0 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -36,12 +36,25 @@ Currently, we have support built in for these alert types: - JIRA - OpsGenie - AWS SNS +- MS Teams - Slack +- Mattermost - Telegram - GoogleChat +- PagerDuty +- PagerTree +- Exotel +- Twilio +- Splunk On-Call (Formerly VictorOps) +- Gitter +- ServiceNow - Debug - Stomp +- Alerta +- HTTP POST +- Line Notify - TheHive +- Zabbix Additional rule types and alerts can be easily imported or written. (See :ref:`Writing rule types ` and :ref:`Writing alerts `) From 84888b03a1e34bae069cbe1af5cac6a26bd82ef4 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 15 Jan 2021 02:12:01 +0900 Subject: [PATCH 0073/1065] Fix Logging inconsistencies --- elastalert/alerts.py | 15 +++++++-------- elastalert/elastalert.py | 32 ++++++++++++++++---------------- elastalert/kibana_discover.py | 9 +++++---- elastalert/loaders.py | 14 +++++++------- elastalert/opsgenie.py | 15 +++++++-------- elastalert/util.py | 6 +++--- 6 files changed, 45 insertions(+), 46 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index c4fef8581..65b0b5de1 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2,7 +2,6 @@ import copy import datetime import json -import logging import os import re import subprocess @@ -590,7 +589,7 @@ def __init__(self, rule): msg = '%s Both have common statuses of (%s). As such, no tickets will ever be found.' % ( msg, ','.join(intersection)) msg += ' This should be simplified to use only one or the other.' - logging.warning(msg) + elastalert_logger.warning(msg) self.reset_jira_args() @@ -610,7 +609,7 @@ def set_priority(self): if self.priority is not None and self.client is not None: self.jira_args['priority'] = {'id': self.priority_ids[self.priority]} except KeyError: - logging.error("Priority %s not found. Valid priorities are %s" % (self.priority, list(self.priority_ids.keys()))) + elastalert_logger.error("Priority %s not found. 
Valid priorities are %s" % (self.priority, list(self.priority_ids.keys()))) def reset_jira_args(self): self.jira_args = {'project': {'key': self.project}, @@ -753,7 +752,7 @@ def find_existing_ticket(self, matches): try: issues = self.client.search_issues(jql) except JIRAError as e: - logging.exception("Error while searching for JIRA ticket using jql '%s': %s" % (jql, e)) + elastalert_logger.exception("Error while searching for JIRA ticket using jql '%s': %s" % (jql, e)) return None if len(issues): @@ -796,19 +795,19 @@ def alert(self, matches): try: self.comment_on_ticket(ticket, match) except JIRAError as e: - logging.exception("Error while commenting on ticket %s: %s" % (ticket, e)) + elastalert_logger.exception("Error while commenting on ticket %s: %s" % (ticket, e)) if self.labels: for label in self.labels: try: ticket.fields.labels.append(label) except JIRAError as e: - logging.exception("Error while appending labels to ticket %s: %s" % (ticket, e)) + elastalert_logger.exception("Error while appending labels to ticket %s: %s" % (ticket, e)) if self.transition: elastalert_logger.info('Transitioning existing ticket %s' % (ticket.key)) try: self.transition_ticket(ticket) except JIRAError as e: - logging.exception("Error while transitioning ticket %s: %s" % (ticket, e)) + elastalert_logger.exception("Error while transitioning ticket %s: %s" % (ticket, e)) if self.pipeline is not None: self.pipeline['jira_ticket'] = ticket @@ -899,7 +898,7 @@ def __init__(self, *args): if isinstance(self.rule['command'], str): self.shell = True if '%' in self.rule['command']: - logging.warning('Warning! You could be vulnerable to shell injection!') + elastalert_logger.warning('Warning! You could be vulnerable to shell injection!') self.rule['command'] = [self.rule['command']] self.new_style_string_format = False diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 81d2ecff7..d9b1f87ab 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -407,7 +407,7 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): # Different versions of ES have this formatted in different ways. Fallback to str-ing the whole thing raise ElasticsearchException(str(res['_shards']['failures'])) - logging.debug(str(res)) + elastalert_logger.debug(str(res)) except ElasticsearchException as e: # Elasticsearch sometimes gives us GIGANTIC error messages # (so big that they will fill the entire terminal buffer) @@ -850,7 +850,7 @@ def enhance_filter(self, rule): filters.append(query_str_filter) else: filters.append({'query': query_str_filter}) - logging.debug("Enhanced filter with {} terms: {}".format(listname, str(query_str_filter))) + elastalert_logger.debug("Enhanced filter with {} terms: {}".format(listname, str(query_str_filter))) def run_rule(self, rule, endtime, starttime=None): """ Run a rule for a given time period, including querying and alerting on results. @@ -879,7 +879,7 @@ def run_rule(self, rule, endtime, starttime=None): # Don't run if starttime was set to the future if ts_now() <= rule['starttime']: - logging.warning("Attempted to use query start time in the future (%s), sleeping instead" % (starttime)) + elastalert_logger.warning("Attempted to use query start time in the future (%s), sleeping instead" % (starttime)) return 0 # Run the rule. 
If querying over a large time period, split it up into segments @@ -1088,7 +1088,7 @@ def load_rule_changes(self): try: new_rule = self.rules_loader.load_configuration(rule_file, self.conf) if not new_rule: - logging.error('Invalid rule file skipped: %s' % rule_file) + elastalert_logger.error('Invalid rule file skipped: %s' % rule_file) continue if 'is_enabled' in new_rule and not new_rule['is_enabled']: elastalert_logger.info('Rule file %s is now disabled.' % (rule_file)) @@ -1128,7 +1128,7 @@ def load_rule_changes(self): try: new_rule = self.rules_loader.load_configuration(rule_file, self.conf) if not new_rule: - logging.error('Invalid rule file skipped: %s' % rule_file) + elastalert_logger.error('Invalid rule file skipped: %s' % rule_file) continue if 'is_enabled' in new_rule and not new_rule['is_enabled']: continue @@ -1211,12 +1211,12 @@ def wait_until_responsive(self, timeout, clock=timeit.default_timer): time.sleep(1.0) if self.writeback_es.ping(): - logging.error( + elastalert_logger.error( 'Writeback alias "%s" does not exist, did you run `elastalert-create-index`?', self.writeback_alias, ) else: - logging.error( + elastalert_logger.error( 'Could not reach ElasticSearch at "%s:%d".', self.conf['es_host'], self.conf['es_port'], @@ -1291,7 +1291,7 @@ def handle_rule_execution(self, rule): # We were processing for longer than our refresh interval # This can happen if --start was specified with a large time period # or if we are running too slow to process events in real time. - logging.warning( + elastalert_logger.warning( "Querying from %s to %s took longer than %s!" % ( old_starttime, pretty_ts(endtime, rule.get('use_local_time')), @@ -1624,7 +1624,7 @@ def writeback(self, doc_type, body, rule=None, match_body=None): res = self.writeback_es.index(index=index, doc_type=doc_type, body=body) return res except ElasticsearchException as e: - logging.exception("Error writing alert info to Elasticsearch: %s" % (e)) + elastalert_logger.exception("Error writing alert info to Elasticsearch: %s" % (e)) def find_recent_pending_alerts(self, time_limit): """ Queries writeback_es to find alerts that did not send @@ -1652,7 +1652,7 @@ def find_recent_pending_alerts(self, time_limit): if res['hits']['hits']: return res['hits']['hits'] except ElasticsearchException as e: - logging.exception("Error finding recent pending alerts: %s %s" % (e, query)) + elastalert_logger.exception("Error finding recent pending alerts: %s %s" % (e, query)) return [] def send_pending_alerts(self): @@ -1852,11 +1852,11 @@ def add_aggregated_alert(self, match, rule): def silence(self, silence_cache_key=None): """ Silence an alert for a period of time. --silence and --rule must be passed as args. 
""" if self.debug: - logging.error('--silence not compatible with --debug') + elastalert_logger.error('--silence not compatible with --debug') exit(1) if not self.args.rule: - logging.error('--silence must be used with --rule') + elastalert_logger.error('--silence must be used with --rule') exit(1) # With --rule, self.rules will only contain that specific rule @@ -1869,11 +1869,11 @@ def silence(self, silence_cache_key=None): try: silence_ts = parse_deadline(self.args.silence) except (ValueError, TypeError): - logging.error('%s is not a valid time period' % (self.args.silence)) + elastalert_logger.error('%s is not a valid time period' % (self.args.silence)) exit(1) if not self.set_realert(silence_cache_key, silence_ts, 0): - logging.error('Failed to save silence command to Elasticsearch') + elastalert_logger.error('Failed to save silence command to Elasticsearch') exit(1) elastalert_logger.info('Success. %s will be silenced until %s' % (silence_cache_key, silence_ts)) @@ -1934,7 +1934,7 @@ def is_silenced(self, rule_name): def handle_error(self, message, data=None): ''' Logs message at error level and writes message, data and traceback to Elasticsearch. ''' - logging.error(message) + elastalert_logger.error(message) body = {'message': message} tb = traceback.format_exc() body['traceback'] = tb.strip().split('\n') @@ -1944,7 +1944,7 @@ def handle_error(self, message, data=None): def handle_uncaught_exception(self, exception, rule): """ Disables a rule and sends a notification. """ - logging.error(traceback.format_exc()) + elastalert_logger.error(traceback.format_exc()) self.handle_error('Uncaught exception running rule %s: %s' % (rule['name'], exception), {'rule': rule['name']}) if self.disable_rules_on_error: self.rules = [running_rule for running_rule in self.rules if running_rule['name'] != rule['name']] diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index f846ac6c4..ed8b681db 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -8,6 +8,7 @@ import urllib.parse from .util import EAException +from .util import elastalert_logger from .util import lookup_es_key from .util import ts_add @@ -21,7 +22,7 @@ def generate_kibana_discover_url(rule, match): discover_app_url = rule.get('kibana_discover_app_url') if not discover_app_url: - logging.warning( + elastalert_logger.warning( 'Missing kibana_discover_app_url for rule %s' % ( rule.get('name', '') ) @@ -30,7 +31,7 @@ def generate_kibana_discover_url(rule, match): kibana_version = rule.get('kibana_discover_version') if not kibana_version: - logging.warning( + elastalert_logger.warning( 'Missing kibana_discover_version for rule %s' % ( rule.get('name', '') ) @@ -39,7 +40,7 @@ def generate_kibana_discover_url(rule, match): index = rule.get('kibana_discover_index_pattern_id') if not index: - logging.warning( + elastalert_logger.warning( 'Missing kibana_discover_index_pattern_id for rule %s' % ( rule.get('name', '') ) @@ -70,7 +71,7 @@ def generate_kibana_discover_url(rule, match): appState = kibana_discover_app_state(index, columns, filters, query_keys, match) else: - logging.warning( + elastalert_logger.warning( 'Unknown kibana discover application version %s for rule %s' % ( kibana_version, rule.get('name', '') diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 044adce91..c3487f9be 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -2,7 +2,6 @@ import copy import datetime import hashlib -import logging import os import sys @@ -21,6 +20,7 @@ from .util import 
dt_to_unix from .util import dt_to_unixms from .util import EAException +from .util import elastalert_logger from .util import get_module from .util import ts_to_dt from .util import ts_to_dt_with_format @@ -116,7 +116,7 @@ def load(self, conf, args=None): rule = self.load_configuration(rule_file, conf, args) # A rule failed to load, don't try to process it if not rule: - logging.error('Invalid rule file skipped: %s' % rule_file) + elastalert_logger.error('Invalid rule file skipped: %s' % rule_file) continue # By setting "is_enabled: False" in rule file, a rule is easily disabled if 'is_enabled' in rule and not rule['is_enabled']: @@ -388,10 +388,10 @@ def _dt_to_ts_with_format(dt): if rule.get('use_strftime_index'): for token in ['%y', '%M', '%D']: if token in rule.get('index'): - logging.warning('Did you mean to use %s in the index? ' - 'The index will be formatted like %s' % (token, - datetime.datetime.now().strftime( - rule.get('index')))) + elastalert_logger.warning('Did you mean to use %s in the index? ' + 'The index will be formatted like %s' % (token, + datetime.datetime.now().strftime( + rule.get('index')))) if rule.get('scan_entire_timeframe') and not rule.get('timeframe'): raise EAException('scan_entire_timeframe can only be used if there is a timeframe specified') @@ -484,7 +484,7 @@ def adjust_deprecated_values(rule): rule['http_post_proxy'] = rule['simple_proxy'] if 'simple_webhook_url' in rule: rule['http_post_url'] = rule['simple_webhook_url'] - logging.warning( + elastalert_logger.warning( '"simple" alerter has been renamed "post" and comptability may be removed in a future release.') diff --git a/elastalert/opsgenie.py b/elastalert/opsgenie.py index bcdaf2d05..8db52d89f 100644 --- a/elastalert/opsgenie.py +++ b/elastalert/opsgenie.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import json -import logging import os.path import requests @@ -46,11 +45,11 @@ def _parse_responders(self, responders, responder_args, matches, default_respond try: formated_responders.append(responder.format(**responders_values)) except KeyError as error: - logging.warn("OpsGenieAlerter: Cannot create responder for OpsGenie Alert. Key not foud: %s. " % (error)) + elastalert_logger.warning("OpsGenieAlerter: Cannot create responder for OpsGenie Alert. Key not foud: %s. " % (error)) if not formated_responders: - logging.warn("OpsGenieAlerter: no responders can be formed. Trying the default responder ") + elastalert_logger.warning("OpsGenieAlerter: no responders can be formed. Trying the default responder ") if not default_responders: - logging.warn("OpsGenieAlerter: default responder not set. Falling back") + elastalert_logger.warning("OpsGenieAlerter: default responder not set. Falling back") formated_responders = responders else: formated_responders = default_responders @@ -90,7 +89,7 @@ def alert(self, matches): post['tags'] = self.tags if self.priority and self.priority not in ('P1', 'P2', 'P3', 'P4', 'P5'): - logging.warn("Priority level does not appear to be specified correctly. \ + elastalert_logger.warning("Priority level does not appear to be specified correctly. 
\ Please make sure to set it to a value between P1 and P5") else: post['priority'] = self.priority @@ -102,7 +101,7 @@ def alert(self, matches): if details: post['details'] = details - logging.debug(json.dumps(post)) + elastalert_logger.debug(json.dumps(post)) headers = { 'Content-Type': 'application/json', @@ -114,12 +113,12 @@ def alert(self, matches): try: r = requests.post(self.to_addr, json=post, headers=headers, proxies=proxies) - logging.debug('request response: {0}'.format(r)) + elastalert_logger.debug('request response: {0}'.format(r)) if r.status_code != 202: elastalert_logger.info("Error response from {0} \n " "API Response: {1}".format(self.to_addr, r)) r.raise_for_status() - logging.info("Alert sent to OpsGenie") + elastalert_logger.info("Alert sent to OpsGenie") except Exception as err: raise EAException("Error sending alert: {0}".format(err)) diff --git a/elastalert/util.py b/elastalert/util.py index bbb0600ff..3e9c9f664 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -152,7 +152,7 @@ def ts_to_dt(timestamp): def dt_to_ts(dt): if not isinstance(dt, datetime.datetime): - logging.warning('Expected datetime, got %s' % (type(dt))) + elastalert_logger.warning('Expected datetime, got %s' % (type(dt))) return dt ts = dt.isoformat() # Round microseconds to milliseconds @@ -176,7 +176,7 @@ def ts_to_dt_with_format(timestamp, ts_format): def dt_to_ts_with_format(dt, ts_format): if not isinstance(dt, datetime.datetime): - logging.warning('Expected datetime, got %s' % (type(dt))) + elastalert_logger.warning('Expected datetime, got %s' % (type(dt))) return dt ts = dt.strftime(ts_format) return ts @@ -361,7 +361,7 @@ def build_es_conn_config(conf): # Deprecated if 'boto_profile' in conf: - logging.warning('Found deprecated "boto_profile", use "profile" instead!') + elastalert_logger.warning('Found deprecated "boto_profile", use "profile" instead!') parsed_conf['profile'] = conf['boto_profile'] if 'profile' in conf: From 72201dae06b009693359b4843c74947e063cbf52 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 15 Jan 2021 20:12:01 +0900 Subject: [PATCH 0074/1065] Fixing slack ssl verification --- elastalert/alerts.py | 2 +- tests/alerts_test.py | 63 +++++++++++++++++++++++++++++++++++++------- 2 files changed, 55 insertions(+), 10 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 65b0b5de1..d2aac654d 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1127,7 +1127,7 @@ def alert(self, matches): if self.slack_ca_certs: verify = self.slack_ca_certs else: - verify = self.slack_ignore_ssl_errors + verify = not self.slack_ignore_ssl_errors if self.slack_ignore_ssl_errors: requests.packages.urllib3.disable_warnings() payload['channel'] = channel_override diff --git a/tests/alerts_test.py b/tests/alerts_test.py index e823d0965..ba5a8c5d8 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -1240,7 +1240,7 @@ def test_slack_uses_custom_title(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1286,7 +1286,7 @@ def test_slack_uses_custom_timeout(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=20 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1330,7 +1330,7 @@ def test_slack_uses_rule_name_when_custom_title_is_not_provided(): data=mock.ANY, headers={'content-type': 
'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1375,7 +1375,7 @@ def test_slack_uses_custom_slack_channel(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1436,7 +1436,7 @@ def test_slack_uses_list_of_custom_slack_channel(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1487,7 +1487,7 @@ def test_slack_attach_kibana_discover_url_when_generated(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1532,7 +1532,7 @@ def test_slack_attach_kibana_discover_url_when_not_generated(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1584,7 +1584,7 @@ def test_slack_kibana_discover_title(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1636,13 +1636,58 @@ def test_slack_kibana_discover_color(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data +def test_slack_ignore_ssl_errors(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_ignore_ssl_errors': True, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=False, + timeout=10 + ) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + def test_http_alerter_with_payload(): rule = { 'name': 'Test HTTP Post Alerter With Payload', From deca0b006a4b71cbb28c79d69fabb54cdd32c646 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 15 Jan 2021 20:20:21 +0900 Subject: [PATCH 0075/1065] Fix is_enabled not work with reload --- elastalert/elastalert.py | 17 ++++++++++++++--- elastalert/loaders.py | 3 --- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index d9b1f87ab..db2e27498 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -176,7 +176,10 @@ def __init__(self, args): remove = [] for rule in self.rules: - if not self.init_rule(rule): + if 'is_enabled' in rule and not 
rule['is_enabled']: + self.disabled_rules.append(rule) + remove.append(rule) + elif not self.init_rule(rule): remove.append(rule) list(map(self.rules.remove, remove)) @@ -974,7 +977,7 @@ def run_rule(self, rule, endtime, starttime=None): def init_rule(self, new_rule, new=True): ''' Copies some necessary non-config state from an exiting rule to a new rule. ''' - if not new: + if not new and self.scheduler.get_job(job_id=new_rule['name']): self.scheduler.remove_job(job_id=new_rule['name']) try: @@ -1094,6 +1097,15 @@ def load_rule_changes(self): elastalert_logger.info('Rule file %s is now disabled.' % (rule_file)) # Remove this rule if it's been disabled self.rules = [rule for rule in self.rules if rule['rule_file'] != rule_file] + # Stop job if is running + if self.scheduler.get_job(job_id=new_rule['name']): + self.scheduler.remove_job(job_id=new_rule['name']) + # Append to disabled_rule + for disabled_rule in self.disabled_rules: + if disabled_rule['name'] == new_rule['name']: + break + else: + self.disabled_rules.append(new_rule) continue except EAException as e: message = 'Could not load rule %s: %s' % (rule_file, e) @@ -1112,7 +1124,6 @@ def load_rule_changes(self): # Re-enable if rule had been disabled for disabled_rule in self.disabled_rules: if disabled_rule['name'] == new_rule['name']: - self.rules.append(disabled_rule) self.disabled_rules.remove(disabled_rule) break diff --git a/elastalert/loaders.py b/elastalert/loaders.py index dae799be4..fc1bb4830 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -120,9 +120,6 @@ def load(self, conf, args=None): if not rule: elastalert_logger.error('Invalid rule file skipped: %s' % rule_file) continue - # By setting "is_enabled: False" in rule file, a rule is easily disabled - if 'is_enabled' in rule and not rule['is_enabled']: - continue if rule['name'] in names: raise EAException('Duplicate rule named %s' % (rule['name'])) except EAException as e: From 5293cada4e9338dcfc22417dcae52460d1705360 Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Sun, 17 Jan 2021 10:33:56 +0200 Subject: [PATCH 0076/1065] adjusting indentation --- elastalert/elastalert.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index efec46524..f1e1d6b79 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -1291,10 +1291,18 @@ def handle_rule_execution(self, rule): elastalert_logger.info("%s range %s" % (rule['name'], rule_duration)) if self.statsd: try: - self.statsd.gauge('query.hits', self.thread_data.num_hits, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) - self.statsd.gauge('already_seen.hits', self.thread_data.num_dupes,tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) - self.statsd.gauge('query.matches', num_matches, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) - self.statsd.gauge('query.alerts_sent', self.thread_data.alerts_sent, tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge( + 'query.hits', self.thread_data.num_hits, + tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge( + 'already_seen.hits', self.thread_data.num_dupes, + tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge( + 'query.matches', num_matches, + tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": 
rule['name']}) + self.statsd.gauge( + 'query.alerts_sent', self.thread_data.alerts_sent, + tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) except BaseException as e: elastalert_logger.error("unable to send metrics:\n%s" % str(e)) From d3f2658473f6a1b0246c607f8872e3dad23743aa Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Sun, 17 Jan 2021 10:41:09 +0200 Subject: [PATCH 0077/1065] adjusting indentation --- docs/source/ruletypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index bb3a58f6b..8e7f4a364 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -40,7 +40,7 @@ Rule Configuration Cheat Sheet +--------------------------------------------------------------+ | | ``es_url_prefix`` (string, no default) | | +--------------------------------------------------------------+ | -| ``statsd_instance_tag`` (string, no default) | | +| ``statsd_instance_tag`` (string, no default) | | +--------------------------------------------------------------+ | | ``statsd_host`` (string, no default) | | +--------------------------------------------------------------+ | From 283f379a682ed4ea5539888de5001b741669eea9 Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Sun, 17 Jan 2021 10:45:16 +0200 Subject: [PATCH 0078/1065] fixing underline --- docs/source/ruletypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 8e7f4a364..bb6ca2b49 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -294,7 +294,7 @@ es_url_prefix ``es_url_prefix``: URL prefix for the Elasticsearch endpoint. (Optional, string, no default) statsd_instance_tag -^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^ ``statsd_instance_tag``: prefix for statsd metrics. (Optional, string, no default) From ad198da149c5057ae29ceff208345ff46e864917 Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Sun, 17 Jan 2021 16:46:39 +0200 Subject: [PATCH 0079/1065] adding statsd to setup.py --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2845836a7..35d1eb340 100644 --- a/setup.py +++ b/setup.py @@ -47,6 +47,7 @@ 'stomp.py>=4.1.17', 'texttable>=0.8.8', 'twilio>=6.0.0,<6.1', - 'cffi>=1.11.5' + 'cffi>=1.11.5', + 'statsd-tags==3.2.1.post1' ] ) From c41e610c502379464122572cfcd40f9cb208ef64 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Mon, 18 Jan 2021 01:59:36 +0900 Subject: [PATCH 0080/1065] add discord alerter --- README.md | 1 + docs/source/elastalert.rst | 1 + docs/source/ruletypes.rst | 28 ++++++++++++ elastalert/alerts.py | 62 ++++++++++++++++++++++++++ elastalert/loaders.py | 3 +- elastalert/schema.yaml | 3 ++ example_rules/exemple_discord_any.yaml | 40 +++++++++++++++++ 7 files changed, 137 insertions(+), 1 deletion(-) create mode 100644 example_rules/exemple_discord_any.yaml diff --git a/README.md b/README.md index fc2461bf4..7abb986ea 100644 --- a/README.md +++ b/README.md @@ -63,6 +63,7 @@ Currently, we have built-in support for the following alert types: - Line Notify - TheHive - Zabbix +- Discord Additional rule types and alerts can be easily imported or written. 
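That last line is literal: the alerters added in this and the following patches (Discord, then Dingtalk and Chatwork) all implement the same small contract. A minimal sketch, assuming only the ``Alerter`` hooks these patches themselves use (``alert``, ``get_info``, ``create_alert_body``); ``MyAlerter`` and ``my_alerter_url`` are hypothetical names, not part of the codebase::

    # Minimal custom-alerter sketch; "MyAlerter" and "my_alerter_url" are
    # hypothetical. The hooks mirror the built-in alerters in this series.
    import requests

    from elastalert.alerts import Alerter
    from elastalert.util import EAException, elastalert_logger


    class MyAlerter(Alerter):
        # Rule options that must be present for a rule using this alerter.
        required_options = frozenset(['my_alerter_url'])

        def alert(self, matches):
            # create_alert_body renders the matches into the alert text.
            body = self.create_alert_body(matches)
            try:
                response = requests.post(self.rule['my_alerter_url'], data=body)
                response.raise_for_status()
            except requests.RequestException as e:
                raise EAException("Error posting alert: %s" % e)
            elastalert_logger.info("Alert sent to %s" % self.rule['my_alerter_url'])

        def get_info(self):
            # Metadata recorded in the writeback index for each alert.
            return {'type': 'my_alerter'}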
diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst
index 71c1b8cb0..f6683d551 100755
--- a/docs/source/elastalert.rst
+++ b/docs/source/elastalert.rst
@@ -55,6 +55,7 @@ Currently, we have support built in for these alert types:
 - Line Notify
 - TheHive
 - Zabbix
+- Discord

 Additional rule types and alerts can be easily imported or written. (See :ref:`Writing rule types ` and :ref:`Writing alerts `)
diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index e26d4fcfb..d7a198037 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2282,3 +2282,31 @@ Required:
 ``zbx_sender_port``: The port where the zabbix server is listening.
 ``zbx_host``: This field sets up the host in zabbix that receives the value sent by Elastalert.
 ``zbx_key``: This field sets up the key in the host that receives the value sent by Elastalert.
+
+
+Discord
+~~~~~~~
+
+Discord will send notification to a Line application. The body of the notification is formatted the same as with other alerters.
+
+Required:
+
+``discord_webhook_url``: The webhook URL.
+
+Optional:
+
+``discord_emoji_title``: By default ElastAlert will use the ``:warning:`` emoji when posting to the channel. You can use a different emoji per ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored.
+
+``discord_http_proxy``: By default ElastAlert will not use a network http proxy to send notifications to Discord. Set this option using hostname:port if you need to use a proxy.
+
+``discord_https_proxy``: By default ElastAlert will not use a network https proxy to send notifications to Discord. Set this option using hostname:port if you need to use a proxy.
+
+``discord_proxy_login``: The Discord proxy auth username.
+
+``discord_proxy_password``: The Discord proxy auth password.
+
+``discord_embed_color``: The embed color. By default ``0xffffff``.
+
+``discord_embed_footer``: The embed footer.
+
+``discord_embed_icon_url``: You can provide icon_url to use a custom image. Provide the absolute address of the picture.
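The payload these options control is a plain webhook POST. As a rough illustration of the request shape the ``DiscordAlerter`` below assembles (a ``content`` line plus one embed carrying ``description``, ``color``, and an optional ``footer``); the webhook URL, icon URL, and text values here are placeholders::

    # Standalone sketch of the webhook payload shape; URL and footer
    # values are placeholders, not real endpoints.
    import json

    import requests

    webhook_url = 'https://discord.com/api/webhooks/xxxx/yyyy'  # placeholder

    payload = {
        'content': ':warning: Example discord webhook alert :warning:',
        'embeds': [{
            'description': '```\nAlerts at 2021-01-18 on the computer example-host\n```',
            'color': 0xE24D42,
            'footer': {
                'text': 'Message sent by ElastAlert',
                'icon_url': 'https://example.com/icon.png',  # placeholder
            },
        }],
    }

    response = requests.post(
        webhook_url,
        data=json.dumps(payload),
        headers={'Content-Type': 'application/json'},
    )
    response.raise_for_status()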
diff --git a/elastalert/alerts.py b/elastalert/alerts.py index a0801d8bf..e44cdd68f 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2010,3 +2010,65 @@ def get_info(self): 'type': 'hivealerter', 'hive_host': self.rule.get('hive_connection', {}).get('hive_host', '') } + + +class DiscordAlerter(Alerter): + + required_options = frozenset(['discord_webhook_url']) + + def __init__(self, rule): + super(DiscordAlerter, self).__init__(rule) + self.discord_webhook_url = self.rule['discord_webhook_url'] + self.discord_emoji_title = self.rule.get('discord_emoji_title', ':warning:') + self.discord_proxy = self.rule.get('discord_proxy', None) + self.discord_proxy_login = self.rule.get('discord_proxy_login', None) + self.discord_proxy_password = self.rule.get('discord_proxy_password', None) + self.discord_embed_color = self.rule.get('discord_embed_color', 0xffffff) + self.discord_embed_footer = self.rule.get('discord_embed_footer', None) + self.discord_embed_icon_url = self.rule.get('discord_embed_icon_url', None) + + def alert(self, matches): + body = '' + title = u'%s' % (self.create_title(matches)) + for match in matches: + body += str(BasicMatchString(self.rule, match)) + if len(matches) > 1: + body += '\n----------------------------------------\n' + if len(body) > 2047: + body = body[0:1950] + '\n *message was cropped according to discord embed description limits!* ' + + body += '```' + + proxies = {'https': self.discord_proxy} if self.discord_proxy else None + auth = HTTPProxyAuth(self.discord_proxy_login, self.discord_proxy_password) if self.discord_proxy_login else None + headers = {"Content-Type": "application/json"} + + data = {} + data["content"] = "%s %s %s" % (self.discord_emoji_title, title, self.discord_emoji_title) + data["embeds"] = [] + embed = {} + embed["description"] = "%s" % (body) + embed["color"] = (self.discord_embed_color) + + if self.discord_embed_footer: + embed["footer"] = {} + embed["footer"]["text"] = (self.discord_embed_footer) if self.discord_embed_footer else None + embed["footer"]["icon_url"] = (self.discord_embed_icon_url) if self.discord_embed_icon_url else None + else: + None + + data["embeds"].append(embed) + + try: + response = requests.post(self.discord_webhook_url, data=json.dumps(data), headers=headers, proxies=proxies, auth=auth) + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Discord: %s. Details: %s" % (e, "" if e.response is None else e.response.text)) + + elastalert_logger.info( + "Alert sent to the webhook %s" % self.discord_webhook_url) + + def get_info(self): + return {'type': 'discord', + 'discord_webhook_url': self.discord_webhook_url} diff --git a/elastalert/loaders.py b/elastalert/loaders.py index fc1bb4830..f2b1701b3 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -80,7 +80,8 @@ class RulesLoader(object): 'pagertree': alerts.PagerTreeAlerter, 'linenotify': alerts.LineNotifyAlerter, 'hivealerter': alerts.HiveAlerter, - 'zabbix': ZabbixAlerter + 'zabbix': ZabbixAlerter, + 'discord': alerts.DiscordAlerter } # A partial ordering of alert types. 
Relative order will be preserved in the resulting alerts list
diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml
index 474dd0750..d249b327c 100644
--- a/elastalert/schema.yaml
+++ b/elastalert/schema.yaml
@@ -371,3 +371,6 @@ properties:
    zbx_sender_port: {type: integer}
    zbx_host: {type: string}
    zbx_key: {type: string}
+
+    ## Discord
+    discord_webhook_url: {type: string}
diff --git a/example_rules/exemple_discord_any.yaml b/example_rules/exemple_discord_any.yaml
new file mode 100644
index 000000000..e19b4a4b7
--- /dev/null
+++ b/example_rules/exemple_discord_any.yaml
@@ -0,0 +1,40 @@
+# This example sends you every alert that occurred during the run interval configured in your config file.
+# Every match is sent to Discord as a separate alert. If there are 3 matches, the alerter sends 3 alerts to your Discord.
+
+name: "Exemple discord webhook alert"
+type: any
+index: your_indice_%Y-%m-%d
+use_strftime_index: true
+
+# Example query
+filter:
+- query:
+    query_string:
+      query: "id: 2501 OR id: 5503"
+
+realert:
+  minutes: 0
+
+# Only the alert text after the opening ``` is rendered as a code block. This avoids formatting the entire description section as code; only the log portion is, which makes it more readable.
+include: ["timestamp","name","computer"]
+alert_text: "Alerts at {0} on the computer {1}.\n```"
+alert_text_args: ["timestamp","computer"]
+
+# Needed
+alert:
+- discord
+discord_webhook_url: "Your discord webhook url"
+
+# ----- Optional Section -----
+
+discord_proxy: "proxy_address"
+
+# Must be quoted and must be a valid emoji supported by Discord.
+discord_emoji_title: ":lock:"
+
+# Must be a hexadecimal value, as in the example below
+discord_embed_color: 0xE24D42
+
+# This content is displayed at the very end of your embed message. If you omit these 2 lines, no footer is added.
+discord_embed_footer: "Message sent by ElastAlert from your computer"
+discord_embed_icon_url: "https://humancoders-formations.s3.amazonaws.com/uploads/course/logo/38/thumb_bigger_formation-elasticsearch.png"
\ No newline at end of file
From 3425e4a4e0643d56cb80ed8aedfdc63bbb7c58c3 Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Mon, 18 Jan 2021 02:06:55 +0900
Subject: [PATCH 0081/1065] fix ruletypes.rst

---
 docs/source/ruletypes.rst | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index d7a198037..3750d6f4b 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2297,9 +2297,7 @@ Optional:

 ``discord_emoji_title``: By default ElastAlert will use the ``:warning:`` emoji when posting to the channel. You can use a different emoji per ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored.

-``discord_http_proxy``: By default ElastAlert will not use a network http proxy to send notifications to Discord. Set this option using hostname:port if you need to use a proxy.
-
-``discord_https_proxy``: By default ElastAlert will not use a network https proxy to send notifications to Discord. Set this option using hostname:port if you need to use a proxy.
+``discord_proxy``: By default ElastAlert will not use a network proxy to send notifications to Discord. Set this option using hostname:port if you need to use a proxy.

 ``discord_proxy_login``: The Discord proxy auth username.
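With the proxy option names settled, the finished alerter can be exercised the same way as the Slack tests earlier in this series. A sketch in the style of tests/alerts_test.py, asserting the call shape used by ``DiscordAlerter.alert`` above; the webhook URL is a dummy value::

    # Test-style sketch of the Discord alerter, mirroring the Slack tests
    # in tests/alerts_test.py; the webhook URL is a dummy value.
    from unittest import mock

    from elastalert.alerts import DiscordAlerter
    from elastalert.loaders import FileRulesLoader

    rule = {
        'name': 'Test Discord Rule',
        'type': 'any',
        'discord_webhook_url': 'http://please.dontgohere.discord',
        'discord_emoji_title': ':lock:',
        'alert': [],
    }
    rules_loader = FileRulesLoader({})
    rules_loader.load_modules(rule)
    alert = DiscordAlerter(rule)

    with mock.patch('requests.post') as mock_post_request:
        alert.alert([{'@timestamp': '2021-01-18T00:00:00'}])

    # One POST to the webhook, with no proxy configured.
    mock_post_request.assert_called_once_with(
        rule['discord_webhook_url'],
        data=mock.ANY,
        headers={'Content-Type': 'application/json'},
        proxies=None,
        auth=None,
    )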
From 62c15040a03ad7c94fa5f356d1c9807b800b28c7 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sun, 17 Jan 2021 18:38:16 -0500 Subject: [PATCH 0082/1065] Add CI workflow --- .github/workflows/alt_build_test.yml | 30 ++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 .github/workflows/alt_build_test.yml diff --git a/.github/workflows/alt_build_test.yml b/.github/workflows/alt_build_test.yml new file mode 100644 index 000000000..7dab4f1c0 --- /dev/null +++ b/.github/workflows/alt_build_test.yml @@ -0,0 +1,30 @@ +# This is a basic workflow to help you get started with Actions + +name: alt_build_test + +# Controls when the action will run. +on: + # Triggers the workflow on push or pull request events but only for the master branch + push: + branches: [ alt ] + pull_request: + branches: [ alt ] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build: + # The type of runner that the job will run on + runs-on: ubuntu-latest + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v2 + + # Runs a single command using the runners shell + - name: Build and run tests + run: make test-docker From 49710aaef83f9424efcda1355a807cdf48a79f47 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sun, 17 Jan 2021 19:00:01 -0500 Subject: [PATCH 0083/1065] Update readme with fork information --- README.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/README.md b/README.md index 7abb986ea..94d20ec30 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,18 @@ +# Fork of yelp/elastalert + +The original [yelp/elastalert](https://github.com/yelp/elastalert) repository has become mostly stale, with hundreds of open PRs and over 1000 open issues. The yelp team has acknowledged that they are winding down support of Elastalert. Consequently, it is difficult to merge fixes, dependency upgrades, and new features into Elastalert. Because of this, a fork of elastalert has been created. [jertel/elastalert](https://github.com/jertel/elastalert) will be an alternate repository for updates, until a new maintainer is appointed by the Yelp team and it's clear that the new maintainers are responding to PRs and issues. + +PRs are welcome, but must include tests, when possible. PRs will not be merged if they do not pass the automated CI workflows. + +The current status of the _alt_ branch CI workflow: + +![CI Workflow](https://github.com/jertel/elastalert/workflows/alt_build_test/badge.svg) + +If you're interested in a pre-built Docker image for either the official yelp/elastalert release, or for this fork, check out the [elastalert-docker](https://hub.docker.com/r/jertel/elastalert-docker) project on Docker Hub. + + +# Original README from yelp/elastalert follows... + Recent changes: As of Elastalert 0.2.0, you must use Python 3.6. Python 2 will not longer be supported. 
[![Build Status](https://travis-ci.org/Yelp/elastalert.svg)](https://travis-ci.org/Yelp/elastalert) From 0bd519221842e1b3bdddf87f29f33c7fbee3548f Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 30 Jan 2021 01:57:28 +0900 Subject: [PATCH 0084/1065] Add Dingtalk --- README.md | 1 + docs/source/elastalert.rst | 1 + docs/source/ruletypes.rst | 60 +++++++++++++++++++++++++ elastalert/alerts.py | 90 ++++++++++++++++++++++++++++++++++++++ elastalert/loaders.py | 3 +- elastalert/schema.yaml | 7 +++ 6 files changed, 161 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 94d20ec30..2785cd730 100644 --- a/README.md +++ b/README.md @@ -79,6 +79,7 @@ Currently, we have built-in support for the following alert types: - TheHive - Zabbix - Discord +- Dingtalk Additional rule types and alerts can be easily imported or written. diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index f6683d551..37f424e55 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -56,6 +56,7 @@ Currently, we have support built in for these alert types: - TheHive - Zabbix - Discord +- Dingtalk Additional rule types and alerts can be easily imported or written. (See :ref:`Writing rule types ` and :ref:`Writing alerts `) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 3750d6f4b..bff211e0a 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2308,3 +2308,63 @@ Optional: ``discord_embed_footer``: embed footer. ``discord_embed_icon_url``: You can provide icon_url to use custom image. Provide absolute address of the pciture. + +Dingtalk +~~~~~~~~ + +Dingtalk will send notification to a Line application. The body of the notification is formatted the same as with other alerters. + +Required: + +``dingtalk_access_token``: Dingtalk access token. + +``dingtalk_msgtype``: Dingtalk msgtype. ``text``, ``markdown``, ``single_action_card``, ``action_card``. + +dingtalk_msgtype single_action_card Required: + +``dingtalk_single_title``: The title of a single button.. + +``dingtalk_single_url``: Jump link for a single button. + +dingtalk_msgtype action_card Required: + +``dingtalk_btns``: Button. + +dingtalk_msgtype action_card Optional: + +``dingtalk_btn_orientation``: "0": Buttons are arranged vertically "1": Buttons are arranged horizontally. 
+ +Example msgtype : text:: + + alert: + - dingtalk + dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' + dingtalk_msgtype: 'text' + + +Example msgtype : markdown:: + + alert: + - dingtalk + dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' + dingtalk_msgtype: 'markdown' + + +Example msgtype : single_action_card:: + + alert: + - dingtalk + dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' + dingtalk_msgtype: 'single_action_card' + dingtalk_single_title: 'test3' + dingtalk_single_url: 'https://xxxx.xxx' + + +Example msgtype : action_card:: + + alert: + - dingtalk + dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' + dingtalk_msgtype: 'action_card' + dingtalk_btn_orientation: '0' + dingtalk_btns: [{'title': 'a', 'actionURL': 'https://xxxx1.xxx'}, {'title': 'b', 'actionURL': 'https://xxxx2.xxx'}] diff --git a/elastalert/alerts.py b/elastalert/alerts.py index e44cdd68f..34c7d0fcf 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2072,3 +2072,93 @@ def alert(self, matches): def get_info(self): return {'type': 'discord', 'discord_webhook_url': self.discord_webhook_url} + + +class DingTalkAlerter(Alerter): + """ Creates a DingTalk room message for each alert """ + required_options = frozenset(['dingtalk_access_token', 'dingtalk_msgtype']) + + def __init__(self, rule): + super(DingTalkAlerter, self).__init__(rule) + self.dingtalk_access_token = self.rule.get('dingtalk_access_token') + self.dingtalk_webhook_url = 'https://oapi.dingtalk.com/robot/send?access_token=%s' % (self.dingtalk_access_token) + self.dingtalk_msgtype = self.rule.get('dingtalk_msgtype') + self.dingtalk_single_title = self.rule.get('dingtalk_single_title', 'elastalert') + self.dingtalk_single_url = self.rule.get('dingtalk_single_url', '') + self.dingtalk_btn_orientation = self.rule.get('dingtalk_btn_orientation', '') + self.dingtalk_btns = self.rule.get('dingtalk_btns', []) + self.dingtalk_proxy = self.rule.get('dingtalk_proxy', None) + self.dingtalk_proxy_login = self.rule.get('dingtalk_proxy_login', None) + self.dingtalk_proxy_password = self.rule.get('dingtalk_proxy_pass', None) + + def format_body(self, body): + return body.encode('utf8') + + def alert(self, matches): + title = self.create_title(matches) + body = self.create_alert_body(matches) + + proxies = {'https': self.dingtalk_proxy} if self.dingtalk_proxy else None + auth = HTTPProxyAuth(self.dingtalk_proxy_login, self.dingtalk_proxy_password) if self.dingtalk_proxy_login else None + headers = { + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + } + + if self.dingtalk_msgtype == 'text': + # text + payload = { + 'msgtype': self.dingtalk_msgtype, + 'text': { + 'content': body + } + } + elif self.dingtalk_msgtype == 'markdown': + # markdown + payload = { + 'msgtype': self.dingtalk_msgtype, + 'markdown': { + 'title': title, + 'text': body + } + } + elif self.dingtalk_msgtype == 'single_action_card': + # singleActionCard + payload = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': title, + 'text': body, + 'singleTitle': self.dingtalk_single_title, + 'singleURL': self.dingtalk_single_url + } + } + elif self.dingtalk_msgtype == 'action_card': + # actionCard + payload = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': title, + 'text': body + } + } + if self.dingtalk_btn_orientation != '': + payload['actionCard']['btnOrientation'] = self.dingtalk_btn_orientation + if self.dingtalk_btns: + payload['actionCard']['btns'] = self.dingtalk_btns + + try: + response = 
requests.post(self.dingtalk_webhook_url, data=json.dumps(payload, + cls=DateTimeEncoder), headers=headers, proxies=proxies, auth=auth) + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to dingtalk: %s" % e) + + elastalert_logger.info("Trigger sent to dingtalk") + + def get_info(self): + return { + "type": "dingtalk", + "dingtalk_webhook_url": self.dingtalk_webhook_url + } diff --git a/elastalert/loaders.py b/elastalert/loaders.py index f2b1701b3..766d0fe2c 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -81,7 +81,8 @@ class RulesLoader(object): 'linenotify': alerts.LineNotifyAlerter, 'hivealerter': alerts.HiveAlerter, 'zabbix': ZabbixAlerter, - 'discord': alerts.DiscordAlerter + 'discord': alerts.DiscordAlerter, + 'dingtalk': alerts.DingTalkAlerter } # A partial ordering of alert types. Relative order will be preserved in the resulting alerts list diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index d249b327c..0fd37760f 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -374,3 +374,10 @@ properties: ## Discord discord_webhook_url: {type: string} + + ### Dingtalk + dingtalk_access_token: {type: string} + dingtalk_msgtype: {type: string} + dingtalk_single_title: {type: string} + dingtalk_single_url: {type: string} + dingtalk_btn_orientation: {type: string} \ No newline at end of file From f61bb912e4c1895b668f0ab9e32a6744059cac74 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 31 Jan 2021 00:00:45 +0900 Subject: [PATCH 0085/1065] Add Chatwork Alerter / Fix a typo --- README.md | 1 + docs/source/elastalert.rst | 1 + docs/source/ruletypes.rst | 22 +++++++++++++++++++-- elastalert/alerts.py | 39 ++++++++++++++++++++++++++++++++++++++ elastalert/loaders.py | 3 ++- elastalert/schema.yaml | 6 +++++- 6 files changed, 68 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 2785cd730..fb2841426 100644 --- a/README.md +++ b/README.md @@ -80,6 +80,7 @@ Currently, we have built-in support for the following alert types: - Zabbix - Discord - Dingtalk +- Chatwork Additional rule types and alerts can be easily imported or written. diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 37f424e55..17dbf5b06 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -57,6 +57,7 @@ Currently, we have support built in for these alert types: - Zabbix - Discord - Dingtalk +- Chatwork Additional rule types and alerts can be easily imported or written. (See :ref:`Writing rule types ` and :ref:`Writing alerts `) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 0a654daed..4d4ec9635 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2292,7 +2292,7 @@ Required: Discord ~~~~~~~ -Discord will send notification to a Line application. The body of the notification is formatted the same as with other alerters. +Discord will send notification to a Discord application. The body of the notification is formatted the same as with other alerters. Required: @@ -2317,7 +2317,7 @@ Optional: Dingtalk ~~~~~~~~ -Dingtalk will send notification to a Line application. The body of the notification is formatted the same as with other alerters. +Dingtalk will send notification to a Dingtalk application. The body of the notification is formatted the same as with other alerters. 
Required:
@@ -2373,3 +2373,21 @@ Example msgtype : action_card::
     dingtalk_msgtype: 'action_card'
     dingtalk_btn_orientation: '0'
     dingtalk_btns: [{'title': 'a', 'actionURL': 'https://xxxx1.xxx'}, {'title': 'b', 'actionURL': 'https://xxxx2.xxx'}]
+
+Chatwork
+~~~~~~~~
+
+Chatwork will send notification to a Chatwork application. The body of the notification is formatted the same as with other alerters.
+
+Required:
+
+``chatwork_apikey``: Chatwork API key.
+
+``chatwork_room_id``: The ID of the room you are talking to in Chatwork. To find the room ID, open the room in your browser and take the number after "rid" at the end of the URL.
+
+Example usage::
+
+    alert:
+    - chatwork
+    chatwork_apikey: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
+    chatwork_room_id: 'xxxxxxxxx'
diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index d0b735e56..eb29e94e3 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -2191,3 +2191,42 @@ def get_info(self):
         "type": "dingtalk",
         "dingtalk_webhook_url": self.dingtalk_webhook_url
     }
+
+
+class ChatworkAlerter(Alerter):
+    """ Creates a Chatwork room message for each alert """
+    required_options = frozenset(['chatwork_apikey', 'chatwork_room_id'])
+
+    def __init__(self, rule):
+        super(ChatworkAlerter, self).__init__(rule)
+        self.chatwork_apikey = self.rule.get('chatwork_apikey')
+        self.chatwork_room_id = self.rule.get('chatwork_room_id')
+        self.url = 'https://api.chatwork.com/v2/rooms/%s/messages' % (self.chatwork_room_id)
+        self.chatwork_proxy = self.rule.get('chatwork_proxy', None)
+        self.chatwork_proxy_login = self.rule.get('chatwork_proxy_login', None)
+        self.chatwork_proxy_pass = self.rule.get('chatwork_proxy_pass', None)
+
+    def alert(self, matches):
+        body = self.create_alert_body(matches)
+
+        headers = {'X-ChatWorkToken': self.chatwork_apikey}
+        # set https proxy, if it was provided
+        proxies = {'https': self.chatwork_proxy} if self.chatwork_proxy else None
+        auth = HTTPProxyAuth(self.chatwork_proxy_login, self.chatwork_proxy_pass) if self.chatwork_proxy_login else None
+        params = {'body': body}
+
+        try:
+            response = requests.post(self.url, params=params, headers=headers, proxies=proxies, auth=auth)
+            warnings.resetwarnings()
+            response.raise_for_status()
+        except RequestException as e:
+            raise EAException("Error posting to Chatwork: %s. Details: %s" % (e, "" if e.response is None else e.response.text))
+
+        elastalert_logger.info(
+            "Alert sent to Chatwork room %s" % self.chatwork_room_id)
+
+    def get_info(self):
+        return {
+            "type": "chatwork",
+            "chatwork_room_id": self.chatwork_room_id
+        }
diff --git a/elastalert/loaders.py b/elastalert/loaders.py
index 766d0fe2c..36aa74515 100644
--- a/elastalert/loaders.py
+++ b/elastalert/loaders.py
@@ -82,7 +82,8 @@ class RulesLoader(object):
         'hivealerter': alerts.HiveAlerter,
         'zabbix': ZabbixAlerter,
         'discord': alerts.DiscordAlerter,
-        'dingtalk': alerts.DingTalkAlerter
+        'dingtalk': alerts.DingTalkAlerter,
+        'chatwork': alerts.ChatworkAlerter
     }

     # A partial ordering of alert types. 
Relative order will be preserved in the resulting alerts list
diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml
index ca0dfd566..aab13c01a 100644
--- a/elastalert/schema.yaml
+++ b/elastalert/schema.yaml
@@ -382,4 +382,8 @@ properties:
    dingtalk_msgtype: {type: string}
    dingtalk_single_title: {type: string}
    dingtalk_single_url: {type: string}
-    dingtalk_btn_orientation: {type: string}
\ No newline at end of file
+    dingtalk_btn_orientation: {type: string}
+
+    ### Chatwork
+    chatwork_apikey: {type: string}
+    chatwork_room_id: {type: string}
\ No newline at end of file
From 6b6af45a02e4f3314e67f3dee7d753a646546f1d Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Fri, 12 Feb 2021 22:54:31 +0900
Subject: [PATCH 0086/1065] Update requirements-dev.txt

---
 requirements-dev.txt | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/requirements-dev.txt b/requirements-dev.txt
index d15887c01..f99a6f96b 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,9 +1,10 @@
 -r requirements.txt
-coverage==4.5.4
+coverage==5.4
 flake8
+pluggy>=0.12.0
 pre-commit
-pylint<1.4
-pytest<3.3.0
+pylint<2.7
+pytest<3.7.0
 setuptools
 sphinx_rtd_theme
-tox==3.20.1
+tox==3.21.4
From 87449308778a613e234b355e00437aa40bb6c0c8 Mon Sep 17 00:00:00 2001
From: Dilaver Demirel
Date: Sun, 21 Feb 2021 18:11:56 +0300
Subject: [PATCH 0087/1065] Create a new rule config parameter,
 "use_local_time_for_query". When it is set, "run_query" converts its start
 and end times from UTC to local time, so filters can be written against
 local time and indices whose @timestamp field uses a timezone other than
 UTC are queried correctly.

---
 elastalert/elastalert.py | 5 +++++
 elastalert/loaders.py | 1 +
 elastalert/util.py | 5 ++++-
 3 files changed, 10 insertions(+), 1 deletion(-)

diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py
index 7e3f24c10..43d249ff2 100755
--- a/elastalert/elastalert.py
+++ b/elastalert/elastalert.py
@@ -53,6 +53,7 @@
 from .util import ts_now
 from .util import ts_to_dt
 from .util import unix_to_dt
+from .util import ts_utc_to_local


 class ElastAlerter(object):
@@ -629,6 +630,10 @@ def run_query(self, rule, start=None, end=None, scroll=False):
         if end is None:
             end = ts_now()

+        if rule.get('use_local_time_for_query'):
+            start = ts_utc_to_local(start)
+            end = ts_utc_to_local(end)
+
         # Reset hit counter and query
         rule_inst = rule['type']
         rule['scrolling_cycle'] = rule.get('scrolling_cycle', 0) + 1
diff --git a/elastalert/loaders.py b/elastalert/loaders.py
index 36aa74515..e49833649 100644
--- a/elastalert/loaders.py
+++ b/elastalert/loaders.py
@@ -283,6 +283,7 @@ def load_options(self, rule, conf, filename, args=None):
         rule.setdefault('use_local_time', True)
         rule.setdefault('description', "")
         rule.setdefault('jinja_root_name', "_data")
+        rule.setdefault('use_local_time_for_query', False)

         # Set timestamp_type conversion function, used when generating queries and processing hits
         rule['timestamp_type'] = rule['timestamp_type'].strip().lower()
diff --git a/elastalert/util.py b/elastalert/util.py
index 3e9c9f664..90021ac2d 100644
--- a/elastalert/util.py
+++ b/elastalert/util.py
@@ -185,6 +185,9 @@ def dt_to_ts_with_format(dt, ts_format):
 def ts_now():
     return datetime.datetime.utcnow().replace(tzinfo=dateutil.tz.tzutc())

+def ts_utc_to_local(ts):
+    """Convert utc time to local time."""
+    return ts.astimezone(dateutil.tz.tzlocal())

 def inc_ts(timestamp, milliseconds=1):
     """Increment a timestamp by milliseconds."""
     dt = ts_to_dt(timestamp)
@@ -202,7 +205,7 @@ def pretty_ts(timestamp, tz=True):
     dt = ts_to_dt(timestamp)
     if tz:
         dt = 
dt.astimezone(dateutil.tz.tzlocal()) - return dt.strftime('%Y-%m-%d %H:%M %Z') + return dt.strftime('%Y-%m-%d %H:%M %z') def ts_add(ts, td): From 0fa2e9dfb27f028712659be615785f7f74d083cd Mon Sep 17 00:00:00 2001 From: Dilaver Demirel Date: Sun, 21 Feb 2021 18:30:07 +0300 Subject: [PATCH 0088/1065] Adds information about the "use_local_time_for_query" parameter. --- docs/source/ruletypes.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 4d4ec9635..f01429e37 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -102,6 +102,8 @@ Rule Configuration Cheat Sheet +--------------------------------------------------------------+ | | ``scan_entire_timeframe`` (bool, default False) | | +--------------------------------------------------------------+ | +| ``use_local_time_for_query`` (bool, default False) | | ++--------------------------------------------------------------+ | | ``import`` (string) | | | | | | IGNORED IF ``use_count_query`` or ``use_terms_query`` is true| | @@ -695,6 +697,11 @@ scan the same range again, triggering duplicate alerts. Some rules and alerts require additional options, which also go in the top level of the rule configuration file. +use_local_time_for_query +^^^^^^^^^^^^^^^^^^^^^ + +``use_local_time_for_query``: Whether to convert UTC time to the local time zone in rule queries. +If false, start and end time of query will be used UTC. (Optional, boolean, default false) .. _testing : From 0dd8cd8d1b83b7442e84dd488feba85a77272937 Mon Sep 17 00:00:00 2001 From: Dilaver Demirel Date: Sun, 21 Feb 2021 18:46:52 +0300 Subject: [PATCH 0089/1065] Fixes the documentation error. --- docs/source/ruletypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index f01429e37..a9919973c 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -698,7 +698,7 @@ scan the same range again, triggering duplicate alerts. Some rules and alerts require additional options, which also go in the top level of the rule configuration file. use_local_time_for_query -^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^ ``use_local_time_for_query``: Whether to convert UTC time to the local time zone in rule queries. If false, start and end time of query will be used UTC. (Optional, boolean, default false) From b27a50ee6dae55ca6388a0e087da60842f912ebf Mon Sep 17 00:00:00 2001 From: Dilaver Demirel Date: Sun, 21 Feb 2021 18:52:59 +0300 Subject: [PATCH 0090/1065] Fixes styling problems. --- elastalert/util.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/elastalert/util.py b/elastalert/util.py index 90021ac2d..39fadd353 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -185,10 +185,12 @@ def dt_to_ts_with_format(dt, ts_format): def ts_now(): return datetime.datetime.utcnow().replace(tzinfo=dateutil.tz.tzutc()) + def ts_utc_to_local(ts): """Convert utc time to local time.""" return ts.astimezone(dateutil.tz.tzlocal()) + def inc_ts(timestamp, milliseconds=1): """Increment a timestamp by milliseconds.""" dt = ts_to_dt(timestamp) From 55562c2fc7f936c5e333a0bf6c413e7c9e2b240a Mon Sep 17 00:00:00 2001 From: Dilaver Demirel Date: Tue, 23 Feb 2021 09:38:47 +0300 Subject: [PATCH 0091/1065] Rename "use_local_time_for_query" to "query_timezone". If "query_timezone" set, elasticsearch query start and end date will convert from UTC to specified time zone. 
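A worked example of the conversion this rename describes, using the same ``dateutil`` call as the renamed helper; ``Europe/Istanbul`` is the timezone used in the docs example and sits at UTC+3::

    # The UTC-to-zone conversion performed by ts_utc_to_tz, shown on a
    # fixed timestamp; Europe/Istanbul is UTC+3.
    import datetime

    import dateutil.tz

    start = datetime.datetime(2021, 2, 23, 9, 0, 0, tzinfo=dateutil.tz.tzutc())
    local_start = start.astimezone(dateutil.tz.gettz('Europe/Istanbul'))

    print(start.isoformat())        # 2021-02-23T09:00:00+00:00
    print(local_start.isoformat())  # 2021-02-23T12:00:00+03:00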
--- docs/source/ruletypes.rst | 12 +++++++----- elastalert/elastalert.py | 9 +++++---- elastalert/loaders.py | 2 +- elastalert/util.py | 4 ++-- 4 files changed, 15 insertions(+), 12 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index a9919973c..e2efd1e12 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -102,7 +102,7 @@ Rule Configuration Cheat Sheet +--------------------------------------------------------------+ | | ``scan_entire_timeframe`` (bool, default False) | | +--------------------------------------------------------------+ | -| ``use_local_time_for_query`` (bool, default False) | | +| ``query_timezone`` (string, default empty string) | | +--------------------------------------------------------------+ | | ``import`` (string) | | | | | @@ -697,11 +697,13 @@ scan the same range again, triggering duplicate alerts. Some rules and alerts require additional options, which also go in the top level of the rule configuration file. -use_local_time_for_query -^^^^^^^^^^^^^^^^^^^^^^^^ +query_timezone +^^^^^^^^^^^^^^ + +``query_timezone``: Whether to convert UTC time to the specified time zone in rule queries. +If not set, start and end time of query will be used UTC. (Optional, string, default empty string) -``use_local_time_for_query``: Whether to convert UTC time to the local time zone in rule queries. -If false, start and end time of query will be used UTC. (Optional, boolean, default false) +Example value : query_timezone: "Europe/Istanbul" .. _testing : diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 43d249ff2..e689619f7 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -53,7 +53,7 @@ from .util import ts_now from .util import ts_to_dt from .util import unix_to_dt -from .util import ts_utc_to_local +from .util import ts_utc_to_tz class ElastAlerter(object): @@ -630,9 +630,10 @@ def run_query(self, rule, start=None, end=None, scroll=False): if end is None: end = ts_now() - if rule.get('use_local_time_for_query'): - start = ts_utc_to_local(start) - end = ts_utc_to_local(end) + if rule.get('query_timezone') != "": + elastalert_logger.info("Query start and end time converting UTC to query_timezone : {}".format(rule.get('query_timezone'))) + start = ts_utc_to_tz(start, rule.get('query_timezone')) + end = ts_utc_to_tz(end, rule.get('query_timezone')) # Reset hit counter and query rule_inst = rule['type'] diff --git a/elastalert/loaders.py b/elastalert/loaders.py index e49833649..bee5d4754 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -283,7 +283,7 @@ def load_options(self, rule, conf, filename, args=None): rule.setdefault('use_local_time', True) rule.setdefault('description', "") rule.setdefault('jinja_root_name', "_data") - rule.setdefault('use_local_time_for_query', False) + rule.setdefault('query_timezone', "") # Set timestamp_type conversion function, used when generating queries and processing hits rule['timestamp_type'] = rule['timestamp_type'].strip().lower() diff --git a/elastalert/util.py b/elastalert/util.py index 39fadd353..f3f7dde14 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -186,9 +186,9 @@ def ts_now(): return datetime.datetime.utcnow().replace(tzinfo=dateutil.tz.tzutc()) -def ts_utc_to_local(ts): +def ts_utc_to_tz(ts,tz_name): """Convert utc time to local time.""" - return ts.astimezone(dateutil.tz.tzlocal()) + return ts.astimezone(dateutil.tz.gettz(tz_name)) def inc_ts(timestamp, milliseconds=1): From 
b5a55edb5e9139924df89f85a176c0fab7b28b6a Mon Sep 17 00:00:00 2001 From: Dilaver Demirel Date: Tue, 23 Feb 2021 09:43:38 +0300 Subject: [PATCH 0092/1065] Fix styling problem. --- elastalert/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/util.py b/elastalert/util.py index f3f7dde14..658c6bf4d 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -186,7 +186,7 @@ def ts_now(): return datetime.datetime.utcnow().replace(tzinfo=dateutil.tz.tzutc()) -def ts_utc_to_tz(ts,tz_name): +def ts_utc_to_tz(ts, tz_name): """Convert utc time to local time.""" return ts.astimezone(dateutil.tz.gettz(tz_name)) From 6f35f8205d615858e8a5bc904738e886ca71636f Mon Sep 17 00:00:00 2001 From: Mostafa Hussein Date: Wed, 24 Feb 2021 22:11:26 +0200 Subject: [PATCH 0093/1065] allow alerta_severity to be set dynamically --- elastalert/alerts.py | 2 +- elastalert/schema.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index eb29e94e3..71071d316 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1856,7 +1856,7 @@ def get_json_payload(self, match): alerta_payload_dict = { 'resource': resolve_string(self.resource, match, self.missing_text), - 'severity': self.severity, + 'severity': resolve_string(self.severity, match), 'timeout': self.timeout, 'createTime': createTime, 'type': self.type, diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index aab13c01a..516338e37 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -343,7 +343,7 @@ properties: ### Alerta alerta_api_url: {type: string} alerta_api_key: {type: string} - alerta_severity: {enum: [unknown, security, debug, informational, ok, normal, cleared, indeterminate, warning, minor, major, critical]} + alerta_severity: {type: string} alerta_resource: {type: string} # Python format string alerta_environment: {type: string} # Python format string alerta_origin: {type: string} # Python format string @@ -386,4 +386,4 @@ properties: ### Chatwork chatwork_apikey: {type: string} - chatwork_room_id: {type: string} \ No newline at end of file + chatwork_room_id: {type: string} From d54ddf0258add37ed14d0a5b0c0762e24902a1b1 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 26 Feb 2021 02:08:38 +0900 Subject: [PATCH 0094/1065] Kibana Discover support kibana 7.11 --- docs/source/ruletypes.rst | 4 ++-- elastalert/kibana_discover.py | 2 +- elastalert/schema.yaml | 2 +- tests/kibana_discover_test.py | 12 ++++++------ 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index e2efd1e12..74b3bb0da 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -555,9 +555,9 @@ The currently supported versions of Kibana Discover are: - `5.6` - `6.0`, `6.1`, `6.2`, `6.3`, `6.4`, `6.5`, `6.6`, `6.7`, `6.8` -- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10` +- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10`, `7.11` -``kibana_discover_version: '7.10'`` +``kibana_discover_version: '7.11'`` kibana_discover_index_pattern_id ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index ed8b681db..0cbbc2116 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -15,7 +15,7 @@ kibana_default_timedelta = datetime.timedelta(minutes=10) kibana5_kibana6_versions = frozenset(['5.6', '6.0', '6.1', '6.2', '6.3', '6.4', '6.5', '6.6', '6.7', 
'6.8']) -kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10']) +kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11']) def generate_kibana_discover_url(rule, match): ''' Creates a link for a kibana discover app. ''' diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 516338e37..65f3dc3e7 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -219,7 +219,7 @@ properties: ### Kibana Discover App Link generate_kibana_discover_url: {type: boolean} kibana_discover_app_url: {type: string, format: uri} - kibana_discover_version: {type: string, enum: ['7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} + kibana_discover_version: {type: string, enum: ['7.11', '7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} kibana_discover_index_pattern_id: {type: string, minLength: 1} kibana_discover_columns: {type: array, items: {type: string, minLength: 1}, minItems: 1} kibana_discover_from_timedelta: *timedelta diff --git a/tests/kibana_discover_test.py b/tests/kibana_discover_test.py index a508c25d9..30191b89b 100644 --- a/tests/kibana_discover_test.py +++ b/tests/kibana_discover_test.py @@ -38,7 +38,7 @@ def test_generate_kibana_discover_url_with_kibana_5x_and_6x(kibana_version): assert url == expectedUrl -@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10']) +@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11']) def test_generate_kibana_discover_url_with_kibana_7x(kibana_version): url = generate_kibana_discover_url( rule={ @@ -171,7 +171,7 @@ def test_generate_kibana_discover_url_with_from_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.10', + 'kibana_discover_version': '7.11', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timestamp_field': 'timestamp' @@ -204,7 +204,7 @@ def test_generate_kibana_discover_url_with_from_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.10', + 'kibana_discover_version': '7.11', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -238,7 +238,7 @@ def test_generate_kibana_discover_url_with_to_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.10', + 'kibana_discover_version': '7.11', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_to_timedelta': timedelta(hours=1), 'timestamp_field': 'timestamp' @@ -271,7 +271,7 @@ def test_generate_kibana_discover_url_with_to_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.10', + 'kibana_discover_version': '7.11', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 
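The two new options above can be exercised like the other Slack tests in this series. A sketch, assuming ``slack_parse_override`` and ``slack_text_string`` map directly onto the payload's ``parse`` and ``text`` keys, as the defaults in the existing tests suggest; the webhook URL is a dummy::

    # Sketch exercising the two Slack options documented above, in the
    # style of the existing tests; assumes they map onto the payload's
    # "parse" and "text" keys.
    import json
    from unittest import mock

    from elastalert.alerts import SlackAlerter
    from elastalert.loaders import FileRulesLoader

    rule = {
        'name': 'Test Rule',
        'type': 'any',
        'slack_webhook_url': 'http://please.dontgohere.slack',
        'slack_parse_override': 'full',
        'slack_text_string': 'heads up',
        'alert': [],
    }
    rules_loader = FileRulesLoader({})
    rules_loader.load_modules(rule)
    alert = SlackAlerter(rule)

    with mock.patch('requests.post') as mock_post_request:
        alert.alert([{'@timestamp': '2021-01-01T00:00:00'}])

    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
    assert actual_data['parse'] == 'full'
    assert actual_data['text'] == 'heads up'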
'kibana_discover_to_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -305,7 +305,7 @@ def test_generate_kibana_discover_url_with_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.10', + 'kibana_discover_version': '7.11', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'timeframe': timedelta(minutes=20), 'timestamp_field': 'timestamp' From 6c142ce13d371717c17585bf29fb2d6260b74f52 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 26 Feb 2021 02:18:03 +0900 Subject: [PATCH 0095/1065] Update pylint and tox pylint from <2.7 to <2.8 tox from 3.21.4 to 3.22.0 --- requirements-dev.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index f99a6f96b..6b0642e5b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,8 +3,8 @@ coverage==5.4 flake8 pluggy>=0.12.0 pre-commit -pylint<2.7 +pylint<2.8 pytest<3.7.0 setuptools sphinx_rtd_theme -tox==3.21.4 +tox==3.22.0 From a78620518d2e89eb6e76356d6a2da204107dbaa7 Mon Sep 17 00:00:00 2001 From: feroz Date: Fri, 26 Feb 2021 15:02:12 +0000 Subject: [PATCH 0096/1065] Add a default description for TheHive alerts if one isn't provided Use the default alert body created by ElastAlert, in line with the other alerters --- elastalert/alerts.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 71071d316..c70128c44 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1992,11 +1992,12 @@ def alert(self, matches): alert_config = { 'artifacts': artifacts, - 'sourceRef': str(uuid.uuid4())[0:6], - 'customFields': {}, 'caseTemplate': None, + 'customFields': {}, + 'date': int(time.time()) * 1000, + 'description': self.create_alert_body(matches), + 'sourceRef': str(uuid.uuid4())[0:6], 'title': '{rule[index]}_{rule[name]}'.format(**context), - 'date': int(time.time()) * 1000 } alert_config.update(self.rule.get('hive_alert_config', {})) custom_fields = {} From 5ce0a44838e056ccf1fd7d6297eef02aa12acb14 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 28 Feb 2021 12:44:15 +0900 Subject: [PATCH 0097/1065] added docs slack slack_parse_override slack_text_string --- docs/source/ruletypes.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 74b3bb0da..b2c64b249 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1792,6 +1792,10 @@ Provide absolute address of the pciture. ``slack_msg_color``: By default the alert will be posted with the 'danger' color. You can also use 'good' or 'warning' colors. +``slack_parse_override``: By default the notification message is escaped 'none'. You can also use 'full'. + +``slack_text_string``: Notification message you want to add. + ``slack_proxy``: By default ElastAlert will not use a network proxy to send notifications to Slack. Set this option using ``hostname:port`` if you need to use a proxy. ``slack_alert_fields``: You can add additional fields to your slack alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`. 
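For context on the two new Slack options documented above, they ultimately shape the JSON payload posted to Slack's incoming-webhook endpoint. A minimal sketch of how such a payload might be assembled (illustrative wiring only, based on Slack's webhook message format -- not the alerter's exact internals; the webhook URL and proxy are placeholders):

```python
import json
import requests  # already a dependency of ElastAlert's alerters

def build_slack_payload(body, parse_override='none', text_string='',
                        msg_color='danger', alert_fields=None):
    # parse_override maps to slack_parse_override ('none' or 'full'),
    # text_string to slack_text_string, msg_color to slack_msg_color,
    # and alert_fields to slack_alert_fields (title/value/short dicts).
    return {
        'parse': parse_override,
        'text': text_string,
        'attachments': [{
            'color': msg_color,
            'text': body,                  # the rendered alert body
            'fields': alert_fields or [],
        }],
    }

payload = build_slack_payload('Rule fired: example_frequency',
                              text_string='heads up',
                              alert_fields=[{'title': 'Host', 'value': 'web-1', 'short': True}])
requests.post('https://hooks.slack.com/services/T000/B000/XXXXXXXX',  # placeholder URL
              data=json.dumps(payload),
              headers={'content-type': 'application/json'},
              proxies={'https': 'proxy.example.com:3128'})  # optional, cf. slack_proxy
```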
From 450ebe4a530b269c849380d2b5d297444569ec73 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 28 Feb 2021 19:37:57 +0900 Subject: [PATCH 0098/1065] add docs jira_assignee --- docs/source/ruletypes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 74b3bb0da..8a850b22a 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1566,6 +1566,8 @@ For an example JIRA account file, see ``example_rules/jira_acct.yaml``. The acco Optional: +``jira_assignee``: Assigns an issue to a user. + ``jira_component``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. This is provided for backwards compatibility and will eventually be deprecated. It is preferable to use the plural ``jira_components`` instead. ``jira_components``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. From c5f1f1c0287ade43750fe7f3252b1dd8fc82ec7a Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 1 Mar 2021 18:04:04 +0000 Subject: [PATCH 0099/1065] Update README to point to new docs, deprecate old README - Update the README to point to the new ReadTheDocs link - Move the old Elastalert README to a new file to reduce confusion - Add some more text to the new README that covers the essential points of the old README --- README-old.md | 373 +++++++++++++++++++++++++++++++++++++++++++++++ README.md | 392 ++++---------------------------------------------- 2 files changed, 400 insertions(+), 365 deletions(-) create mode 100644 README-old.md diff --git a/README-old.md b/README-old.md new file mode 100644 index 000000000..bbdf39d97 --- /dev/null +++ b/README-old.md @@ -0,0 +1,373 @@ +# Deprecated ElastAlert README + +*The documentation below refers to a previous ElastAlert version. For the latest version, see [here][0]* + +Recent changes: As of Elastalert 0.2.0, you must use Python 3.6. Python 2 will not longer be supported. + +[![Build Status](https://travis-ci.org/Yelp/elastalert.svg)](https://travis-ci.org/Yelp/elastalert) +[![Join the chat at https://gitter.im/Yelp/elastalert](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/Yelp/elastalert?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) + +## ElastAlert - [Read the Docs](http://elastalert.readthedocs.org). +### Easy & Flexible Alerting With Elasticsearch + +ElastAlert is a simple framework for alerting on anomalies, spikes, or other patterns of interest from data in Elasticsearch. + +ElastAlert works with all versions of Elasticsearch. + +At Yelp, we use Elasticsearch, Logstash and Kibana for managing our ever increasing amount of data and logs. +Kibana is great for visualizing and querying data, but we quickly realized that it needed a companion tool for alerting +on inconsistencies in our data. Out of this need, ElastAlert was created. + +If you have data being written into Elasticsearch in near real time and want to be alerted when that data matches certain patterns, ElastAlert is the tool for you. If you can see it in Kibana, ElastAlert can alert on it. + +## Overview + +We designed ElastAlert to be reliable, highly modular, and easy to set up and configure. + +It works by combining Elasticsearch with two types of components, rule types and alerts. +Elasticsearch is periodically queried and the data is passed to the rule type, which determines when +a match is found. 
When a match occurs, it is given to one or more alerts, which take action based on the match. + +This is configured by a set of rules, each of which defines a query, a rule type, and a set of alerts. + +Several rule types with common monitoring paradigms are included with ElastAlert: + +- Match where there are at least X events in Y time" (``frequency`` type) +- Match when the rate of events increases or decreases" (``spike`` type) +- Match when there are less than X events in Y time" (``flatline`` type) +- Match when a certain field matches a blacklist/whitelist" (``blacklist`` and ``whitelist`` type) +- Match on any event matching a given filter" (``any`` type) +- Match when a field has two different values within some time" (``change`` type) +- Match when a never before seen term appears in a field" (``new_term`` type) +- Match when the number of unique values for a field is above or below a threshold (``cardinality`` type) + +Currently, we have built-in support for the following alert types: + +- Command +- Email +- JIRA +- OpsGenie +- AWS SNS +- MS Teams +- Slack +- Mattermost +- Telegram +- GoogleChat +- PagerDuty +- PagerTree +- Exotel +- Twilio +- Splunk On-Call (Formerly VictorOps) +- Gitter +- ServiceNow +- Debug +- Stomp +- Alerta +- HTTP POST +- Line Notify +- TheHive +- Zabbix +- Discord +- Dingtalk +- Chatwork + +Additional rule types and alerts can be easily imported or written. + +In addition to this basic usage, there are many other features that make alerts more useful: + +- Alerts link to Kibana dashboards +- Aggregate counts for arbitrary fields +- Combine alerts into periodic reports +- Separate alerts by using a unique key field +- Intercept and enhance match data + +To get started, check out `Running ElastAlert For The First Time` in the [documentation](http://elastalert.readthedocs.org). + +## Running ElastAlert +You can either install the latest released version of ElastAlert using pip: + +```pip install elastalert``` + +or you can clone the ElastAlert repository for the most recent changes: + +```git clone https://github.com/Yelp/elastalert.git``` + +Install the module: + +```pip install "setuptools>=11.3"``` + +```python setup.py install``` + +The following invocation can be used to run ElastAlert after installing + +``$ elastalert [--debug] [--verbose] [--start ] [--end ] [--rule ] [--config ]`` + +``--debug`` will print additional information to the screen as well as suppresses alerts and instead prints the alert body. Not compatible with `--verbose`. + +``--verbose`` will print additional information without suppressing alerts. Not compatible with `--debug.` + +``--start`` will begin querying at the given timestamp. By default, ElastAlert will begin querying from the present. +Timestamp format is ``YYYY-MM-DDTHH-MM-SS[-/+HH:MM]`` (Note the T between date and hour). +Eg: ``--start 2014-09-26T12:00:00`` (UTC) or ``--start 2014-10-01T07:30:00-05:00`` + +``--end`` will cause ElastAlert to stop querying at the given timestamp. By default, ElastAlert will continue +to query indefinitely. + +``--rule`` will allow you to run only one rule. It must still be in the rules folder. +Eg: ``--rule this_rule.yaml`` + +``--config`` allows you to specify the location of the configuration. By default, it is will look for config.yaml in the current directory. 
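+Since ``--start`` and ``--end`` values are easy to mistype, a quick sanity check of a
+timestamp before launching a backfill can save a wasted run. A small sketch, using
+python-dateutil (which ElastAlert already depends on):
+
+```python
+from dateutil import parser
+
+# Parses ISO 8601 values like the examples above; malformed input
+# raises ValueError, so typos surface before any queries are made.
+ts = parser.parse('2014-10-01T07:30:00-05:00')
+print(ts.isoformat())
+```
+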
+ +## Third Party Tools And Extras +### Kibana plugin +![img](https://raw.githubusercontent.com/bitsensor/elastalert-kibana-plugin/master/showcase.gif) +Available at the [ElastAlert Kibana plugin repository](https://github.com/bitsensor/elastalert-kibana-plugin). + +### Docker +A [Dockerized version](https://github.com/bitsensor/elastalert) of ElastAlert including a REST api is build from `master` to `bitsensor/elastalert:latest`. + +```bash +git clone https://github.com/bitsensor/elastalert.git; cd elastalert +docker run -d -p 3030:3030 -p 3333:3333 \ + -v `pwd`/config/elastalert.yaml:/opt/elastalert/config.yaml \ + -v `pwd`/config/config.json:/opt/elastalert-server/config/config.json \ + -v `pwd`/rules:/opt/elastalert/rules \ + -v `pwd`/rule_templates:/opt/elastalert/rule_templates \ + --net="host" \ + --name elastalert bitsensor/elastalert:latest +``` + +### ElastAlert-Docker + +This Dockerfile will build a Docker image for Elastalert. This image is compatible with the accompanying Helm chart for Kubernetes. + +https://github.com/jertel/elastalert-docker + +### ElastAlert uses ElastAlert Helm Chart almost as it is *Use original Docker Image to fix bugs + +[ElastAlert Server Helm Chart](https://github.com/daichi703n/elastalert-helm) + +[Praeco Helm Chart](https://github.com/daichi703n/praeco-helm) + +[Installing Praeco (ElastAlert GUI) into Kubernetes with Helm](https://en-designetwork.daichi703n.com/entry/2020/02/24/praeco-helm-kubernetes) + +### Praeco +![Praeco screenshot](https://user-images.githubusercontent.com/611996/47752071-7c4a9080-dc61-11e8-8ccf-2196f13429b2.png) +[Praeco](https://github.com/johnsusek/praeco) is a free open source GUI for ElastAlert. + +## Documentation + +Read the documentation at [Read the Docs](http://elastalert.readthedocs.org). + +To build a html version of the docs locally + +``` +pip install sphinx_rtd_theme sphinx +cd docs +make html +``` + +View in browser at build/html/index.html + +## Configuration + +See config.yaml.example for details on configuration. + +## Example rules + +Examples of different types of rules can be found in example_rules/. + +- ``example_spike.yaml`` is an example of the "spike" rule type, which allows you to alert when the rate of events, averaged over a time period, +increases by a given factor. This example will send an email alert when there are 3 times more events matching a filter occurring within the +last 2 hours than the number of events in the previous 2 hours. + +- ``example_frequency.yaml`` is an example of the "frequency" rule type, which will alert when there are a given number of events occuring +within a time period. This example will send an email when 50 documents matching a given filter occur within a 4 hour timeframe. + +- ``example_change.yaml`` is an example of the "change" rule type, which will alert when a certain field in two documents changes. In this example, +the alert email is sent when two documents with the same 'username' field but a different value of the 'country_name' field occur within 24 hours +of each other. + +- ``example_new_term.yaml`` is an example of the "new term" rule type, which alerts when a new value appears in a field or fields. In this example, +an email is sent when a new value of ("username", "computer") is encountered in example login logs. + +## Frequently Asked Questions + +### My rule is not getting any hits? + +So you've managed to set up ElastAlert, write a rule, and run it, but nothing happens, or it says ``0 query hits``. 
First of all, we recommend using the command ``elastalert-test-rule rule.yaml`` to debug. It will show you how many documents match your filters for the last 24 hours (or more, see ``--help``), and then shows you if any alerts would have fired. If you have a filter in your rule, remove it and try again. This will show you if the index is correct and that you have at least some documents. If you have a filter in Kibana and want to recreate it in ElastAlert, you probably want to use a query string. Your filter will look like + +``` +filter: +- query: + query_string: + query: "foo: bar AND baz: abc*" +``` +If you receive an error that Elasticsearch is unable to parse it, it's likely the YAML is not spaced correctly, and the filter is not in the right format. If you are using other types of filters, like ``term``, a common pitfall is not realizing that you may need to use the analyzed token. This is the default if you are using Logstash. For example, + +``` +filter: +- term: + foo: "Test Document" +``` + +will not match even if the original value for ``foo`` was exactly "Test Document". Instead, you want to use ``foo.raw``. If you are still having trouble troubleshooting why your documents do not match, try running ElastAlert with ``--es_debug_trace /path/to/file.log``. This will log the queries made to Elasticsearch in full so that you can see exactly what is happening. + +### I got hits, why didn't I get an alert? + +If you got logs that had ``X query hits, 0 matches, 0 alerts sent``, it depends on the ``type`` why you didn't get any alerts. If ``type: any``, a match will occur for every hit. If you are using ``type: frequency``, ``num_events`` must occur within ``timeframe`` of each other for a match to occur. Different rules apply for different rule types. + +If you see ``X matches, 0 alerts sent``, this may occur for several reasons. If you set ``aggregation``, the alert will not be sent until after that time has elapsed. If you have gotten an alert for this same rule before, that rule may be silenced for a period of time. The default is one minute between alerts. If a rule is silenced, you will see ``Ignoring match for silenced rule`` in the logs. + +If you see ``X alerts sent`` but didn't get any alert, it's probably related to the alert configuration. If you are using the ``--debug`` flag, you will not receive any alerts. Instead, the alert text will be written to the console. Use ``--verbose`` to achieve the same affects without preventing alerts. If you are using email alert, make sure you have it configured for an SMTP server. By default, it will connect to localhost on port 25. It will also use the word "elastalert" as the "From:" address. Some SMTP servers will reject this because it does not have a domain while others will add their own domain automatically. See the email section in the documentation for how to configure this. + +### Why did I only get one alert when I expected to get several? + +There is a setting called ``realert`` which is the minimum time between two alerts for the same rule. Any alert that occurs within this time will simply be dropped. The default value for this is one minute. If you want to receive an alert for every single match, even if they occur right after each other, use + +``` +realert: + minutes: 0 +``` + +You can of course set it higher as well. + +### How can I prevent duplicate alerts? + +By setting ``realert``, you will prevent the same rule from alerting twice in an amount of time. 
+ +``` +realert: + days: 1 +``` + +You can also prevent duplicates based on a certain field by using ``query_key``. For example, to prevent multiple alerts for the same user, you might use + +``` +realert: + hours: 8 +query_key: user +``` + +Note that this will also affect the way many rule types work. If you are using ``type: frequency`` for example, ``num_events`` for a single value of ``query_key`` must occur before an alert will be sent. You can also use a compound of multiple fields for this key. For example, if you only wanted to receieve an alert once for a specific error and hostname, you could use + +``` +query_key: [error, hostname] +``` + +Internally, this works by creating a new field for each document called ``field1,field2`` with a value of ``value1,value2`` and using that as the ``query_key``. + +The data for when an alert will fire again is stored in Elasticsearch in the ``elastalert_status`` index, with a ``_type`` of ``silence`` and also cached in memory. + +### How can I change what's in the alert? + +You can use the field ``alert_text`` to add custom text to an alert. By setting ``alert_text_type: alert_text_only`` Or ``alert_text_type: alert_text_jinja``, it will be the entirety of the alert. You can also add different fields from the alert: + +With ``alert_text_type: alert_text_jinja`` by using [Jinja2](https://pypi.org/project/Jinja2/) Template. + +``` +alert_text_type: alert_text_jinja + +alert_text: | + Alert triggered! *({{num_hits}} Matches!)* + Something happened with {{username}} ({{email}}) + {{description|truncate}} + +``` + +> Top fields are accessible via `{{field_name}}` or `{{_data['field_name']}}`, `_data` is useful when accessing *fields with dots in their keys*, as Jinja treat dot as a nested field. +> If `_data` conflicts with your top level data, use ``jinja_root_name`` to change its name. + +With ``alert_text_type: alert_text_only`` by using Python style string formatting and ``alert_text_args``. For example + +``` +alert_text: "Something happened with {0} at {1}" +alert_text_type: alert_text_only +alert_text_args: ["username", "@timestamp"] +``` + +You can also limit the alert to only containing certain fields from the document by using ``include``. + +``` +include: ["ip_address", "hostname", "status"] +``` + +### My alert only contains data for one event, how can I see more? + +If you are using ``type: frequency``, you can set the option ``attach_related: true`` and every document will be included in the alert. An alternative, which works for every type, is ``top_count_keys``. This will show the top counts for each value for certain fields. For example, if you have + +``` +top_count_keys: ["ip_address", "status"] +``` + +and 10 documents matched your alert, it may contain something like + +``` +ip_address: +127.0.0.1: 7 +10.0.0.1: 2 +192.168.0.1: 1 + +status: +200: 9 +500: 1 +``` + +### How can I make the alert come at a certain time? + +The ``aggregation`` feature will take every alert that has occured over a period of time and send them together in one alert. You can use cron style syntax to send all alerts that have occured since the last once by using + +``` +aggregation: + schedule: '2 4 * * mon,fri' +``` + +### I have lots of documents and it's really slow, how can I speed it up? + +There are several ways to potentially speed up queries. If you are using ``index: logstash-*``, Elasticsearch will query all shards, even if they do not possibly contain data with the correct timestamp. 
Instead, you can use Python time format strings and set ``use_strftime_index`` + +``` +index: logstash-%Y.%m +use_strftime_index: true +``` + +Another thing you could change is ``buffer_time``. By default, ElastAlert will query large overlapping windows in order to ensure that it does not miss any events, even if they are indexed in real time. In config.yaml, you can adjust ``buffer_time`` to a smaller number to only query the most recent few minutes. + +``` +buffer_time: + minutes: 5 +``` + +By default, ElastAlert will download every document in full before processing them. Instead, you can have ElastAlert simply get a count of the number of documents that have occured in between each query. To do this, set ``use_count_query: true``. This cannot be used if you use ``query_key``, because ElastAlert will not know the contents of each documents, just the total number of them. This also reduces the precision of alerts, because all events that occur between each query will be rounded to a single timestamp. + +If you are using ``query_key`` (a single key, not multiple keys) you can use ``use_terms_query``. This will make ElastAlert perform a terms aggregation to get the counts for each value of a certain field. Both ``use_terms_query`` and ``use_count_query`` also require ``doc_type`` to be set to the ``_type`` of the documents. They may not be compatible with all rule types. + +### Can I perform aggregations? + +The only aggregation supported currently is a terms aggregation, by setting ``use_terms_query``. + +### I'm not using @timestamp, what do I do? + +You can use ``timestamp_field`` to change which field ElastAlert will use as the timestamp. You can use ``timestamp_type`` to change it between ISO 8601 and unix timestamps. You must have some kind of timestamp for ElastAlert to work. If your events are not in real time, you can use ``query_delay`` and ``buffer_time`` to adjust when ElastAlert will look for documents. + +### I'm using flatline but I don't see any alerts + +When using ``type: flatline``, ElastAlert must see at least one document before it will alert you that it has stopped seeing them. + +### How can I get a "resolve" event? + +ElastAlert does not currently support stateful alerts or resolve events. + +### Can I set a warning threshold? + +Currently, the only way to set a warning threshold is by creating a second rule with a lower threshold. + +## License + +ElastAlert is licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0 + +### Read the documentation at [Read the Docs](http://elastalert.readthedocs.org). + +### Questions? Drop by #elastalert on Freenode IRC. + +[0]: https://github.com/jertel/elastalert/blob/alt/README.md diff --git a/README.md b/README.md index fb2841426..694f03ebd 100644 --- a/README.md +++ b/README.md @@ -1,382 +1,44 @@ # Fork of yelp/elastalert -The original [yelp/elastalert](https://github.com/yelp/elastalert) repository has become mostly stale, with hundreds of open PRs and over 1000 open issues. The yelp team has acknowledged that they are winding down support of Elastalert. Consequently, it is difficult to merge fixes, dependency upgrades, and new features into Elastalert. Because of this, a fork of elastalert has been created. [jertel/elastalert](https://github.com/jertel/elastalert) will be an alternate repository for updates, until a new maintainer is appointed by the Yelp team and it's clear that the new maintainers are responding to PRs and issues. - -PRs are welcome, but must include tests, when possible. 
PRs will not be merged if they do not pass the automated CI workflows. - -The current status of the _alt_ branch CI workflow: - -![CI Workflow](https://github.com/jertel/elastalert/workflows/alt_build_test/badge.svg) - -If you're interested in a pre-built Docker image for either the official yelp/elastalert release, or for this fork, check out the [elastalert-docker](https://hub.docker.com/r/jertel/elastalert-docker) project on Docker Hub. - - -# Original README from yelp/elastalert follows... - -Recent changes: As of Elastalert 0.2.0, you must use Python 3.6. Python 2 will not longer be supported. - -[![Build Status](https://travis-ci.org/Yelp/elastalert.svg)](https://travis-ci.org/Yelp/elastalert) -[![Join the chat at https://gitter.im/Yelp/elastalert](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/Yelp/elastalert?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) - -## ElastAlert - [Read the Docs](http://elastalert.readthedocs.org). -### Easy & Flexible Alerting With Elasticsearch - -ElastAlert is a simple framework for alerting on anomalies, spikes, or other patterns of interest from data in Elasticsearch. - -ElastAlert works with all versions of Elasticsearch. - -At Yelp, we use Elasticsearch, Logstash and Kibana for managing our ever increasing amount of data and logs. -Kibana is great for visualizing and querying data, but we quickly realized that it needed a companion tool for alerting -on inconsistencies in our data. Out of this need, ElastAlert was created. - -If you have data being written into Elasticsearch in near real time and want to be alerted when that data matches certain patterns, ElastAlert is the tool for you. If you can see it in Kibana, ElastAlert can alert on it. - -## Overview - -We designed ElastAlert to be reliable, highly modular, and easy to set up and configure. - -It works by combining Elasticsearch with two types of components, rule types and alerts. -Elasticsearch is periodically queried and the data is passed to the rule type, which determines when -a match is found. When a match occurs, it is given to one or more alerts, which take action based on the match. - -This is configured by a set of rules, each of which defines a query, a rule type, and a set of alerts. - -Several rule types with common monitoring paradigms are included with ElastAlert: - -- Match where there are at least X events in Y time" (``frequency`` type) -- Match when the rate of events increases or decreases" (``spike`` type) -- Match when there are less than X events in Y time" (``flatline`` type) -- Match when a certain field matches a blacklist/whitelist" (``blacklist`` and ``whitelist`` type) -- Match on any event matching a given filter" (``any`` type) -- Match when a field has two different values within some time" (``change`` type) -- Match when a never before seen term appears in a field" (``new_term`` type) -- Match when the number of unique values for a field is above or below a threshold (``cardinality`` type) - -Currently, we have built-in support for the following alert types: - -- Command -- Email -- JIRA -- OpsGenie -- AWS SNS -- MS Teams -- Slack -- Mattermost -- Telegram -- GoogleChat -- PagerDuty -- PagerTree -- Exotel -- Twilio -- Splunk On-Call (Formerly VictorOps) -- Gitter -- ServiceNow -- Debug -- Stomp -- Alerta -- HTTP POST -- Line Notify -- TheHive -- Zabbix -- Discord -- Dingtalk -- Chatwork - -Additional rule types and alerts can be easily imported or written. 
- -In addition to this basic usage, there are many other features that make alerts more useful: - -- Alerts link to Kibana dashboards -- Aggregate counts for arbitrary fields -- Combine alerts into periodic reports -- Separate alerts by using a unique key field -- Intercept and enhance match data - -To get started, check out `Running ElastAlert For The First Time` in the [documentation](http://elastalert.readthedocs.org). - -## Running ElastAlert -You can either install the latest released version of ElastAlert using pip: - -```pip install elastalert``` - -or you can clone the ElastAlert repository for the most recent changes: - -```git clone https://github.com/Yelp/elastalert.git``` - -Install the module: - -```pip install "setuptools>=11.3"``` - -```python setup.py install``` - -The following invocation can be used to run ElastAlert after installing - -``$ elastalert [--debug] [--verbose] [--start ] [--end ] [--rule ] [--config ]`` - -``--debug`` will print additional information to the screen as well as suppresses alerts and instead prints the alert body. Not compatible with `--verbose`. - -``--verbose`` will print additional information without suppressing alerts. Not compatible with `--debug.` - -``--start`` will begin querying at the given timestamp. By default, ElastAlert will begin querying from the present. -Timestamp format is ``YYYY-MM-DDTHH-MM-SS[-/+HH:MM]`` (Note the T between date and hour). -Eg: ``--start 2014-09-26T12:00:00`` (UTC) or ``--start 2014-10-01T07:30:00-05:00`` - -``--end`` will cause ElastAlert to stop querying at the given timestamp. By default, ElastAlert will continue -to query indefinitely. - -``--rule`` will allow you to run only one rule. It must still be in the rules folder. -Eg: ``--rule this_rule.yaml`` - -``--config`` allows you to specify the location of the configuration. By default, it is will look for config.yaml in the current directory. - -## Third Party Tools And Extras -### Kibana plugin -![img](https://raw.githubusercontent.com/bitsensor/elastalert-kibana-plugin/master/showcase.gif) -Available at the [ElastAlert Kibana plugin repository](https://github.com/bitsensor/elastalert-kibana-plugin). - -### Docker -A [Dockerized version](https://github.com/bitsensor/elastalert) of ElastAlert including a REST api is build from `master` to `bitsensor/elastalert:latest`. - -```bash -git clone https://github.com/bitsensor/elastalert.git; cd elastalert -docker run -d -p 3030:3030 -p 3333:3333 \ - -v `pwd`/config/elastalert.yaml:/opt/elastalert/config.yaml \ - -v `pwd`/config/config.json:/opt/elastalert-server/config/config.json \ - -v `pwd`/rules:/opt/elastalert/rules \ - -v `pwd`/rule_templates:/opt/elastalert/rule_templates \ - --net="host" \ - --name elastalert bitsensor/elastalert:latest -``` - -### ElastAlert-Docker - -This Dockerfile will build a Docker image for Elastalert. This image is compatible with the accompanying Helm chart for Kubernetes. 
- -https://github.com/jertel/elastalert-docker - -### ElastAlert uses ElastAlert Helm Chart almost as it is *Use original Docker Image to fix bugs - -[ElastAlert Server Helm Chart](https://github.com/daichi703n/elastalert-helm) - -[Praeco Helm Chart](https://github.com/daichi703n/praeco-helm) - -[Installing Praeco (ElastAlert GUI) into Kubernetes with Helm](https://en-designetwork.daichi703n.com/entry/2020/02/24/praeco-helm-kubernetes) - -### Praeco -![Praeco screenshot](https://user-images.githubusercontent.com/611996/47752071-7c4a9080-dc61-11e8-8ccf-2196f13429b2.png) -[Praeco](https://github.com/johnsusek/praeco) is a free open source GUI for ElastAlert. +The original [yelp/elastalert][0] repository has become mostly stale, with hundreds of open PRs and +over 1000 open issues. The Yelp team has acknowledged that they are winding down support of +Elastalert. Consequently, it is difficult to merge fixes, dependency upgrades, and new features into +Elastalert. Because of this, a fork of Elastalert has been created. [jertel/elastalert][1] will be +an alternate repository for updates, until a new maintainer is appointed by the Yelp team and it's +clear that the new maintainers are responding to PRs and issues. ## Documentation -Read the documentation at [Read the Docs](http://elastalert.readthedocs.org). - -To build a html version of the docs locally - -``` -pip install sphinx_rtd_theme sphinx -cd docs -make html -``` - -View in browser at build/html/index.html - -## Configuration - -See config.yaml.example for details on configuration. - -## Example rules - -Examples of different types of rules can be found in example_rules/. +Updated Elastalert documentation that reflects the state of the _alt_ branch can be found [here][3]. +This is the place to start if you're not familiar with Elastalert at all. -- ``example_spike.yaml`` is an example of the "spike" rule type, which allows you to alert when the rate of events, averaged over a time period, -increases by a given factor. This example will send an email alert when there are 3 times more events matching a filter occurring within the -last 2 hours than the number of events in the previous 2 hours. +The full list of platforms that Elastalert can fire alerts into can be found [here][4] -- ``example_frequency.yaml`` is an example of the "frequency" rule type, which will alert when there are a given number of events occuring -within a time period. This example will send an email when 50 documents matching a given filter occur within a 4 hour timeframe. +The original README for Elastalert can be found [here][5]. Please note that this file is +not being actively maintained, and will probably grow less accurate over time. -- ``example_change.yaml`` is an example of the "change" rule type, which will alert when a certain field in two documents changes. In this example, -the alert email is sent when two documents with the same 'username' field but a different value of the 'country_name' field occur within 24 hours -of each other. +## Contributing -- ``example_new_term.yaml`` is an example of the "new term" rule type, which alerts when a new value appears in a field or fields. In this example, -an email is sent when a new value of ("username", "computer") is encountered in example login logs. +PRs are welcome, but must include tests, when possible. PRs will not be merged if they do not pass +the automated CI workflows. -## Frequently Asked Questions - -### My rule is not getting any hits? 
- -So you've managed to set up ElastAlert, write a rule, and run it, but nothing happens, or it says ``0 query hits``. First of all, we recommend using the command ``elastalert-test-rule rule.yaml`` to debug. It will show you how many documents match your filters for the last 24 hours (or more, see ``--help``), and then shows you if any alerts would have fired. If you have a filter in your rule, remove it and try again. This will show you if the index is correct and that you have at least some documents. If you have a filter in Kibana and want to recreate it in ElastAlert, you probably want to use a query string. Your filter will look like - -``` -filter: -- query: - query_string: - query: "foo: bar AND baz: abc*" -``` -If you receive an error that Elasticsearch is unable to parse it, it's likely the YAML is not spaced correctly, and the filter is not in the right format. If you are using other types of filters, like ``term``, a common pitfall is not realizing that you may need to use the analyzed token. This is the default if you are using Logstash. For example, - -``` -filter: -- term: - foo: "Test Document" -``` - -will not match even if the original value for ``foo`` was exactly "Test Document". Instead, you want to use ``foo.raw``. If you are still having trouble troubleshooting why your documents do not match, try running ElastAlert with ``--es_debug_trace /path/to/file.log``. This will log the queries made to Elasticsearch in full so that you can see exactly what is happening. - -### I got hits, why didn't I get an alert? - -If you got logs that had ``X query hits, 0 matches, 0 alerts sent``, it depends on the ``type`` why you didn't get any alerts. If ``type: any``, a match will occur for every hit. If you are using ``type: frequency``, ``num_events`` must occur within ``timeframe`` of each other for a match to occur. Different rules apply for different rule types. - -If you see ``X matches, 0 alerts sent``, this may occur for several reasons. If you set ``aggregation``, the alert will not be sent until after that time has elapsed. If you have gotten an alert for this same rule before, that rule may be silenced for a period of time. The default is one minute between alerts. If a rule is silenced, you will see ``Ignoring match for silenced rule`` in the logs. - -If you see ``X alerts sent`` but didn't get any alert, it's probably related to the alert configuration. If you are using the ``--debug`` flag, you will not receive any alerts. Instead, the alert text will be written to the console. Use ``--verbose`` to achieve the same affects without preventing alerts. If you are using email alert, make sure you have it configured for an SMTP server. By default, it will connect to localhost on port 25. It will also use the word "elastalert" as the "From:" address. Some SMTP servers will reject this because it does not have a domain while others will add their own domain automatically. See the email section in the documentation for how to configure this. - -### Why did I only get one alert when I expected to get several? - -There is a setting called ``realert`` which is the minimum time between two alerts for the same rule. Any alert that occurs within this time will simply be dropped. The default value for this is one minute. If you want to receive an alert for every single match, even if they occur right after each other, use - -``` -realert: - minutes: 0 -``` - -You can of course set it higher as well. - -### How can I prevent duplicate alerts? 
- -By setting ``realert``, you will prevent the same rule from alerting twice in an amount of time. - -``` -realert: - days: 1 -``` - -You can also prevent duplicates based on a certain field by using ``query_key``. For example, to prevent multiple alerts for the same user, you might use - -``` -realert: - hours: 8 -query_key: user -``` - -Note that this will also affect the way many rule types work. If you are using ``type: frequency`` for example, ``num_events`` for a single value of ``query_key`` must occur before an alert will be sent. You can also use a compound of multiple fields for this key. For example, if you only wanted to receieve an alert once for a specific error and hostname, you could use - -``` -query_key: [error, hostname] -``` - -Internally, this works by creating a new field for each document called ``field1,field2`` with a value of ``value1,value2`` and using that as the ``query_key``. - -The data for when an alert will fire again is stored in Elasticsearch in the ``elastalert_status`` index, with a ``_type`` of ``silence`` and also cached in memory. - -### How can I change what's in the alert? - -You can use the field ``alert_text`` to add custom text to an alert. By setting ``alert_text_type: alert_text_only`` Or ``alert_text_type: alert_text_jinja``, it will be the entirety of the alert. You can also add different fields from the alert: - -With ``alert_text_type: alert_text_jinja`` by using [Jinja2](https://pypi.org/project/Jinja2/) Template. - -``` -alert_text_type: alert_text_jinja - -alert_text: | - Alert triggered! *({{num_hits}} Matches!)* - Something happened with {{username}} ({{email}}) - {{description|truncate}} - -``` - -> Top fields are accessible via `{{field_name}}` or `{{_data['field_name']}}`, `_data` is useful when accessing *fields with dots in their keys*, as Jinja treat dot as a nested field. -> If `_data` conflicts with your top level data, use ``jinja_root_name`` to change its name. - -With ``alert_text_type: alert_text_only`` by using Python style string formatting and ``alert_text_args``. For example - -``` -alert_text: "Something happened with {0} at {1}" -alert_text_type: alert_text_only -alert_text_args: ["username", "@timestamp"] -``` - -You can also limit the alert to only containing certain fields from the document by using ``include``. - -``` -include: ["ip_address", "hostname", "status"] -``` - -### My alert only contains data for one event, how can I see more? - -If you are using ``type: frequency``, you can set the option ``attach_related: true`` and every document will be included in the alert. An alternative, which works for every type, is ``top_count_keys``. This will show the top counts for each value for certain fields. For example, if you have - -``` -top_count_keys: ["ip_address", "status"] -``` - -and 10 documents matched your alert, it may contain something like - -``` -ip_address: -127.0.0.1: 7 -10.0.0.1: 2 -192.168.0.1: 1 - -status: -200: 9 -500: 1 -``` - -### How can I make the alert come at a certain time? - -The ``aggregation`` feature will take every alert that has occured over a period of time and send them together in one alert. You can use cron style syntax to send all alerts that have occured since the last once by using - -``` -aggregation: - schedule: '2 4 * * mon,fri' -``` - -### I have lots of documents and it's really slow, how can I speed it up? - -There are several ways to potentially speed up queries. 
If you are using ``index: logstash-*``, Elasticsearch will query all shards, even if they do not possibly contain data with the correct timestamp. Instead, you can use Python time format strings and set ``use_strftime_index`` - -``` -index: logstash-%Y.%m -use_strftime_index: true -``` - -Another thing you could change is ``buffer_time``. By default, ElastAlert will query large overlapping windows in order to ensure that it does not miss any events, even if they are indexed in real time. In config.yaml, you can adjust ``buffer_time`` to a smaller number to only query the most recent few minutes. - -``` -buffer_time: - minutes: 5 -``` - -By default, ElastAlert will download every document in full before processing them. Instead, you can have ElastAlert simply get a count of the number of documents that have occured in between each query. To do this, set ``use_count_query: true``. This cannot be used if you use ``query_key``, because ElastAlert will not know the contents of each documents, just the total number of them. This also reduces the precision of alerts, because all events that occur between each query will be rounded to a single timestamp. - -If you are using ``query_key`` (a single key, not multiple keys) you can use ``use_terms_query``. This will make ElastAlert perform a terms aggregation to get the counts for each value of a certain field. Both ``use_terms_query`` and ``use_count_query`` also require ``doc_type`` to be set to the ``_type`` of the documents. They may not be compatible with all rule types. - -### Can I perform aggregations? - -The only aggregation supported currently is a terms aggregation, by setting ``use_terms_query``. - -### I'm not using @timestamp, what do I do? - -You can use ``timestamp_field`` to change which field ElastAlert will use as the timestamp. You can use ``timestamp_type`` to change it between ISO 8601 and unix timestamps. You must have some kind of timestamp for ElastAlert to work. If your events are not in real time, you can use ``query_delay`` and ``buffer_time`` to adjust when ElastAlert will look for documents. - -### I'm using flatline but I don't see any alerts - -When using ``type: flatline``, ElastAlert must see at least one document before it will alert you that it has stopped seeing them. - -### How can I get a "resolve" event? +The current status of the _alt_ branch CI workflow: -ElastAlert does not currently support stateful alerts or resolve events. +![CI Workflow](https://github.com/jertel/elastalert/workflows/alt_build_test/badge.svg) -### Can I set a warning threshold? +## Docker -Currently, the only way to set a warning threshold is by creating a second rule with a lower threshold. +If you're interested in a pre-built Docker image for either the official yelp/elastalert release, or +for this fork, check out the [elastalert-docker][2] project on Docker Hub. ## License -ElastAlert is licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0 - -### Read the documentation at [Read the Docs](http://elastalert.readthedocs.org). +Elastalert is licensed under the [Apache License, Version 2.0][6]. -### Questions? Drop by #elastalert on Freenode IRC. 
+[0]: https://github.com/yelp/elastalert +[1]: https://github.com/jertel/elastalert +[2]: https://hub.docker.com/r/jertel/elastalert-docker +[3]: https://elastalert2.readthedocs.io/ +[4]: https://elastalert2.readthedocs.io/en/latest/ruletypes.html#alerts +[5]: https://github.com/jertel/elastalert/blob/alt/README-old.md +[6]: http://www.apache.org/licenses/LICENSE-2 From 4f947691cbcdada0e3402d8f8f48426ac836ca92 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Tue, 2 Mar 2021 08:36:07 +0900 Subject: [PATCH 0100/1065] Bump coverage from 5.4 to 5.5 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 6b0642e5b..d81c7ca25 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,5 @@ -r requirements.txt -coverage==5.4 +coverage==5.5 flake8 pluggy>=0.12.0 pre-commit From 2f36a0866d2a646a75710ef10c0310e72586c049 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Tue, 2 Mar 2021 08:03:09 +0000 Subject: [PATCH 0101/1065] Remove mention of new alert platforms from the old README As requested by the author of these alerters, @nsano-rururu --- README-old.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/README-old.md b/README-old.md index bbdf39d97..9902b4d7f 100644 --- a/README-old.md +++ b/README-old.md @@ -67,9 +67,6 @@ Currently, we have built-in support for the following alert types: - Line Notify - TheHive - Zabbix -- Discord -- Dingtalk -- Chatwork Additional rule types and alerts can be easily imported or written. From d3fd9bb3e9f4624822e73f1e7306c648ec4da2c8 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Tue, 2 Mar 2021 09:19:13 +0000 Subject: [PATCH 0102/1065] Migrate the FAQ to Read the Docs - Also add a short README on building the documentation - Make some tweaks to allow the generation of documentation from Markdown rather than ReStructured Text --- docs/README.md | 12 +++ docs/source/conf.py | 4 +- docs/source/index.rst | 1 + docs/source/recipes/faq.md | 181 ++++++++++++++++++++++++++++++++++++ docs/source/recipes/faq.rst | 6 ++ 5 files changed, 202 insertions(+), 2 deletions(-) create mode 100644 docs/README.md create mode 100644 docs/source/recipes/faq.md create mode 100644 docs/source/recipes/faq.rst diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 000000000..eab455249 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,12 @@ +# Documentation + +You can read this documentation at [Read The Docs][0]. + +To build a local version of these docs, the following from within the `/docs` directory: + +``` +pip install m2r2 sphinx_rtd_theme sphinx +make html +``` + +You can then view the generated HTML in from within the `build/` folder. diff --git a/docs/source/conf.py b/docs/source/conf.py index 80a76ed1a..58de3fddc 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -19,13 +19,13 @@ # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = [] +extensions = ["m2r2"] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ['.rst', '.md'] # The encoding of source files. 
# source_encoding = 'utf-8' diff --git a/docs/source/index.rst b/docs/source/index.rst index cf6d439c6..17b15631d 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -22,6 +22,7 @@ Contents: recipes/adding_enhancements recipes/adding_loaders recipes/signing_requests + recipes/faq Indices and Tables ================== diff --git a/docs/source/recipes/faq.md b/docs/source/recipes/faq.md new file mode 100644 index 000000000..8c2cbe635 --- /dev/null +++ b/docs/source/recipes/faq.md @@ -0,0 +1,181 @@ +My rule is not getting any hits? +========== + +So you've managed to set up ElastAlert, write a rule, and run it, but nothing happens, or it says ``0 query hits``. First of all, we recommend using the command ``elastalert-test-rule rule.yaml`` to debug. It will show you how many documents match your filters for the last 24 hours (or more, see ``--help``), and then shows you if any alerts would have fired. If you have a filter in your rule, remove it and try again. This will show you if the index is correct and that you have at least some documents. If you have a filter in Kibana and want to recreate it in ElastAlert, you probably want to use a query string. Your filter will look like + +``` +filter: +- query: + query_string: + query: "foo: bar AND baz: abc*" +``` +If you receive an error that Elasticsearch is unable to parse it, it's likely the YAML is not spaced correctly, and the filter is not in the right format. If you are using other types of filters, like ``term``, a common pitfall is not realizing that you may need to use the analyzed token. This is the default if you are using Logstash. For example, + +``` +filter: +- term: + foo: "Test Document" +``` + +will not match even if the original value for ``foo`` was exactly "Test Document". Instead, you want to use ``foo.raw``. If you are still having trouble troubleshooting why your documents do not match, try running ElastAlert with ``--es_debug_trace /path/to/file.log``. This will log the queries made to Elasticsearch in full so that you can see exactly what is happening. + +I got hits, why didn't I get an alert? +========== + +If you got logs that had ``X query hits, 0 matches, 0 alerts sent``, it depends on the ``type`` why you didn't get any alerts. If ``type: any``, a match will occur for every hit. If you are using ``type: frequency``, ``num_events`` must occur within ``timeframe`` of each other for a match to occur. Different rules apply for different rule types. + +If you see ``X matches, 0 alerts sent``, this may occur for several reasons. If you set ``aggregation``, the alert will not be sent until after that time has elapsed. If you have gotten an alert for this same rule before, that rule may be silenced for a period of time. The default is one minute between alerts. If a rule is silenced, you will see ``Ignoring match for silenced rule`` in the logs. + +If you see ``X alerts sent`` but didn't get any alert, it's probably related to the alert configuration. If you are using the ``--debug`` flag, you will not receive any alerts. Instead, the alert text will be written to the console. Use ``--verbose`` to achieve the same affects without preventing alerts. If you are using email alert, make sure you have it configured for an SMTP server. By default, it will connect to localhost on port 25. It will also use the word "elastalert" as the "From:" address. Some SMTP servers will reject this because it does not have a domain while others will add their own domain automatically. 
See the email section in the documentation for how to configure this. + +Why did I only get one alert when I expected to get several? +========== + +There is a setting called ``realert`` which is the minimum time between two alerts for the same rule. Any alert that occurs within this time will simply be dropped. The default value for this is one minute. If you want to receive an alert for every single match, even if they occur right after each other, use + +``` +realert: + minutes: 0 +``` + +You can of course set it higher as well. + +How can I prevent duplicate alerts? +========== + +By setting ``realert``, you will prevent the same rule from alerting twice in an amount of time. + +``` +realert: + days: 1 +``` + +You can also prevent duplicates based on a certain field by using ``query_key``. For example, to prevent multiple alerts for the same user, you might use + +``` +realert: + hours: 8 +query_key: user +``` + +Note that this will also affect the way many rule types work. If you are using ``type: frequency`` for example, ``num_events`` for a single value of ``query_key`` must occur before an alert will be sent. You can also use a compound of multiple fields for this key. For example, if you only wanted to receieve an alert once for a specific error and hostname, you could use + +``` +query_key: [error, hostname] +``` + +Internally, this works by creating a new field for each document called ``field1,field2`` with a value of ``value1,value2`` and using that as the ``query_key``. + +The data for when an alert will fire again is stored in Elasticsearch in the ``elastalert_status`` index, with a ``_type`` of ``silence`` and also cached in memory. + +How can I change what's in the alert? +========== + +You can use the field ``alert_text`` to add custom text to an alert. By setting ``alert_text_type: alert_text_only`` Or ``alert_text_type: alert_text_jinja``, it will be the entirety of the alert. You can also add different fields from the alert: + +With ``alert_text_type: alert_text_jinja`` by using [Jinja2](https://pypi.org/project/Jinja2/) Template. + +``` +alert_text_type: alert_text_jinja + +alert_text: | + Alert triggered! *({{num_hits}} Matches!)* + Something happened with {{username}} ({{email}}) + {{description|truncate}} + +``` + +> Top fields are accessible via `{{field_name}}` or `{{_data['field_name']}}`, `_data` is useful when accessing *fields with dots in their keys*, as Jinja treat dot as a nested field. +> If `_data` conflicts with your top level data, use ``jinja_root_name`` to change its name. + +With ``alert_text_type: alert_text_only`` by using Python style string formatting and ``alert_text_args``. For example + +``` +alert_text: "Something happened with {0} at {1}" +alert_text_type: alert_text_only +alert_text_args: ["username", "@timestamp"] +``` + +You can also limit the alert to only containing certain fields from the document by using ``include``. + +``` +include: ["ip_address", "hostname", "status"] +``` + +My alert only contains data for one event, how can I see more? +========== + +If you are using ``type: frequency``, you can set the option ``attach_related: true`` and every document will be included in the alert. An alternative, which works for every type, is ``top_count_keys``. This will show the top counts for each value for certain fields. 
For example, if you have + +``` +top_count_keys: ["ip_address", "status"] +``` + +and 10 documents matched your alert, it may contain something like + +``` +ip_address: +127.0.0.1: 7 +10.0.0.1: 2 +192.168.0.1: 1 + +status: +200: 9 +500: 1 +``` + +How can I make the alert come at a certain time? +========== + +The ``aggregation`` feature will take every alert that has occured over a period of time and send them together in one alert. You can use cron style syntax to send all alerts that have occured since the last once by using + +``` +aggregation: + schedule: '2 4 * * mon,fri' +``` + +I have lots of documents and it's really slow, how can I speed it up? +========== + +There are several ways to potentially speed up queries. If you are using ``index: logstash-*``, Elasticsearch will query all shards, even if they do not possibly contain data with the correct timestamp. Instead, you can use Python time format strings and set ``use_strftime_index`` + +``` +index: logstash-%Y.%m +use_strftime_index: true +``` + +Another thing you could change is ``buffer_time``. By default, ElastAlert will query large overlapping windows in order to ensure that it does not miss any events, even if they are indexed in real time. In config.yaml, you can adjust ``buffer_time`` to a smaller number to only query the most recent few minutes. + +``` +buffer_time: + minutes: 5 +``` + +By default, ElastAlert will download every document in full before processing them. Instead, you can have ElastAlert simply get a count of the number of documents that have occured in between each query. To do this, set ``use_count_query: true``. This cannot be used if you use ``query_key``, because ElastAlert will not know the contents of each documents, just the total number of them. This also reduces the precision of alerts, because all events that occur between each query will be rounded to a single timestamp. + +If you are using ``query_key`` (a single key, not multiple keys) you can use ``use_terms_query``. This will make ElastAlert perform a terms aggregation to get the counts for each value of a certain field. Both ``use_terms_query`` and ``use_count_query`` also require ``doc_type`` to be set to the ``_type`` of the documents. They may not be compatible with all rule types. + +Can I perform aggregations? +========== + +The only aggregation supported currently is a terms aggregation, by setting ``use_terms_query``. + +I'm not using @timestamp, what do I do? +========== + +You can use ``timestamp_field`` to change which field ElastAlert will use as the timestamp. You can use ``timestamp_type`` to change it between ISO 8601 and unix timestamps. You must have some kind of timestamp for ElastAlert to work. If your events are not in real time, you can use ``query_delay`` and ``buffer_time`` to adjust when ElastAlert will look for documents. + +I'm using flatline but I don't see any alerts +========== + +When using ``type: flatline``, ElastAlert must see at least one document before it will alert you that it has stopped seeing them. + +How can I get a "resolve" event? +========== + +ElastAlert does not currently support stateful alerts or resolve events. + +Can I set a warning threshold? +========== + +Currently, the only way to set a warning threshold is by creating a second rule with a lower threshold. diff --git a/docs/source/recipes/faq.rst b/docs/source/recipes/faq.rst new file mode 100644 index 000000000..ecbd5ad07 --- /dev/null +++ b/docs/source/recipes/faq.rst @@ -0,0 +1,6 @@ +.. 
_faq: + +Frequently Asked Questions +-------------------------- + +.. mdinclude:: faq.md From d701105e512597b1225426f03c0df7cb868e1362 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Tue, 2 Mar 2021 19:12:56 +0900 Subject: [PATCH 0103/1065] Docker test python 3.6 to 3.9 --- Dockerfile-test | 6 +++--- tox.ini | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Dockerfile-test b/Dockerfile-test index fb8a78409..2c04ca85c 100644 --- a/Dockerfile-test +++ b/Dockerfile-test @@ -1,9 +1,9 @@ FROM ubuntu:latest -RUN apt-get update && apt-get upgrade -y -RUN apt-get install software-properties-common -y +RUN apt update && apt upgrade -y +RUN apt install software-properties-common -y RUN add-apt-repository ppa:deadsnakes/ppa -RUN apt-get -y install build-essential python3.6 python3.6-dev python3-pip libssl-dev git +RUN apt -y install build-essential python3.9 python3.9-dev python3-pip libssl-dev git WORKDIR /home/elastalert diff --git a/tox.ini b/tox.ini index 71099e17c..f5e3369bc 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] project = elastalert -envlist = py36,docs +envlist = py39,docs [testenv] deps = -rrequirements-dev.txt From 0e7d68cc472bf854b6105efb4fdfdfcbbfc65a51 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Tue, 2 Mar 2021 11:23:55 +0000 Subject: [PATCH 0104/1065] Add Markdown converter to dev dependency list --- requirements-dev.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-dev.txt b/requirements-dev.txt index d81c7ca25..75ba1002b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,6 +1,7 @@ -r requirements.txt coverage==5.5 flake8 +m2r2 pluggy>=0.12.0 pre-commit pylint<2.8 From 951c6c1a540d57b2db5c9bdca447229fa1966d60 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Tue, 2 Mar 2021 11:28:08 +0000 Subject: [PATCH 0105/1065] Format line lengths for readability --- docs/source/recipes/faq.md | 118 ++++++++++++++++++++++++++++--------- 1 file changed, 91 insertions(+), 27 deletions(-) diff --git a/docs/source/recipes/faq.md b/docs/source/recipes/faq.md index 8c2cbe635..86ba461a1 100644 --- a/docs/source/recipes/faq.md +++ b/docs/source/recipes/faq.md @@ -1,7 +1,13 @@ My rule is not getting any hits? ========== -So you've managed to set up ElastAlert, write a rule, and run it, but nothing happens, or it says ``0 query hits``. First of all, we recommend using the command ``elastalert-test-rule rule.yaml`` to debug. It will show you how many documents match your filters for the last 24 hours (or more, see ``--help``), and then shows you if any alerts would have fired. If you have a filter in your rule, remove it and try again. This will show you if the index is correct and that you have at least some documents. If you have a filter in Kibana and want to recreate it in ElastAlert, you probably want to use a query string. Your filter will look like +So you've managed to set up ElastAlert, write a rule, and run it, but nothing happens, or it says +``0 query hits``. First of all, we recommend using the command ``elastalert-test-rule rule.yaml`` to +debug. It will show you how many documents match your filters for the last 24 hours (or more, see +``--help``), and then shows you if any alerts would have fired. If you have a filter in your rule, +remove it and try again. This will show you if the index is correct and that you have at least some +documents. If you have a filter in Kibana and want to recreate it in ElastAlert, you probably want +to use a query string. 
Your filter will look like ``` filter: @@ -9,7 +15,10 @@ filter: query_string: query: "foo: bar AND baz: abc*" ``` -If you receive an error that Elasticsearch is unable to parse it, it's likely the YAML is not spaced correctly, and the filter is not in the right format. If you are using other types of filters, like ``term``, a common pitfall is not realizing that you may need to use the analyzed token. This is the default if you are using Logstash. For example, +If you receive an error that Elasticsearch is unable to parse it, it's likely the YAML is not spaced +correctly, and the filter is not in the right format. If you are using other types of filters, like +``term``, a common pitfall is not realizing that you may need to use the analyzed token. This is the +default if you are using Logstash. For example, ``` filter: @@ -17,21 +26,41 @@ filter: foo: "Test Document" ``` -will not match even if the original value for ``foo`` was exactly "Test Document". Instead, you want to use ``foo.raw``. If you are still having trouble troubleshooting why your documents do not match, try running ElastAlert with ``--es_debug_trace /path/to/file.log``. This will log the queries made to Elasticsearch in full so that you can see exactly what is happening. +will not match even if the original value for ``foo`` was exactly "Test Document". Instead, you want +to use ``foo.raw``. If you are still having trouble troubleshooting why your documents do not match, +try running ElastAlert with ``--es_debug_trace /path/to/file.log``. This will log the queries made +to Elasticsearch in full so that you can see exactly what is happening. I got hits, why didn't I get an alert? ========== -If you got logs that had ``X query hits, 0 matches, 0 alerts sent``, it depends on the ``type`` why you didn't get any alerts. If ``type: any``, a match will occur for every hit. If you are using ``type: frequency``, ``num_events`` must occur within ``timeframe`` of each other for a match to occur. Different rules apply for different rule types. - -If you see ``X matches, 0 alerts sent``, this may occur for several reasons. If you set ``aggregation``, the alert will not be sent until after that time has elapsed. If you have gotten an alert for this same rule before, that rule may be silenced for a period of time. The default is one minute between alerts. If a rule is silenced, you will see ``Ignoring match for silenced rule`` in the logs. - -If you see ``X alerts sent`` but didn't get any alert, it's probably related to the alert configuration. If you are using the ``--debug`` flag, you will not receive any alerts. Instead, the alert text will be written to the console. Use ``--verbose`` to achieve the same affects without preventing alerts. If you are using email alert, make sure you have it configured for an SMTP server. By default, it will connect to localhost on port 25. It will also use the word "elastalert" as the "From:" address. Some SMTP servers will reject this because it does not have a domain while others will add their own domain automatically. See the email section in the documentation for how to configure this. +If you got logs that had ``X query hits, 0 matches, 0 alerts sent``, it depends on the ``type`` why +you didn't get any alerts. If ``type: any``, a match will occur for every hit. If you are using +``type: frequency``, ``num_events`` must occur within ``timeframe`` of each other for a match to +occur. Different rules apply for different rule types. 
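+
+As a concrete sketch (the index, filter, and thresholds below are placeholder values), a minimal
+``frequency`` rule looks like
+
+```
+type: frequency
+index: logstash-*
+num_events: 50
+timeframe:
+  hours: 1
+filter:
+- query:
+    query_string:
+      query: "level: ERROR"
+```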
+
+If you see ``X matches, 0 alerts sent``, this may occur for several reasons. If you set
+``aggregation``, the alert will not be sent until after that time has elapsed. If you have gotten an
+alert for this same rule before, that rule may be silenced for a period of time. The default is one
+minute between alerts. If a rule is silenced, you will see ``Ignoring match for silenced rule`` in
+the logs.
+
+If you see ``X alerts sent`` but didn't get any alert, it's probably related to the alert
+configuration. If you are using the ``--debug`` flag, you will not receive any alerts. Instead, the
+alert text will be written to the console. Use ``--verbose`` to achieve the same effects without
+preventing alerts. If you are using email alert, make sure you have it configured for an SMTP
+server. By default, it will connect to localhost on port 25. It will also use the word "elastalert"
+as the "From:" address. Some SMTP servers will reject this because it does not have a domain while
+others will add their own domain automatically. See the email section in the documentation for how
+to configure this.
 
 Why did I only get one alert when I expected to get several?
 ==========
 
-There is a setting called ``realert`` which is the minimum time between two alerts for the same rule. Any alert that occurs within this time will simply be dropped. The default value for this is one minute. If you want to receive an alert for every single match, even if they occur right after each other, use
+There is a setting called ``realert`` which is the minimum time between two alerts for the same
+rule. Any alert that occurs within this time will simply be dropped. The default value for this is
+one minute. If you want to receive an alert for every single match, even if they occur right after
+each other, use
 
 ```
 realert:
@@ -50,7 +79,8 @@ realert:
   days: 1
 ```
 
-You can also prevent duplicates based on a certain field by using ``query_key``. For example, to prevent multiple alerts for the same user, you might use
+You can also prevent duplicates based on a certain field by using ``query_key``. For example, to
+prevent multiple alerts for the same user, you might use
 
 ```
 realert:
@@ -58,22 +88,30 @@ query_key: user
 ```
 
-Note that this will also affect the way many rule types work. If you are using ``type: frequency`` for example, ``num_events`` for a single value of ``query_key`` must occur before an alert will be sent. You can also use a compound of multiple fields for this key. For example, if you only wanted to receive an alert once for a specific error and hostname, you could use
+Note that this will also affect the way many rule types work. If you are using ``type: frequency``
+for example, ``num_events`` for a single value of ``query_key`` must occur before an alert will be
+sent. You can also use a compound of multiple fields for this key. For example, if you only wanted
+to receive an alert once for a specific error and hostname, you could use
 
 ```
 query_key: [error, hostname]
 ```
 
-Internally, this works by creating a new field for each document called ``field1,field2`` with a value of ``value1,value2`` and using that as the ``query_key``.
+Internally, this works by creating a new field for each document called ``field1,field2`` with a
+value of ``value1,value2`` and using that as the ``query_key``.
 
-The data for when an alert will fire again is stored in Elasticsearch in the ``elastalert_status`` index, with a ``_type`` of ``silence`` and also cached in memory.
+The data for when an alert will fire again is stored in Elasticsearch in the ``elastalert_status``
+index, with a ``_type`` of ``silence`` and also cached in memory.
 
 How can I change what's in the alert?
 ==========
 
-You can use the field ``alert_text`` to add custom text to an alert. By setting ``alert_text_type: alert_text_only`` or ``alert_text_type: alert_text_jinja``, it will be the entirety of the alert. You can also add different fields from the alert:
+You can use the field ``alert_text`` to add custom text to an alert. By setting ``alert_text_type:
+alert_text_only`` or ``alert_text_type: alert_text_jinja``, it will be the entirety of the alert.
+You can also add different fields from the alert:
 
-With ``alert_text_type: alert_text_jinja`` by using a [Jinja2](https://pypi.org/project/Jinja2/) template.
+With ``alert_text_type: alert_text_jinja`` by using a [Jinja2](https://pypi.org/project/Jinja2/)
+template.
 
 ```
 alert_text_type: alert_text_jinja
@@ -85,10 +123,12 @@ alert_text: |
 
 ```
 
-> Top fields are accessible via `{{field_name}}` or `{{_data['field_name']}}`. `_data` is useful when accessing *fields with dots in their keys*, as Jinja treats a dot as a nested field.
+> Top fields are accessible via `{{field_name}}` or `{{_data['field_name']}}`. `_data` is useful
+  when accessing *fields with dots in their keys*, as Jinja treats a dot as a nested field.
 > If `_data` conflicts with your top level data, use ``jinja_root_name`` to change its name.
 
-With ``alert_text_type: alert_text_only`` by using Python-style string formatting and ``alert_text_args``. For example
+With ``alert_text_type: alert_text_only`` by using Python-style string formatting and
+``alert_text_args``. For example
 
 ```
 alert_text: "Something happened with {0} at {1}"
@@ -96,7 +136,8 @@ alert_text_type: alert_text_only
 alert_text_args: ["username", "@timestamp"]
 ```
 
-You can also limit the alert to only containing certain fields from the document by using ``include``.
+You can also limit the alert to only containing certain fields from the document by using
+``include``.
 
 ```
 include: ["ip_address", "hostname", "status"]
@@ -105,7 +146,10 @@ include: ["ip_address", "hostname", "status"]
 
 My alert only contains data for one event, how can I see more?
 ==========
 
-If you are using ``type: frequency``, you can set the option ``attach_related: true`` and every document will be included in the alert. An alternative, which works for every type, is ``top_count_keys``. This will show the top counts for each value for certain fields. For example, if you have
+If you are using ``type: frequency``, you can set the option ``attach_related: true`` and every
+document will be included in the alert. An alternative, which works for every type, is
+``top_count_keys``. This will show the top counts for each value for certain fields. For example, if
+you have
 
 ```
 top_count_keys: ["ip_address", "status"]
@@ -127,7 +171,9 @@ status:
 
 How can I make the alert come at a certain time?
 ==========
 
-The ``aggregation`` feature will take every alert that has occurred over a period of time and send them together in one alert. You can use cron style syntax to send all alerts that have occurred since the last one by using
+The ``aggregation`` feature will take every alert that has occurred over a period of time and send
+them together in one alert. You can use cron style syntax to send all alerts that have occurred since
+the last one by using
 
 ```
 aggregation:
@@ -137,23 +183,36 @@ aggregation:
 
 I have lots of documents and it's really slow, how can I speed it up?
 ==========
 
-There are several ways to potentially speed up queries. If you are using ``index: logstash-*``, Elasticsearch will query all shards, even if they cannot possibly contain data with the correct timestamp. Instead, you can use Python time format strings and set ``use_strftime_index``
+There are several ways to potentially speed up queries. If you are using ``index: logstash-*``,
+Elasticsearch will query all shards, even if they cannot possibly contain data with the correct
+timestamp. Instead, you can use Python time format strings and set ``use_strftime_index``
 
 ```
 index: logstash-%Y.%m
 use_strftime_index: true
 ```
 
-Another thing you could change is ``buffer_time``. By default, ElastAlert will query large overlapping windows in order to ensure that it does not miss any events, even if they are indexed in real time. In config.yaml, you can adjust ``buffer_time`` to a smaller number to only query the most recent few minutes.
+Another thing you could change is ``buffer_time``. By default, ElastAlert will query large
+overlapping windows in order to ensure that it does not miss any events, even if they are indexed in
+real time. In config.yaml, you can adjust ``buffer_time`` to a smaller number to only query the most
+recent few minutes.
 
 ```
 buffer_time:
   minutes: 5
 ```
 
-By default, ElastAlert will download every document in full before processing them. Instead, you can have ElastAlert simply get a count of the number of documents that have occurred in between each query. To do this, set ``use_count_query: true``. This cannot be used if you use ``query_key``, because ElastAlert will not know the contents of each document, just the total number of them. This also reduces the precision of alerts, because all events that occur between each query will be rounded to a single timestamp.
+By default, ElastAlert will download every document in full before processing them. Instead, you can
+have ElastAlert simply get a count of the number of documents that have occurred in between each
+query. To do this, set ``use_count_query: true``. This cannot be used if you use ``query_key``,
+because ElastAlert will not know the contents of each document, just the total number of them. This
+also reduces the precision of alerts, because all events that occur between each query will be
+rounded to a single timestamp.
 
-If you are using ``query_key`` (a single key, not multiple keys) you can use ``use_terms_query``. This will make ElastAlert perform a terms aggregation to get the counts for each value of a certain field. Both ``use_terms_query`` and ``use_count_query`` also require ``doc_type`` to be set to the ``_type`` of the documents. They may not be compatible with all rule types.
+If you are using ``query_key`` (a single key, not multiple keys) you can use ``use_terms_query``.
+This will make ElastAlert perform a terms aggregation to get the counts for each value of a certain
+field. Both ``use_terms_query`` and ``use_count_query`` also require ``doc_type`` to be set to the
+``_type`` of the documents. They may not be compatible with all rule types.
 
 Can I perform aggregations?
 ==========
@@ -163,12 +222,16 @@ The only aggregation supported currently is a terms aggregation, by setting ``us
 I'm not using @timestamp, what do I do?
========== -You can use ``timestamp_field`` to change which field ElastAlert will use as the timestamp. You can use ``timestamp_type`` to change it between ISO 8601 and unix timestamps. You must have some kind of timestamp for ElastAlert to work. If your events are not in real time, you can use ``query_delay`` and ``buffer_time`` to adjust when ElastAlert will look for documents. +You can use ``timestamp_field`` to change which field ElastAlert will use as the timestamp. You can +use ``timestamp_type`` to change it between ISO 8601 and unix timestamps. You must have some kind of +timestamp for ElastAlert to work. If your events are not in real time, you can use ``query_delay`` +and ``buffer_time`` to adjust when ElastAlert will look for documents. I'm using flatline but I don't see any alerts ========== -When using ``type: flatline``, ElastAlert must see at least one document before it will alert you that it has stopped seeing them. +When using ``type: flatline``, ElastAlert must see at least one document before it will alert you +that it has stopped seeing them. How can I get a "resolve" event? ========== @@ -178,4 +241,5 @@ ElastAlert does not currently support stateful alerts or resolve events. Can I set a warning threshold? ========== -Currently, the only way to set a warning threshold is by creating a second rule with a lower threshold. +Currently, the only way to set a warning threshold is by creating a second rule with a lower +threshold. From 31f04f4610c8426350416b34ac9cfda194930424 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Tue, 2 Mar 2021 15:28:45 +0000 Subject: [PATCH 0106/1065] Update the docs configuration to work better with Markdown The ReStructured Text parser doesn't like the Markdown file having the same name as the .md, so I've renamed the .md, and excluded it from being built. --- docs/source/conf.py | 1 + docs/source/recipes/{faq.md => faq-md.md} | 5 +++-- docs/source/recipes/faq.rst | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) rename docs/source/recipes/{faq.md => faq-md.md} (98%) diff --git a/docs/source/conf.py b/docs/source/conf.py index 58de3fddc..4a7ac542b 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -62,6 +62,7 @@ # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = [] +exclude_patterns = ['recipes/*.md'] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None diff --git a/docs/source/recipes/faq.md b/docs/source/recipes/faq-md.md similarity index 98% rename from docs/source/recipes/faq.md rename to docs/source/recipes/faq-md.md index 86ba461a1..15f87fbad 100644 --- a/docs/source/recipes/faq.md +++ b/docs/source/recipes/faq-md.md @@ -123,9 +123,9 @@ alert_text: | ``` -> Top fields are accessible via `{{field_name}}` or `{{_data['field_name']}}`, `_data` is useful +- Top fields are accessible via `{{field_name}}` or `{{_data['field_name']}}`, `_data` is useful when accessing *fields with dots in their keys*, as Jinja treat dot as a nested field. -> If `_data` conflicts with your top level data, use ``jinja_root_name`` to change its name. +- If `_data` conflicts with your top level data, use ``jinja_root_name`` to change its name. With ``alert_text_type: alert_text_only`` by using Python style string formatting and ``alert_text_args``. For example @@ -243,3 +243,4 @@ Can I set a warning threshold? Currently, the only way to set a warning threshold is by creating a second rule with a lower threshold. 
+
diff --git a/docs/source/recipes/faq.rst b/docs/source/recipes/faq.rst
index ecbd5ad07..34a632f69 100644
--- a/docs/source/recipes/faq.rst
+++ b/docs/source/recipes/faq.rst
@@ -3,4 +3,4 @@
 Frequently Asked Questions
 --------------------------
 
-.. mdinclude:: faq.md
+.. mdinclude:: faq-md.md
From fb457532b974343b93a2dfeb75cd23d1fb83c1db Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Thu, 4 Mar 2021 21:54:16 +0900
Subject: [PATCH 0107/1065] Bump tox from 3.22.0 to 3.23.0

---
 requirements-dev.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements-dev.txt b/requirements-dev.txt
index 75ba1002b..5ceb8762a 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -8,4 +8,4 @@ pylint<2.8
 pytest<3.7.0
 setuptools
 sphinx_rtd_theme
-tox==3.22.0
+tox==3.23.0
From f95b9d8e4e30ba7aa25c43295e83e346f223b37d Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Sun, 14 Mar 2021 13:30:01 +0900
Subject: [PATCH 0108/1065] fix docs slack_timeout

---
 docs/source/ruletypes.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 1f99f22a1..5eb1efac3 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -1808,7 +1808,7 @@ Provide absolute address of the picture.
 
 ``slack_title_link``: You can add a link in your Slack notification by setting this to a valid URL. Requires slack_title to be set.
 
-``slack_timeout``: You can specify a timeout value, in seconds, for making communicating with Slac. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles.
+``slack_timeout``: You can specify a timeout value, in seconds, for communicating with Slack. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles.
 
 ``slack_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the slack notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``.
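+
+For example, to give Slack more time before a retry (a sketch; the webhook URL is a placeholder)::
+
+    alert:
+      - slack
+    slack_webhook_url: "https://hooks.slack.com/services/XXXXXXXXX"
+    slack_timeout: 30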
From e438760cde0a0274dd69373112f85020679195f9 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 14 Mar 2021 14:06:30 +0900 Subject: [PATCH 0109/1065] Remove unused items in alerta from schema.yaml alerta_new_style_string_format alerta_customer --- elastalert/schema.yaml | 4 +--- tests/alerts_test.py | 1 - 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 65f3dc3e7..656be87e0 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -352,14 +352,12 @@ properties: alerta_correlate: {type: array, items: {type: string}} # Python format string alerta_tags: {type: array, items: {type: string}} # Python format string alerta_event: {type: string} # Python format string - alerta_customer: {type: string} alerta_text: {type: string} # Python format string alerta_type: {type: string} alerta_value: {type: string} # Python format string alerta_attributes_keys: {type: array, items: {type: string}} alerta_attributes_values: {type: array, items: {type: string}} # Python format string - alerta_new_style_string_format: {type: boolean} - + ### Simple simple_webhook_url: *arrayOfString diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 11190fdd2..c0221ed1b 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -2220,7 +2220,6 @@ def test_alerta_new_style(ea): 'alerta_severity': "debug", 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", 'alerta_value': "UP", - 'alerta_new_style_string_format': True, 'type': 'any', 'alerta_use_match_timestamp': True, 'alert': 'alerta' From a33afd39d86203f5af04e50a9b9db5098dac4182 Mon Sep 17 00:00:00 2001 From: Naoyuki Sano Date: Sun, 14 Mar 2021 14:11:29 +0900 Subject: [PATCH 0110/1065] Update schema.yaml --- elastalert/schema.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 656be87e0..5e95ab865 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -357,7 +357,6 @@ properties: alerta_value: {type: string} # Python format string alerta_attributes_keys: {type: array, items: {type: string}} alerta_attributes_values: {type: array, items: {type: string}} # Python format string - ### Simple simple_webhook_url: *arrayOfString From cde1644e3724cd2a805406cb719e7db013ac1d85 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 15 Mar 2021 08:24:01 +0000 Subject: [PATCH 0111/1065] Fix search syntax on aggregations This brings in a change that's been applied on other forks (and opened on the main Elastalert repo but not merged): https://github.com/skillz/elastalert/pull/1 https://github.com/Yelp/elastalert/pull/2038 Without this fix, syntax errors are generated when document IDs that contain '-' characters are seen by alerts that use the `aggregation` field. --- elastalert/elastalert.py | 2 +- tests/base_test.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index e689619f7..57bd771cb 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -1748,7 +1748,7 @@ def get_aggregated_matches(self, _id): """ Removes and returns all matches from writeback_es that have aggregate_id == _id """ # XXX if there are more than self.max_aggregation matches, you have big alerts and we will leave entries in ES. 
- query = {'query': {'query_string': {'query': 'aggregate_id:%s' % (_id)}}, 'sort': {'@timestamp': 'asc'}} + query = {'query': {'query_string': {'query': 'aggregate_id:"%s"' % (_id)}}, 'sort': {'@timestamp': 'asc'}} matches = [] try: if self.writeback_es.is_atleastsixtwo(): diff --git a/tests/base_test.py b/tests/base_test.py index 92dc35f7e..81724e729 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -427,8 +427,8 @@ def test_agg_matchtime(ea): call4 = ea.writeback_es.deprecated_search.call_args_list[10][1]['body'] assert 'alert_time' in call2['filter']['range'] - assert call3['query']['query_string']['query'] == 'aggregate_id:ABCD' - assert call4['query']['query_string']['query'] == 'aggregate_id:CDEF' + assert call3['query']['query_string']['query'] == 'aggregate_id:"ABCD"' + assert call4['query']['query_string']['query'] == 'aggregate_id:"CDEF"' assert ea.writeback_es.deprecated_search.call_args_list[9][1]['size'] == 1337 From 172bd5f81713bed2f508cb931f56bacf41954b65 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 15 Mar 2021 08:35:11 +0000 Subject: [PATCH 0112/1065] Update unit tests to match new query structure --- tests/base_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/base_test.py b/tests/base_test.py index 81724e729..b86498b1d 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -596,8 +596,8 @@ def test_agg_with_aggregation_key(ea): call4 = ea.writeback_es.deprecated_search.call_args_list[10][1]['body'] assert 'alert_time' in call2['filter']['range'] - assert call3['query']['query_string']['query'] == 'aggregate_id:ABCD' - assert call4['query']['query_string']['query'] == 'aggregate_id:CDEF' + assert call3['query']['query_string']['query'] == 'aggregate_id:"ABCD"' + assert call4['query']['query_string']['query'] == 'aggregate_id:"CDEF"' assert ea.writeback_es.deprecated_search.call_args_list[9][1]['size'] == 1337 From 14cada534f16dca84783c4e3f4cf70f49a9426ca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 20 Mar 2021 05:39:52 +0000 Subject: [PATCH 0113/1065] Bump jinja2 from 2.10.1 to 2.11.3 Bumps [jinja2](https://github.com/pallets/jinja) from 2.10.1 to 2.11.3. 
- [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/master/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/2.10.1...2.11.3) Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 52a32fd56..99e3306e2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,7 @@ croniter>=0.3.16 elasticsearch==7.0.0 envparse>=0.2.0 exotel>=0.1.3 -Jinja2==2.10.1 +Jinja2==2.11.3 jira>=2.0.0 jsonschema>=3.0.2 mock>=2.0.0 diff --git a/setup.py b/setup.py index 4754843a1..e0e320ee6 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,7 @@ 'envparse>=0.2.0', 'exotel>=0.1.3', 'jira>=2.0.0', - 'Jinja2==2.10.1', + 'Jinja2==2.11.3', 'jsonschema>=3.0.2', 'mock>=2.0.0', 'prison>=0.1.2', From 0cfe1bf1bafa5d74e59a9620031048f96c1fe120 Mon Sep 17 00:00:00 2001 From: Alice Date: Sun, 21 Mar 2021 19:36:48 +0000 Subject: [PATCH 0114/1065] allow loading jinja templates from filesystem - and to extend templates --- elastalert/loaders.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index bee5d4754..cecdd8adb 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -9,6 +9,8 @@ import yaml import yaml.scanner from jinja2 import Template +from jinja2 import Environment +from jinja2 import FileSystemLoader from staticconf.loader import yaml_loader from . import alerts @@ -95,6 +97,8 @@ class RulesLoader(object): base_config = {} + jinja_environment = Environment(loader=FileSystemLoader("")) + def __init__(self, conf): # schema for rule yaml self.rule_schema = jsonschema.Draft7Validator( @@ -401,7 +405,11 @@ def _dt_to_ts_with_format(dt): # Compile Jinja Template if rule.get('alert_text_type') == 'alert_text_jinja': - rule["jinja_template"] = Template(str(rule.get('alert_text', ''))) + jinja_template_path = rule.get('jinja_template_path') + if jinja_template_path: + rule["jinja_template"] = self.jinja_environment.get_or_select_template(jinja_template_path) + else: + rule["jinja_template"] = Template(str(rule.get('alert_text', ''))) def load_modules(self, rule, args=None): """ Loads things that could be modules. Enhancements, alerts and rule type. """ From f15b8aa45680503c7537de44259ba01aa64987c5 Mon Sep 17 00:00:00 2001 From: Alice Date: Sun, 21 Mar 2021 20:30:49 +0000 Subject: [PATCH 0115/1065] update docs --- docs/source/elastalert.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 17dbf5b06..6b5bfe31f 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -220,6 +220,8 @@ The default value is ``.raw`` for Elasticsearch 2 and ``.keyword`` for Elasticse ``jinja_root_name``: When using a Jinja template, specify the name of the root field name in the template. The default is ``_data``. +``jinja_template_path``: When using a Jinja template, specify filesystem path to template, this overrides the default behaviour of using alert_text as the template. 
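+
+A minimal sketch of a rule using it (the template path here is a placeholder)::
+
+    alert_text_type: alert_text_jinja
+    jinja_template_path: /opt/elastalert/templates/my_alert.j2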
+ Logging ------- From 24c6c982241a9ebdf2a8ce12cbae5f34bae6f1e4 Mon Sep 17 00:00:00 2001 From: Evan Lock Date: Fri, 2 Apr 2021 09:40:10 -0400 Subject: [PATCH 0116/1065] Add alert handler to create Datadog Events --- elastalert/alerts.py | 30 ++++++++++++++++++++++++++++++ elastalert/loaders.py | 3 ++- 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index f2f31853f..17f2e730a 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2184,3 +2184,33 @@ def get_info(self): 'type': 'hivealerter', 'hive_host': self.rule.get('hive_connection', {}).get('hive_host', '') } + +class DatadogAlerter(Alerter): + ''' Creates a Datadog Event for each alert ''' + required_options = frozenset(['datadog_api_key', 'datadog_app_key']) + + def __init__(self, rule): + super(DatadogAlerter, self).__init__(rule) + self.dd_api_key = self.rule.get('datadog_api_key', None) + self.dd_app_key = self.rule.get('datadog_app_key', None) + + def alert(self, matches): + url = 'https://api.datadoghq.com/api/v1/events' + headers = { + 'Content-Type': 'application/json', + 'DD-API-KEY': self.dd_api_key, + 'DD-APPLICATION-KEY': self.dd_app_key + } + payload = { + 'title': self.create_title(matches), + 'text': self.create_alert_body(matches) + } + try: + response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers) + response.raise_for_status() + except RequestException as e: + raise EAException('Error posting event to Datadog: %s' % e) + elastalert_logger.info('Alert sent to Datadog') + + def get_info(self): + return {'type': 'datadog'} diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 771194768..1aabd0034 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -77,7 +77,8 @@ class RulesLoader(object): 'servicenow': alerts.ServiceNowAlerter, 'alerta': alerts.AlertaAlerter, 'post': alerts.HTTPPostAlerter, - 'hivealerter': alerts.HiveAlerter + 'hivealerter': alerts.HiveAlerter, + 'datadog': alerts.DatadogAlerter } # A partial ordering of alert types. 
Relative order will be preserved in the resulting alerts list From 457501e2b0ef983cd0c613c29355f08019169647 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 4 Apr 2021 23:59:39 +0900 Subject: [PATCH 0117/1065] Kibana Discover support kibana 7.12 --- docs/source/ruletypes.rst | 4 ++-- elastalert/kibana_discover.py | 2 +- elastalert/schema.yaml | 2 +- tests/kibana_discover_test.py | 12 ++++++------ 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 5eb1efac3..253a9dbc7 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -555,9 +555,9 @@ The currently supported versions of Kibana Discover are: - `5.6` - `6.0`, `6.1`, `6.2`, `6.3`, `6.4`, `6.5`, `6.6`, `6.7`, `6.8` -- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10`, `7.11` +- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10`, `7.11`, `7.12` -``kibana_discover_version: '7.11'`` +``kibana_discover_version: '7.12'`` kibana_discover_index_pattern_id ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index 0cbbc2116..58e3476f4 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -15,7 +15,7 @@ kibana_default_timedelta = datetime.timedelta(minutes=10) kibana5_kibana6_versions = frozenset(['5.6', '6.0', '6.1', '6.2', '6.3', '6.4', '6.5', '6.6', '6.7', '6.8']) -kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11']) +kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11', '7.12']) def generate_kibana_discover_url(rule, match): ''' Creates a link for a kibana discover app. 
''' diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 5e95ab865..771fc99aa 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -219,7 +219,7 @@ properties: ### Kibana Discover App Link generate_kibana_discover_url: {type: boolean} kibana_discover_app_url: {type: string, format: uri} - kibana_discover_version: {type: string, enum: ['7.11', '7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} + kibana_discover_version: {type: string, enum: ['7.12', '7.11', '7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} kibana_discover_index_pattern_id: {type: string, minLength: 1} kibana_discover_columns: {type: array, items: {type: string, minLength: 1}, minItems: 1} kibana_discover_from_timedelta: *timedelta diff --git a/tests/kibana_discover_test.py b/tests/kibana_discover_test.py index 30191b89b..0e796e480 100644 --- a/tests/kibana_discover_test.py +++ b/tests/kibana_discover_test.py @@ -38,7 +38,7 @@ def test_generate_kibana_discover_url_with_kibana_5x_and_6x(kibana_version): assert url == expectedUrl -@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11']) +@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11', '7.12']) def test_generate_kibana_discover_url_with_kibana_7x(kibana_version): url = generate_kibana_discover_url( rule={ @@ -171,7 +171,7 @@ def test_generate_kibana_discover_url_with_from_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.11', + 'kibana_discover_version': '7.12', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timestamp_field': 'timestamp' @@ -204,7 +204,7 @@ def test_generate_kibana_discover_url_with_from_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.11', + 'kibana_discover_version': '7.12', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -238,7 +238,7 @@ def test_generate_kibana_discover_url_with_to_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.11', + 'kibana_discover_version': '7.12', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_to_timedelta': timedelta(hours=1), 'timestamp_field': 'timestamp' @@ -271,7 +271,7 @@ def test_generate_kibana_discover_url_with_to_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.11', + 'kibana_discover_version': '7.12', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_to_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -305,7 +305,7 @@ def test_generate_kibana_discover_url_with_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.11', + 
'kibana_discover_version': '7.12', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'timeframe': timedelta(minutes=20), 'timestamp_field': 'timestamp' From ab397683c9b49fe98b43669b7a3b86e5984ceba8 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Mon, 12 Apr 2021 23:44:40 +0900 Subject: [PATCH 0118/1065] apscheduler>=3.3.0_to_>=3.3.0,<4.0 --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 99e3306e2..a865b3f47 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -apscheduler>=3.3.0 +apscheduler>=3.3.0,<4.0 aws-requests-auth>=0.3.0 sortedcontainers>=2.2.2 boto3>=1.4.4 diff --git a/setup.py b/setup.py index e0e320ee6..85a102c66 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ packages=find_packages(), package_data={'elastalert': ['schema.yaml', 'es_mappings/**/*.json']}, install_requires=[ - 'apscheduler>=3.3.0', + 'apscheduler>=3.3.0,<4.0', 'aws-requests-auth>=0.3.0', 'sortedcontainers>=2.2.2', 'boto3>=1.4.4', From 45237cc364e2cafc1859d12841b1bab11c810d9f Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Tue, 13 Apr 2021 23:45:08 +0900 Subject: [PATCH 0119/1065] Update sphinx and elasticsearch-py sphinx 1.6.6 to 3.5.4 elasticsearch 7.0.0 to >=7.0.0,<8.0.0 --- requirements.txt | 2 +- setup.py | 2 +- tox.ini | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index a865b3f47..67558b3de 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ boto3>=1.4.4 cffi>=1.11.5 configparser>=3.5.0 croniter>=0.3.16 -elasticsearch==7.0.0 +elasticsearch>=7.0.0,<8.0.0 envparse>=0.2.0 exotel>=0.1.3 Jinja2==2.11.3 diff --git a/setup.py b/setup.py index 85a102c66..cd2644983 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ 'boto3>=1.4.4', 'configparser>=3.5.0', 'croniter>=0.3.16', - 'elasticsearch==7.0.0', + 'elasticsearch>=7.0.0,<8.0.0', 'envparse>=0.2.0', 'exotel>=0.1.3', 'jira>=2.0.0', diff --git a/tox.ini b/tox.ini index f5e3369bc..47e62caa6 100644 --- a/tox.ini +++ b/tox.ini @@ -25,6 +25,6 @@ norecursedirs = .* virtualenv_run docs build venv env [testenv:docs] deps = {[testenv]deps} - sphinx==1.6.6 + sphinx==3.5.4 changedir = docs commands = sphinx-build -b html -d build/doctrees -W source build/html From 955f75742dd532069bbe230564e9fdc0757b0e57 Mon Sep 17 00:00:00 2001 From: "GIBSON, NICHOLAS R" Date: Fri, 16 Apr 2021 16:26:41 -0700 Subject: [PATCH 0120/1065] added optional Prometheus metrics endpoint --- README-old.md | 2 ++ elastalert/elastalert.py | 8 +++++ elastalert/prometheus_wrapper.py | 55 ++++++++++++++++++++++++++++++++ requirements.txt | 1 + setup.py | 1 + 5 files changed, 67 insertions(+) create mode 100644 elastalert/prometheus_wrapper.py diff --git a/README-old.md b/README-old.md index 9902b4d7f..982ebbc47 100644 --- a/README-old.md +++ b/README-old.md @@ -115,6 +115,8 @@ Eg: ``--rule this_rule.yaml`` ``--config`` allows you to specify the location of the configuration. By default, it is will look for config.yaml in the current directory. +``--prometheus_port`` exposes ElastAlert Prometheus metrics on the specified port. Prometheus metrics disabled by default. 
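+
+For example (a sketch; the config path and port are placeholders):
+
+```
+python -m elastalert.elastalert --verbose --config config.yaml --prometheus_port 9979
+```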
+ ## Third Party Tools And Extras ### Kibana plugin ![img](https://raw.githubusercontent.com/bitsensor/elastalert-kibana-plugin/master/showcase.gif) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 57bd771cb..c7509a7c1 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -25,6 +25,7 @@ from elasticsearch.exceptions import ElasticsearchException from elasticsearch.exceptions import NotFoundError from elasticsearch.exceptions import TransportError +from .prometheus_wrapper import PrometheusWrapper from . import kibana from .alerts import DebugAlerter @@ -111,6 +112,7 @@ def parse_args(self, args): dest='es_debug_trace', help='Enable logging from Elasticsearch queries as curl command. Queries will be logged to file. Note that ' 'this will incorrectly display localhost:9200 as the host/port') + parser.add_argument('--prometheus_port', type=int, dest='prometheus_port', help='Enables Prometheus metrics on specified port.') self.args = parser.parse_args(args) def __init__(self, args): @@ -171,6 +173,7 @@ def __init__(self, args): self.scheduler = BackgroundScheduler() self.string_multi_field_name = self.conf.get('string_multi_field_name', False) self.add_metadata_alert = self.conf.get('add_metadata_alert', False) + self.prometheus_port = self.args.prometheus_port self.show_disabled_rules = self.conf.get('show_disabled_rules', True) self.writeback_es = elasticsearch_client(self.conf) @@ -2076,6 +2079,11 @@ def main(args=None): if not args: args = sys.argv[1:] client = ElastAlerter(args) + + if client.prometheus_port and not client.debug: + p = PrometheusWrapper(client) + p.start() + if not client.args.silence: client.start() diff --git a/elastalert/prometheus_wrapper.py b/elastalert/prometheus_wrapper.py new file mode 100644 index 000000000..b3d0758b2 --- /dev/null +++ b/elastalert/prometheus_wrapper.py @@ -0,0 +1,55 @@ +import prometheus_client + + +class PrometheusWrapper: + """ Exposes ElastAlert metrics on a Prometheus metrics endpoint. + Wraps ElastAlerter run_rule and writeback to collect metrics. 
""" + + def __init__(self, client): + self.prometheus_port = client.prometheus_port + self.run_rule = client.run_rule + self.writeback = client.writeback + + client.run_rule = self.metrics_run_rule + client.writeback = self.metrics_writeback + + # initialize prometheus metrics to be exposed + self.prom_scrapes = prometheus_client.Counter('elastalert_scrapes', 'Number of scrapes for rule', ['rule_name']) + self.prom_hits = prometheus_client.Counter('elastalert_hits', 'Number of hits for rule', ['rule_name']) + self.prom_matches = prometheus_client.Counter('elastalert_matches', 'Number of matches for rule', ['rule_name']) + self.prom_time_taken = prometheus_client.Counter('elastalert_time_taken', 'Time taken to evaluate rule', ['rule_name']) + self.prom_alerts_sent = prometheus_client.Counter('elastalert_alerts_sent', 'Number of alerts sent for rule', ['rule_name']) + self.prom_alerts_not_sent = prometheus_client.Counter('elastalert_alerts_not_sent', 'Number of alerts not sent', ['rule_name']) + self.prom_errors = prometheus_client.Counter('elastalert_errors', 'Number of errors for rule') + self.prom_alerts_silenced = prometheus_client.Counter('elastalert_alerts_silenced', 'Number of silenced alerts', ['rule_name']) + + def start(self): + prometheus_client.start_http_server(self.prometheus_port) + + def metrics_run_rule(self, rule, endtime, starttime=None): + """ Increment counter every time rule is run """ + try: + self.prom_scrapes.labels(rule['name']).inc() + finally: + return self.run_rule(rule, endtime, starttime) + + def metrics_writeback(self, doc_type, body): + """ Update various prometheus metrics accoording to the doc_type """ + + res = self.writeback(doc_type, body) + try: + if doc_type == 'elastalert_status': + self.prom_hits.labels(body['rule_name']).inc(int(body['hits'])) + self.prom_matches.labels(body['rule_name']).inc(int(body['matches'])) + self.prom_time_taken.labels(body['rule_name']).inc(float(body['time_taken'])) + elif doc_type == 'elastalert': + if body['alert_sent']: + self.prom_alerts_sent.labels(body['rule_name']).inc() + else: + self.prom_alerts_not_sent.labels(body['rule_name']).inc() + elif doc_type == 'elastalert_error': + self.prom_errors.inc() + elif doc_type == 'silence': + self.prom_alerts_silenced.labels(body['rule_name']).inc() + finally: + return res \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 67558b3de..c35de8e21 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,6 +13,7 @@ jira>=2.0.0 jsonschema>=3.0.2 mock>=2.0.0 prison>=0.1.2 +prometheus_client>=0.10.1 py-zabbix>=1.1.3 PyStaticConfiguration>=0.10.3 python-dateutil>=2.6.0,<2.7.0 diff --git a/setup.py b/setup.py index cd2644983..5abdd4360 100644 --- a/setup.py +++ b/setup.py @@ -41,6 +41,7 @@ 'jsonschema>=3.0.2', 'mock>=2.0.0', 'prison>=0.1.2', + 'prometheus_client>=0.10.1', 'py-zabbix>=1.1.3', 'PyStaticConfiguration>=0.10.3', 'python-dateutil>=2.6.0,<2.7.0', From 606f89e2cb8c8ca0685e803bbbcf33aebbb99f3f Mon Sep 17 00:00:00 2001 From: "GIBSON, NICHOLAS R" Date: Mon, 19 Apr 2021 10:14:49 -0700 Subject: [PATCH 0121/1065] fix build issues --- elastalert/elastalert.py | 2 +- elastalert/prometheus_wrapper.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index c7509a7c1..dc910c6b0 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -2079,7 +2079,7 @@ def main(args=None): if not args: args = sys.argv[1:] client = ElastAlerter(args) - + if client.prometheus_port and not 
client.debug: p = PrometheusWrapper(client) p.start() diff --git a/elastalert/prometheus_wrapper.py b/elastalert/prometheus_wrapper.py index b3d0758b2..d94a35200 100644 --- a/elastalert/prometheus_wrapper.py +++ b/elastalert/prometheus_wrapper.py @@ -52,4 +52,4 @@ def metrics_writeback(self, doc_type, body): elif doc_type == 'silence': self.prom_alerts_silenced.labels(body['rule_name']).inc() finally: - return res \ No newline at end of file + return res From 63a7cc41d3a42f939556244a48608062aa20ddbe Mon Sep 17 00:00:00 2001 From: just1900 Date: Wed, 31 Mar 2021 10:42:45 +0800 Subject: [PATCH 0122/1065] fix compound query key in metric aggregation --- elastalert/ruletypes.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index c05912753..575607548 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -1122,7 +1122,13 @@ def check_matches_recursive(self, timestamp, query_key, aggregation_data, compou result, compound_keys[1:], match_data) - + elif 'interval_aggs' in aggregation_data: + for result in aggregation_data['interval_aggs']['buckets']: + self.check_matches_recursive(timestamp, + query_key, + result, + compound_keys[1:], + match_data) else: metric_val = aggregation_data[self.metric_key]['value'] if self.crossed_thresholds(metric_val): From 3d0a285a509e35589cabfe751be9b6f30565d106 Mon Sep 17 00:00:00 2001 From: just1900 Date: Thu, 22 Apr 2021 23:07:14 +0800 Subject: [PATCH 0123/1065] Unit Test added for metric_aggregation with complex query key and bucket interval --- elastalert/ruletypes.py | 30 +++++++++++++----------------- tests/rules_test.py | 35 +++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 17 deletions(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 575607548..39719739c 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -1122,24 +1122,20 @@ def check_matches_recursive(self, timestamp, query_key, aggregation_data, compou result, compound_keys[1:], match_data) - elif 'interval_aggs' in aggregation_data: - for result in aggregation_data['interval_aggs']['buckets']: - self.check_matches_recursive(timestamp, - query_key, - result, - compound_keys[1:], - match_data) else: - metric_val = aggregation_data[self.metric_key]['value'] - if self.crossed_thresholds(metric_val): - match_data[self.rules['timestamp_field']] = timestamp - match_data[self.metric_key] = metric_val - - # add compound key to payload to allow alerts to trigger for every unique occurence - compound_value = [match_data[key] for key in self.rules['compound_query_key']] - match_data[self.rules['query_key']] = ",".join([str(value) for value in compound_value]) - - self.add_match(match_data) + if 'interval_aggs' in aggregation_data: + metric_val_arr = [term[self.metric_key]['value'] for term in aggregation_data['interval_aggs']['buckets']] + else: + metric_val_arr = [aggregation_data[self.metric_key]['value']] + for metric_val in metric_val_arr: + if self.crossed_thresholds(metric_val): + match_data[self.rules['timestamp_field']] = timestamp + match_data[self.metric_key] = metric_val + + # add compound key to payload to allow alerts to trigger for every unique occurence + compound_value = [match_data[key] for key in self.rules['compound_query_key']] + match_data[self.rules['query_key']] = ",".join([str(value) for value in compound_value]) + self.add_match(match_data) def crossed_thresholds(self, metric_value): if metric_value is None: diff --git 
a/tests/rules_test.py b/tests/rules_test.py
index 1954b5d54..5f6421d87 100644
--- a/tests/rules_test.py
+++ b/tests/rules_test.py
@@ -1184,6 +1184,41 @@ def test_metric_aggregation_complex_query_key():
     assert rule.matches[1]['sub_qk'] == 'sub_qk_val2'
 
 
+def test_metric_aggregation_complex_query_key_bucket_interval():
+    rules = {'buffer_time': datetime.timedelta(minutes=5),
+             'timestamp_field': '@timestamp',
+             'metric_agg_type': 'avg',
+             'metric_agg_key': 'cpu_pct',
+             'bucket_interval': {'minutes': 1},
+             'bucket_interval_timedelta': datetime.timedelta(minutes=1),
+             'compound_query_key': ['qk', 'sub_qk'],
+             'query_key': 'qk,sub_qk',
+             'max_threshold': 0.8}
+
+    # Quoted from https://elastalert.readthedocs.io/en/latest/ruletypes.html#metric-aggregation
+    # bucket_interval: If present this will divide the metric calculation window into bucket_interval sized segments.
+    # The metric value will be calculated and evaluated against the threshold(s) for each segment.
+    interval_aggs = {"interval_aggs": {"buckets": [
+        {"metric_cpu_pct_avg": {"value": 0.91}, "key": "1617156690000"},
+        {"metric_cpu_pct_avg": {"value": 0.89}, "key": "1617156750000"},
+        {"metric_cpu_pct_avg": {"value": 0.78}, "key": "1617156810000"},
+        {"metric_cpu_pct_avg": {"value": 0.85}, "key": "1617156870000"},
+        {"metric_cpu_pct_avg": {"value": 0.86}, "key": "1617156930000"},
+    ]}, "key": "sub_qk_val1"}
+
+    query = {"bucket_aggs": {"buckets": [
+        interval_aggs
+    ]}, "key": "qk_val"}
+
+    rule = MetricAggregationRule(rules)
+    rule.check_matches(datetime.datetime.now(), 'qk_val', query)
+    assert len(rule.matches) == 4
+    assert rule.matches[0]['qk'] == 'qk_val'
+    assert rule.matches[1]['qk'] == 'qk_val'
+    assert rule.matches[0]['sub_qk'] == 'sub_qk_val1'
+    assert rule.matches[1]['sub_qk'] == 'sub_qk_val1'
+
+
 def test_percentage_match():
     rules = {'match_bucket_filter': {'term': 'term_val'},
             'buffer_time': datetime.timedelta(minutes=5),
From d348a2f55fe3170323e8800172c05726b85de1e8 Mon Sep 17 00:00:00 2001
From: Evan Lock
Date: Fri, 23 Apr 2021 14:02:05 -0400
Subject: [PATCH 0124/1065] Add documentation for alert type

---
 docs/source/ruletypes.rst | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 253a9dbc7..62e5a4c6b 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -1489,6 +1489,18 @@ Example usage using new-style format::
 
     command: ["/bin/send_alert", "--username", "{match[username]}"]
 
+Datadog
+~~~~~~~
+
+This alert will create a [Datadog Event](https://docs.datadoghq.com/events/). Events are limited to 4000 characters. If an event is sent that contains
+a message that is longer than 4000 characters, only its first 4000 characters will be displayed.
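+
+A minimal sketch of a rule using this alerter, with placeholder key values (the two required
+options are described below)::
+
+    alert:
+      - datadog
+    datadog_api_key: "<your Datadog API key>"
+    datadog_app_key: "<your Datadog application key>"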
+ +This alert requires two additional options: + +``datadog_api_key``: [Datadog API key](https://docs.datadoghq.com/account_management/api-app-keys/#api-keys) + +``datadog_app_key``: [Datadog application key](https://docs.datadoghq.com/account_management/api-app-keys/#application-keys) + Email ~~~~~ From bb141fdc253214cb74aa77e11cc9ee58ec541802 Mon Sep 17 00:00:00 2001 From: Evan Lock Date: Fri, 23 Apr 2021 15:46:31 -0400 Subject: [PATCH 0125/1065] Add test for Datadog alert --- tests/alerts_test.py | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/tests/alerts_test.py b/tests/alerts_test.py index c0221ed1b..f66f08729 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -12,6 +12,7 @@ from elastalert.alerts import Alerter from elastalert.alerts import BasicMatchString from elastalert.alerts import CommandAlerter +from elastalert.alerts import DatadogAlerter from elastalert.alerts import EmailAlerter from elastalert.alerts import HTTPPostAlerter from elastalert.alerts import JiraAlerter @@ -2296,3 +2297,38 @@ def test_alert_subject_size_limit_with_args(ea): alert = Alerter(rule) alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) assert 6 == len(alertSubject) + +def test_datadog_alerter(): + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'datadog_api_key': 'test-api-key', + 'datadog_app_key': 'test-app-key', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'name': 'datadog-test-name' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'title': rule['alert_subject'], + 'text': "Test Datadog Event Alerter\n\n@timestamp: 2021-01-01T00:00:00\nname: datadog-test-name\n" + } + mock_post_request.assert_called_once_with( + "https://api.datadoghq.com/api/v1/events", + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'DD-API-KEY': rule['datadog_api_key'], + 'DD-APPLICATION-KEY': rule['datadog_app_key'] + } + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data From a6e8673a1049d2223c2f86f96b3b225a958d9112 Mon Sep 17 00:00:00 2001 From: Evan Lock Date: Fri, 23 Apr 2021 15:59:06 -0400 Subject: [PATCH 0126/1065] Correct linting errors --- elastalert/alerts.py | 1 + tests/alerts_test.py | 1 + 2 files changed, 2 insertions(+) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 26d19008b..db3988442 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2232,6 +2232,7 @@ def get_info(self): "chatwork_room_id": self.chatwork_room_id } + class DatadogAlerter(Alerter): ''' Creates a Datadog Event for each alert ''' required_options = frozenset(['datadog_api_key', 'datadog_app_key']) diff --git a/tests/alerts_test.py b/tests/alerts_test.py index f66f08729..4f56ad1a7 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -2298,6 +2298,7 @@ def test_alert_subject_size_limit_with_args(ea): alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) assert 6 == len(alertSubject) + def test_datadog_alerter(): rule = { 'name': 'Test Datadog Event Alerter', From 12cba14c95ad64db8e69462c6754d16990c9db49 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 13:04:34 -0400 Subject: [PATCH 0127/1065] Update 
for Elastalert 2 --- README.md | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 694f03ebd..2539d3823 100644 --- a/README.md +++ b/README.md @@ -1,18 +1,15 @@ -# Fork of yelp/elastalert +# Elastalert 2 -The original [yelp/elastalert][0] repository has become mostly stale, with hundreds of open PRs and -over 1000 open issues. The Yelp team has acknowledged that they are winding down support of -Elastalert. Consequently, it is difficult to merge fixes, dependency upgrades, and new features into -Elastalert. Because of this, a fork of Elastalert has been created. [jertel/elastalert][1] will be -an alternate repository for updates, until a new maintainer is appointed by the Yelp team and it's -clear that the new maintainers are responding to PRs and issues. +Elastalert 2 is the supported fork of [Elastalert][0], which had been maintained by the Yelp team +but become mostly stale when the Yelp team ceased using Elastalert. + +Elastalert 2 is backwards compatible with the original Elastalert rules. ## Documentation -Updated Elastalert documentation that reflects the state of the _alt_ branch can be found [here][3]. -This is the place to start if you're not familiar with Elastalert at all. +Documentation for Elastalert 2 can be found on [readthedocs.com][3]. This is the place to start if you're not familiar with Elastalert at all. -The full list of platforms that Elastalert can fire alerts into can be found [here][4] +The full list of platforms that Elastalert can fire alerts into can be found [in the documentation][4]. The original README for Elastalert can be found [here][5]. Please note that this file is not being actively maintained, and will probably grow less accurate over time. @@ -22,14 +19,14 @@ not being actively maintained, and will probably grow less accurate over time. PRs are welcome, but must include tests, when possible. PRs will not be merged if they do not pass the automated CI workflows. -The current status of the _alt_ branch CI workflow: +The current status of the CI workflow: -![CI Workflow](https://github.com/jertel/elastalert/workflows/alt_build_test/badge.svg) +![CI Workflow](https://github.com/jertel/elastalert/workflows/master_build_test/badge.svg) ## Docker If you're interested in a pre-built Docker image for either the official yelp/elastalert release, or -for this fork, check out the [elastalert-docker][2] project on Docker Hub. +for this fork, check out the [elastalert2][2] project on Docker Hub. ## License @@ -37,7 +34,7 @@ Elastalert is licensed under the [Apache License, Version 2.0][6]. 
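For illustration, the Datadog alerter added in the patches above is driven entirely by rule options. A minimal rule might look like the following sketch; the index, filter, and key values are hypothetical, and it assumes the alert type is registered as `datadog` (consistent with the `DatadogAlerter` class exercised by the new test):

```yaml
# Hypothetical rule using the new Datadog alerter (illustrative sketch only).
name: Example Datadog Event Alert
type: any                          # fire on every matching document
index: logstash-*                  # hypothetical index pattern
filter:
- query:
    query_string:
      query: "status: 500"         # hypothetical match condition
alert:
- datadog                          # assumes the alerter is registered as 'datadog'
datadog_api_key: "YOUR-API-KEY"    # required option, documented above
datadog_app_key: "YOUR-APP-KEY"    # required option, documented above
```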
[0]: https://github.com/yelp/elastalert [1]: https://github.com/jertel/elastalert -[2]: https://hub.docker.com/r/jertel/elastalert-docker +[2]: https://hub.docker.com/r/jertel/elastalert2 [3]: https://elastalert2.readthedocs.io/ [4]: https://elastalert2.readthedocs.io/en/latest/ruletypes.html#alerts [5]: https://github.com/jertel/elastalert/blob/alt/README-old.md From 9df402fc1b2658230354e18922b36d5b88792f24 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 13:07:44 -0400 Subject: [PATCH 0128/1065] Switch action to use master branch --- .../{alt_build_test.yml => master_build_test.yml} | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) rename .github/workflows/{alt_build_test.yml => master_build_test.yml} (86%) diff --git a/.github/workflows/alt_build_test.yml b/.github/workflows/master_build_test.yml similarity index 86% rename from .github/workflows/alt_build_test.yml rename to .github/workflows/master_build_test.yml index 7dab4f1c0..8ab9d01a5 100644 --- a/.github/workflows/alt_build_test.yml +++ b/.github/workflows/master_build_test.yml @@ -1,14 +1,12 @@ -# This is a basic workflow to help you get started with Actions - -name: alt_build_test +name: master_build_test # Controls when the action will run. on: # Triggers the workflow on push or pull request events but only for the master branch push: - branches: [ alt ] + branches: [ master ] pull_request: - branches: [ alt ] + branches: [ master ] # Allows you to run this workflow manually from the Actions tab workflow_dispatch: From cef5e2b328458933a08b97903b30110ade7ae75c Mon Sep 17 00:00:00 2001 From: Elad Amit Date: Sat, 24 Apr 2021 20:08:45 +0300 Subject: [PATCH 0129/1065] updated the import directive docs --- docs/source/ruletypes.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index bb6ca2b49..2e26b0ca0 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -249,8 +249,8 @@ import ``import``: If specified includes all the settings from this yaml file. This allows common config options to be shared. Note that imported files that aren't complete rules should not have a ``.yml`` or ``.yaml`` suffix so that ElastAlert doesn't treat them as rules. Filters in imported files are merged (ANDed) -with any filters in the rule. You can only have one import per rule, though the imported file can import another file, recursively. The filename -can be an absolute path or relative to the rules directory. (Optional, string, no default) +with any filters in the rule. You can only have one import per rule, though the imported file can import another file or multiple files, recursively. +The filename can be an absolute path or relative to the rules directory. 
(Optional, string or array of strings, no default) use_ssl ^^^^^^^ From 7c1cff44893152ff0400ea61e3fdc317c5f609c2 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 14:27:24 -0400 Subject: [PATCH 0130/1065] Merge elastalert-docker project; update README --- .github/workflows/release.yml | 19 + .secrets.baseline | 27 -- .travis.yml | 42 -- Dockerfile | 36 ++ README-old.md | 372 ----------------- README.md | 56 ++- changelog.md | 383 ------------------ chart/elastalert2/.helmignore | 21 + chart/elastalert2/Chart.yaml | 12 + chart/elastalert2/README.md | 90 ++++ chart/elastalert2/templates/NOTES.txt | 1 + chart/elastalert2/templates/_helpers.tpl | 36 ++ chart/elastalert2/templates/deployment.yaml | 124 ++++++ .../templates/podsecuritypolicy.yaml | 39 ++ chart/elastalert2/templates/role.yaml | 20 + chart/elastalert2/templates/rolebinding.yaml | 18 + chart/elastalert2/templates/rules.yaml | 14 + .../elastalert2/templates/serviceaccount.yaml | 15 + chart/elastalert2/values.yaml | 228 +++++++++++ setup.py | 10 +- 20 files changed, 724 insertions(+), 839 deletions(-) create mode 100644 .github/workflows/release.yml delete mode 100644 .secrets.baseline delete mode 100644 .travis.yml create mode 100644 Dockerfile delete mode 100644 README-old.md delete mode 100644 changelog.md create mode 100644 chart/elastalert2/.helmignore create mode 100644 chart/elastalert2/Chart.yaml create mode 100644 chart/elastalert2/README.md create mode 100644 chart/elastalert2/templates/NOTES.txt create mode 100644 chart/elastalert2/templates/_helpers.tpl create mode 100644 chart/elastalert2/templates/deployment.yaml create mode 100644 chart/elastalert2/templates/podsecuritypolicy.yaml create mode 100644 chart/elastalert2/templates/role.yaml create mode 100644 chart/elastalert2/templates/rolebinding.yaml create mode 100644 chart/elastalert2/templates/rules.yaml create mode 100644 chart/elastalert2/templates/serviceaccount.yaml create mode 100644 chart/elastalert2/values.yaml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 000000000..5f51356b8 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,19 @@ +name: upload-chart + +on: + release: + types: [published] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Helm push chart + env: + HELM_REPO_ACCESS_TOKEN: ${{ secrets.HELM_REPO_ACCESS_TOKEN }} + run: | + wget https://get.helm.sh/helm-v3.2.1-linux-amd64.tar.gz + tar xf helm-v3.2.1-linux-amd64.tar.gz + linux-amd64/helm plugin install https://github.com/chartmuseum/helm-push + linux-amd64/helm push chart/elastalert2 https://charts.banzaicloud.io/gh/Codesim-LLC diff --git a/.secrets.baseline b/.secrets.baseline deleted file mode 100644 index b4405a48d..000000000 --- a/.secrets.baseline +++ /dev/null @@ -1,27 +0,0 @@ -{ - "exclude_regex": ".*tests/.*|.*yelp/testing/.*|\\.pre-commit-config\\.yaml", - "generated_at": "2018-07-06T22:54:22Z", - "plugins_used": [ - { - "base64_limit": 4.5, - "name": "Base64HighEntropyString" - }, - { - "hex_limit": 3, - "name": "HexHighEntropyString" - }, - { - "name": "PrivateKeyDetector" - } - ], - "results": { - ".travis.yml": [ - { - "hashed_secret": "4f7a1ea04dafcbfee994ee1d08857b8aaedf8065", - "line_number": 14, - "type": "Base64 High Entropy String" - } - ] - }, - "version": "0.9.1" -} diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 569bf12d6..000000000 --- a/.travis.yml +++ /dev/null @@ -1,42 +0,0 @@ -language: python -python: -- '3.6' -env: -- 
TOXENV=docs -- TOXENV=py36 -install: -- pip install tox -- > - if [[ -n "${ES_VERSION}" ]] ; then - wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-${ES_VERSION}.tar.gz - mkdir elasticsearch-${ES_VERSION} && tar -xzf elasticsearch-${ES_VERSION}.tar.gz -C elasticsearch-${ES_VERSION} --strip-components=1 - ./elasticsearch-${ES_VERSION}/bin/elasticsearch & - fi -script: -- > - if [[ -n "${ES_VERSION}" ]] ; then - wget -q --waitretry=1 --retry-connrefused --tries=30 -O - http://127.0.0.1:9200 - make test-elasticsearch - else - make test - fi -jobs: - include: - - stage: 'Elasticsearch test' - env: TOXENV=py36 ES_VERSION=7.0.0-linux-x86_64 - - env: TOXENV=py36 ES_VERSION=6.6.2 - - env: TOXENV=py36 ES_VERSION=6.3.2 - - env: TOXENV=py36 ES_VERSION=6.2.4 - - env: TOXENV=py36 ES_VERSION=6.0.1 - - env: TOXENV=py36 ES_VERSION=5.6.16 - -deploy: - provider: pypi - user: yelplabs - password: - secure: TpSTlFu89tciZzboIfitHhU5NhAB1L1/rI35eQTXstiqzYg2mweOuip+MPNx9AlX3Swg7MhaFYnSUvRqPljuoLjLD0EQ7BHLVSBFl92ukkAMTeKvM6LbB9HnGOwzmAvTR5coegk8IHiegudODWvnhIj4hp7/0EA+gVX7E55kEAw= - on: - tags: true - distributions: sdist bdist_wheel - repo: Yelp/elastalert - branch: master diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..9b6924ce3 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,36 @@ +FROM python:alpine as builder + +LABEL description="Elastalert 2 suitable for Kubernetes and Helm" +LABEL maintainer="Jason Ertel (jertel at codesim.com)" + +RUN apk --update upgrade && \ + apk add git && \ + rm -rf /var/cache/apk/* + +RUN mkdir -p /opt/elastalert && \ + git clone https://github.com/jertel/elastalert2 /tmp/elastalert && \ + cd /tmp/elastalert && \ + pip install setuptools wheel && \ + python setup.py sdist bdist_wheel + +FROM python:alpine + +COPY --from=builder /tmp/elastalert/dist/*.tar.gz /tmp/ + +RUN apk --update upgrade && \ + apk add gcc libffi-dev musl-dev python3-dev openssl-dev tzdata libmagic cargo && \ + pip install /tmp/*.tar.gz && \ + apk del gcc libffi-dev musl-dev python3-dev openssl-dev cargo && \ + rm -rf /var/cache/apk/* + +RUN mkdir -p /opt/elastalert && \ + echo "#!/bin/sh" >> /opt/elastalert/run.sh && \ + echo "set -e" >> /opt/elastalert/run.sh && \ + echo "elastalert-create-index --config /opt/config/elastalert_config.yaml" >> /opt/elastalert/run.sh && \ + echo "elastalert --config /opt/config/elastalert_config.yaml \"\$@\"" >> /opt/elastalert/run.sh && \ + chmod +x /opt/elastalert/run.sh + +ENV TZ "UTC" + +WORKDIR /opt/elastalert +ENTRYPOINT ["/opt/elastalert/run.sh"] diff --git a/README-old.md b/README-old.md deleted file mode 100644 index 982ebbc47..000000000 --- a/README-old.md +++ /dev/null @@ -1,372 +0,0 @@ -# Deprecated ElastAlert README - -*The documentation below refers to a previous ElastAlert version. For the latest version, see [here][0]* - -Recent changes: As of Elastalert 0.2.0, you must use Python 3.6. Python 2 will not longer be supported. - -[![Build Status](https://travis-ci.org/Yelp/elastalert.svg)](https://travis-ci.org/Yelp/elastalert) -[![Join the chat at https://gitter.im/Yelp/elastalert](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/Yelp/elastalert?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) - -## ElastAlert - [Read the Docs](http://elastalert.readthedocs.org). -### Easy & Flexible Alerting With Elasticsearch - -ElastAlert is a simple framework for alerting on anomalies, spikes, or other patterns of interest from data in Elasticsearch. 
- -ElastAlert works with all versions of Elasticsearch. - -At Yelp, we use Elasticsearch, Logstash and Kibana for managing our ever increasing amount of data and logs. -Kibana is great for visualizing and querying data, but we quickly realized that it needed a companion tool for alerting -on inconsistencies in our data. Out of this need, ElastAlert was created. - -If you have data being written into Elasticsearch in near real time and want to be alerted when that data matches certain patterns, ElastAlert is the tool for you. If you can see it in Kibana, ElastAlert can alert on it. - -## Overview - -We designed ElastAlert to be reliable, highly modular, and easy to set up and configure. - -It works by combining Elasticsearch with two types of components, rule types and alerts. -Elasticsearch is periodically queried and the data is passed to the rule type, which determines when -a match is found. When a match occurs, it is given to one or more alerts, which take action based on the match. - -This is configured by a set of rules, each of which defines a query, a rule type, and a set of alerts. - -Several rule types with common monitoring paradigms are included with ElastAlert: - -- Match where there are at least X events in Y time" (``frequency`` type) -- Match when the rate of events increases or decreases" (``spike`` type) -- Match when there are less than X events in Y time" (``flatline`` type) -- Match when a certain field matches a blacklist/whitelist" (``blacklist`` and ``whitelist`` type) -- Match on any event matching a given filter" (``any`` type) -- Match when a field has two different values within some time" (``change`` type) -- Match when a never before seen term appears in a field" (``new_term`` type) -- Match when the number of unique values for a field is above or below a threshold (``cardinality`` type) - -Currently, we have built-in support for the following alert types: - -- Command -- Email -- JIRA -- OpsGenie -- AWS SNS -- MS Teams -- Slack -- Mattermost -- Telegram -- GoogleChat -- PagerDuty -- PagerTree -- Exotel -- Twilio -- Splunk On-Call (Formerly VictorOps) -- Gitter -- ServiceNow -- Debug -- Stomp -- Alerta -- HTTP POST -- Line Notify -- TheHive -- Zabbix - -Additional rule types and alerts can be easily imported or written. - -In addition to this basic usage, there are many other features that make alerts more useful: - -- Alerts link to Kibana dashboards -- Aggregate counts for arbitrary fields -- Combine alerts into periodic reports -- Separate alerts by using a unique key field -- Intercept and enhance match data - -To get started, check out `Running ElastAlert For The First Time` in the [documentation](http://elastalert.readthedocs.org). - -## Running ElastAlert -You can either install the latest released version of ElastAlert using pip: - -```pip install elastalert``` - -or you can clone the ElastAlert repository for the most recent changes: - -```git clone https://github.com/Yelp/elastalert.git``` - -Install the module: - -```pip install "setuptools>=11.3"``` - -```python setup.py install``` - -The following invocation can be used to run ElastAlert after installing - -``$ elastalert [--debug] [--verbose] [--start ] [--end ] [--rule ] [--config ]`` - -``--debug`` will print additional information to the screen as well as suppresses alerts and instead prints the alert body. Not compatible with `--verbose`. - -``--verbose`` will print additional information without suppressing alerts. 
Not compatible with `--debug.` - -``--start`` will begin querying at the given timestamp. By default, ElastAlert will begin querying from the present. -Timestamp format is ``YYYY-MM-DDTHH-MM-SS[-/+HH:MM]`` (Note the T between date and hour). -Eg: ``--start 2014-09-26T12:00:00`` (UTC) or ``--start 2014-10-01T07:30:00-05:00`` - -``--end`` will cause ElastAlert to stop querying at the given timestamp. By default, ElastAlert will continue -to query indefinitely. - -``--rule`` will allow you to run only one rule. It must still be in the rules folder. -Eg: ``--rule this_rule.yaml`` - -``--config`` allows you to specify the location of the configuration. By default, it is will look for config.yaml in the current directory. - -``--prometheus_port`` exposes ElastAlert Prometheus metrics on the specified port. Prometheus metrics disabled by default. - -## Third Party Tools And Extras -### Kibana plugin -![img](https://raw.githubusercontent.com/bitsensor/elastalert-kibana-plugin/master/showcase.gif) -Available at the [ElastAlert Kibana plugin repository](https://github.com/bitsensor/elastalert-kibana-plugin). - -### Docker -A [Dockerized version](https://github.com/bitsensor/elastalert) of ElastAlert including a REST api is build from `master` to `bitsensor/elastalert:latest`. - -```bash -git clone https://github.com/bitsensor/elastalert.git; cd elastalert -docker run -d -p 3030:3030 -p 3333:3333 \ - -v `pwd`/config/elastalert.yaml:/opt/elastalert/config.yaml \ - -v `pwd`/config/config.json:/opt/elastalert-server/config/config.json \ - -v `pwd`/rules:/opt/elastalert/rules \ - -v `pwd`/rule_templates:/opt/elastalert/rule_templates \ - --net="host" \ - --name elastalert bitsensor/elastalert:latest -``` - -### ElastAlert-Docker - -This Dockerfile will build a Docker image for Elastalert. This image is compatible with the accompanying Helm chart for Kubernetes. - -https://github.com/jertel/elastalert-docker - -### ElastAlert uses ElastAlert Helm Chart almost as it is *Use original Docker Image to fix bugs - -[ElastAlert Server Helm Chart](https://github.com/daichi703n/elastalert-helm) - -[Praeco Helm Chart](https://github.com/daichi703n/praeco-helm) - -[Installing Praeco (ElastAlert GUI) into Kubernetes with Helm](https://en-designetwork.daichi703n.com/entry/2020/02/24/praeco-helm-kubernetes) - -### Praeco -![Praeco screenshot](https://user-images.githubusercontent.com/611996/47752071-7c4a9080-dc61-11e8-8ccf-2196f13429b2.png) -[Praeco](https://github.com/johnsusek/praeco) is a free open source GUI for ElastAlert. - -## Documentation - -Read the documentation at [Read the Docs](http://elastalert.readthedocs.org). - -To build a html version of the docs locally - -``` -pip install sphinx_rtd_theme sphinx -cd docs -make html -``` - -View in browser at build/html/index.html - -## Configuration - -See config.yaml.example for details on configuration. - -## Example rules - -Examples of different types of rules can be found in example_rules/. - -- ``example_spike.yaml`` is an example of the "spike" rule type, which allows you to alert when the rate of events, averaged over a time period, -increases by a given factor. This example will send an email alert when there are 3 times more events matching a filter occurring within the -last 2 hours than the number of events in the previous 2 hours. - -- ``example_frequency.yaml`` is an example of the "frequency" rule type, which will alert when there are a given number of events occuring -within a time period. 
This example will send an email when 50 documents matching a given filter occur within a 4 hour timeframe. - -- ``example_change.yaml`` is an example of the "change" rule type, which will alert when a certain field in two documents changes. In this example, -the alert email is sent when two documents with the same 'username' field but a different value of the 'country_name' field occur within 24 hours -of each other. - -- ``example_new_term.yaml`` is an example of the "new term" rule type, which alerts when a new value appears in a field or fields. In this example, -an email is sent when a new value of ("username", "computer") is encountered in example login logs. - -## Frequently Asked Questions - -### My rule is not getting any hits? - -So you've managed to set up ElastAlert, write a rule, and run it, but nothing happens, or it says ``0 query hits``. First of all, we recommend using the command ``elastalert-test-rule rule.yaml`` to debug. It will show you how many documents match your filters for the last 24 hours (or more, see ``--help``), and then shows you if any alerts would have fired. If you have a filter in your rule, remove it and try again. This will show you if the index is correct and that you have at least some documents. If you have a filter in Kibana and want to recreate it in ElastAlert, you probably want to use a query string. Your filter will look like - -``` -filter: -- query: - query_string: - query: "foo: bar AND baz: abc*" -``` -If you receive an error that Elasticsearch is unable to parse it, it's likely the YAML is not spaced correctly, and the filter is not in the right format. If you are using other types of filters, like ``term``, a common pitfall is not realizing that you may need to use the analyzed token. This is the default if you are using Logstash. For example, - -``` -filter: -- term: - foo: "Test Document" -``` - -will not match even if the original value for ``foo`` was exactly "Test Document". Instead, you want to use ``foo.raw``. If you are still having trouble troubleshooting why your documents do not match, try running ElastAlert with ``--es_debug_trace /path/to/file.log``. This will log the queries made to Elasticsearch in full so that you can see exactly what is happening. - -### I got hits, why didn't I get an alert? - -If you got logs that had ``X query hits, 0 matches, 0 alerts sent``, it depends on the ``type`` why you didn't get any alerts. If ``type: any``, a match will occur for every hit. If you are using ``type: frequency``, ``num_events`` must occur within ``timeframe`` of each other for a match to occur. Different rules apply for different rule types. - -If you see ``X matches, 0 alerts sent``, this may occur for several reasons. If you set ``aggregation``, the alert will not be sent until after that time has elapsed. If you have gotten an alert for this same rule before, that rule may be silenced for a period of time. The default is one minute between alerts. If a rule is silenced, you will see ``Ignoring match for silenced rule`` in the logs. - -If you see ``X alerts sent`` but didn't get any alert, it's probably related to the alert configuration. If you are using the ``--debug`` flag, you will not receive any alerts. Instead, the alert text will be written to the console. Use ``--verbose`` to achieve the same affects without preventing alerts. If you are using email alert, make sure you have it configured for an SMTP server. By default, it will connect to localhost on port 25. 
It will also use the word "elastalert" as the "From:" address. Some SMTP servers will reject this because it does not have a domain while others will add their own domain automatically. See the email section in the documentation for how to configure this. - -### Why did I only get one alert when I expected to get several? - -There is a setting called ``realert`` which is the minimum time between two alerts for the same rule. Any alert that occurs within this time will simply be dropped. The default value for this is one minute. If you want to receive an alert for every single match, even if they occur right after each other, use - -``` -realert: - minutes: 0 -``` - -You can of course set it higher as well. - -### How can I prevent duplicate alerts? - -By setting ``realert``, you will prevent the same rule from alerting twice in an amount of time. - -``` -realert: - days: 1 -``` - -You can also prevent duplicates based on a certain field by using ``query_key``. For example, to prevent multiple alerts for the same user, you might use - -``` -realert: - hours: 8 -query_key: user -``` - -Note that this will also affect the way many rule types work. If you are using ``type: frequency`` for example, ``num_events`` for a single value of ``query_key`` must occur before an alert will be sent. You can also use a compound of multiple fields for this key. For example, if you only wanted to receieve an alert once for a specific error and hostname, you could use - -``` -query_key: [error, hostname] -``` - -Internally, this works by creating a new field for each document called ``field1,field2`` with a value of ``value1,value2`` and using that as the ``query_key``. - -The data for when an alert will fire again is stored in Elasticsearch in the ``elastalert_status`` index, with a ``_type`` of ``silence`` and also cached in memory. - -### How can I change what's in the alert? - -You can use the field ``alert_text`` to add custom text to an alert. By setting ``alert_text_type: alert_text_only`` Or ``alert_text_type: alert_text_jinja``, it will be the entirety of the alert. You can also add different fields from the alert: - -With ``alert_text_type: alert_text_jinja`` by using [Jinja2](https://pypi.org/project/Jinja2/) Template. - -``` -alert_text_type: alert_text_jinja - -alert_text: | - Alert triggered! *({{num_hits}} Matches!)* - Something happened with {{username}} ({{email}}) - {{description|truncate}} - -``` - -> Top fields are accessible via `{{field_name}}` or `{{_data['field_name']}}`, `_data` is useful when accessing *fields with dots in their keys*, as Jinja treat dot as a nested field. -> If `_data` conflicts with your top level data, use ``jinja_root_name`` to change its name. - -With ``alert_text_type: alert_text_only`` by using Python style string formatting and ``alert_text_args``. For example - -``` -alert_text: "Something happened with {0} at {1}" -alert_text_type: alert_text_only -alert_text_args: ["username", "@timestamp"] -``` - -You can also limit the alert to only containing certain fields from the document by using ``include``. - -``` -include: ["ip_address", "hostname", "status"] -``` - -### My alert only contains data for one event, how can I see more? - -If you are using ``type: frequency``, you can set the option ``attach_related: true`` and every document will be included in the alert. An alternative, which works for every type, is ``top_count_keys``. This will show the top counts for each value for certain fields. 
For example, if you have - -``` -top_count_keys: ["ip_address", "status"] -``` - -and 10 documents matched your alert, it may contain something like - -``` -ip_address: -127.0.0.1: 7 -10.0.0.1: 2 -192.168.0.1: 1 - -status: -200: 9 -500: 1 -``` - -### How can I make the alert come at a certain time? - -The ``aggregation`` feature will take every alert that has occured over a period of time and send them together in one alert. You can use cron style syntax to send all alerts that have occured since the last once by using - -``` -aggregation: - schedule: '2 4 * * mon,fri' -``` - -### I have lots of documents and it's really slow, how can I speed it up? - -There are several ways to potentially speed up queries. If you are using ``index: logstash-*``, Elasticsearch will query all shards, even if they do not possibly contain data with the correct timestamp. Instead, you can use Python time format strings and set ``use_strftime_index`` - -``` -index: logstash-%Y.%m -use_strftime_index: true -``` - -Another thing you could change is ``buffer_time``. By default, ElastAlert will query large overlapping windows in order to ensure that it does not miss any events, even if they are indexed in real time. In config.yaml, you can adjust ``buffer_time`` to a smaller number to only query the most recent few minutes. - -``` -buffer_time: - minutes: 5 -``` - -By default, ElastAlert will download every document in full before processing them. Instead, you can have ElastAlert simply get a count of the number of documents that have occured in between each query. To do this, set ``use_count_query: true``. This cannot be used if you use ``query_key``, because ElastAlert will not know the contents of each documents, just the total number of them. This also reduces the precision of alerts, because all events that occur between each query will be rounded to a single timestamp. - -If you are using ``query_key`` (a single key, not multiple keys) you can use ``use_terms_query``. This will make ElastAlert perform a terms aggregation to get the counts for each value of a certain field. Both ``use_terms_query`` and ``use_count_query`` also require ``doc_type`` to be set to the ``_type`` of the documents. They may not be compatible with all rule types. - -### Can I perform aggregations? - -The only aggregation supported currently is a terms aggregation, by setting ``use_terms_query``. - -### I'm not using @timestamp, what do I do? - -You can use ``timestamp_field`` to change which field ElastAlert will use as the timestamp. You can use ``timestamp_type`` to change it between ISO 8601 and unix timestamps. You must have some kind of timestamp for ElastAlert to work. If your events are not in real time, you can use ``query_delay`` and ``buffer_time`` to adjust when ElastAlert will look for documents. - -### I'm using flatline but I don't see any alerts - -When using ``type: flatline``, ElastAlert must see at least one document before it will alert you that it has stopped seeing them. - -### How can I get a "resolve" event? - -ElastAlert does not currently support stateful alerts or resolve events. - -### Can I set a warning threshold? - -Currently, the only way to set a warning threshold is by creating a second rule with a lower threshold. - -## License - -ElastAlert is licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0 - -### Read the documentation at [Read the Docs](http://elastalert.readthedocs.org). - -### Questions? Drop by #elastalert on Freenode IRC. 
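The warning-threshold answer above can be made concrete with two rules that share the same filter but use different thresholds. This is an illustrative sketch only; the rule names, index, thresholds, and email addresses are all hypothetical:

```yaml
# errors_warning.yaml - hypothetical lower-threshold "warning" rule
name: Errors warning
type: frequency
index: logstash-*
num_events: 50             # warn at 50 matching events...
timeframe:
  hours: 1                 # ...within one hour
filter:
- query:
    query_string:
      query: "level: ERROR"
alert:
- email
email:
- "team@example.com"
```

```yaml
# errors_critical.yaml - same filter, higher threshold
name: Errors critical
type: frequency
index: logstash-*
num_events: 200            # escalate at 200 matching events in the same window
timeframe:
  hours: 1
filter:
- query:
    query_string:
      query: "level: ERROR"
alert:
- email
email:
- "oncall@example.com"
```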
-
-[0]: https://github.com/jertel/elastalert/blob/alt/README.md
diff --git a/README.md b/README.md
index 2539d3823..70ea5231a 100644
--- a/README.md
+++ b/README.md
@@ -7,12 +7,10 @@ Elastalert 2 is backwards compatible with the original Elastalert rules.
 
 ## Documentation
 
-Documentation for Elastalert 2 can be found on [readthedocs.com][3]. This is the place to start if you're not familiar with Elastalert at all.
+Documentation, including an FAQ, for Elastalert 2 can be found on [Read the Docs][3]. This is the place to start if you're not familiar with Elastalert at all.
 
 The full list of platforms that Elastalert can fire alerts into can be found [in the documentation][4].
 
-The original README for Elastalert can be found [here][5]. Please note that this file is
-not being actively maintained, and will probably grow less accurate over time.
 
 ## Contributing
 
@@ -25,17 +23,57 @@ The current status of the CI workflow:
 
 ![CI Workflow](https://github.com/jertel/elastalert/workflows/master_build_test/badge.svg)
 
 ## Docker
 
-If you're interested in a pre-built Docker image for either the official yelp/elastalert release, or
-for this fork, check out the [elastalert2][2] project on Docker Hub.
+If you're interested in a pre-built Docker image, check out the [elastalert2][2] project on Docker Hub.
+
+A properly configured elastalert_config.yaml file must be mounted into the container at startup. Use the [example file][1] provided as a template, and once saved locally to a file such as `/tmp/elastalert.yaml`, run the container as follows:
+
+```bash
+docker run -d -v /tmp/elastalert.yaml:/opt/config/elastalert_config.yaml jertel/elastalert2
+```
+
+To build the image locally, install Docker and then run the following command:
+```
+docker build . -t elastalert
+```
+
+## Kubernetes
+
+See the Helm chart [README.md](chart/elastalert2/README.md) for information on installing this application into an existing Kubernetes cluster.
+
+## Releases
+
+As Elastalert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme. It's up to the maintainers to determine when the project is ready for a release. However, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][6] and let us know.
+
+Maintainers, when creating a new release, follow the procedure below:
+
+1. Determine an appropriate new version number in the format _a.b.c_, using the following guidelines:
+   - The major version (a) should not change.
+   - The minor version (b) should be incremented if a new feature has been added or if a bug fix will have a significant user-impact. Reset the patch version to zero if the minor version is incremented.
+   - The patch version (c) should be incremented when low-impact bugs are fixed, or security vulnerabilities are patched.
+2. Ensure the following are updated _before_ publishing/tagging the new release:
+   - [setup.py](setup.py): Match the version to the new release version.
+   - [Chart.yaml](chart/elastalert2/Chart.yaml): Match the chart version and the app version to the new release version (typically keep them in sync).
+   - [values.yaml](chart/elastalert2/values.yaml): Match the default image version to the new release version.
+   - [README.md](chart/elastalert2/README.md): Match the default image version to the new release version.
+3. Double-check that the latest Docker image built successfully.
+4. Create a [new][7] release.
+   - The title (and tag) of the release will be the same value as the new version determined in step 1.
+ - The description of the release will contain a bulleted list of all merged pull requests, in the following format: + `- PR/commit message #000 - @committer` + - Check the box to 'Create a discussion for this release'. + - Save the draft. +5. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. +6. Publish the release. ## License -Elastalert is licensed under the [Apache License, Version 2.0][6]. +Elastalert is licensed under the [Apache License, Version 2.0][5]. [0]: https://github.com/yelp/elastalert -[1]: https://github.com/jertel/elastalert +[1]: https://github.com/jertel/elastalert2/blob/master/config.yaml.example [2]: https://hub.docker.com/r/jertel/elastalert2 [3]: https://elastalert2.readthedocs.io/ [4]: https://elastalert2.readthedocs.io/en/latest/ruletypes.html#alerts -[5]: https://github.com/jertel/elastalert/blob/alt/README-old.md -[6]: http://www.apache.org/licenses/LICENSE-2 +[5]: http://www.apache.org/licenses/LICENSE-2 +[6]: https://github.com/jertel/elastalert2/discussions +[7]: https://github.com/jertel/elastalert2/releases/new \ No newline at end of file diff --git a/changelog.md b/changelog.md deleted file mode 100644 index 975d6855f..000000000 --- a/changelog.md +++ /dev/null @@ -1,383 +0,0 @@ -# Change Log - -# v0.2.4 - -### Added -- Added back customFields support for The Hive - -# v0.2.3 - -### Added -- Added back TheHive alerter without TheHive4py library - -# v0.2.2 - -### Added -- Integration with Kibana Discover app -- Addied ability to specify opsgenie alert details  - -### Fixed -- Fix some encoding issues with command alerter -- Better error messages for missing config file -- Fixed an issue with run_every not applying per-rule -- Fixed an issue with rules not being removed -- Fixed an issue with top count keys and nested query keys -- Various documentation fixes -- Fixed an issue with not being able to use spike aggregation - -### Removed -- Remove The Hive alerter - -# v0.2.1 - -### Fixed -- Fixed an AttributeError introduced in 0.2.0 - -# v0.2.0 - -- Switched to Python 3 - -### Added -- Add rule loader class for customized rule loading -- Added thread based rules and limit_execution -- Run_every can now be customized per rule - -### Fixed -- Various small fixes - -# v0.1.39 - -### Added -- Added spike alerts for metric aggregations -- Allow SSL connections for Stomp -- Allow limits on alert text length -- Add optional min doc count for terms queries -- Add ability to index into arrays for alert_text_args, etc - -### Fixed -- Fixed bug involving --config flag with create-index -- Fixed some settings not being inherited from the config properly -- Some fixes for Hive alerter -- Close SMTP connections properly -- Fix timestamps in Pagerduty v2 payload -- Fixed an bug causing aggregated alerts to mix up - -# v0.1.38 - -### Added -- Added PagerTree alerter -- Added Line alerter -- Added more customizable logging -- Added new logic in test-rule to detemine the default timeframe - -### Fixed -- Fixed an issue causing buffer_time to sometimes be ignored - -# v0.1.37 - -### Added -- Added more options for Opsgenie alerter -- Added more pagerduty options -- Added ability to add metadata to elastalert logs - -### Fixed -- Fixed some documentation to be more clear -- Stop requiring doc_type for metric aggregations -- No longer puts quotes around regex terms in blacklists or whitelists - -# v0.1.36 - -### Added -- Added a prefix "metric_" to the key used for metric aggregations to avoid possible 
conflicts -- Added option to skip Alerta certificate validation - -### Fixed -- Fixed a typo in the documentation for spike rule - -# v0.1.35 - -### Fixed -- Fixed an issue preventing new term rule from working with terms query - -# v0.1.34 - -### Added -- Added prefix/suffix support for summary table -- Added support for ignoring SSL validation in Slack -- More visible exceptions during query parse failures - -### Fixed -- Fixed top_count_keys when using compound query_key -- Fixed num_hits sometimes being reported too low -- Fixed an issue with setting ES_USERNAME via env -- Fixed an issue when using test script with custom timestamps -- Fixed a unicode error when using Telegram -- Fixed an issue with jsonschema version conflict -- Fixed an issue with nested timestamps in cardinality type - -# v0.1.33 - -### Added -- Added ability to pipe alert text to a command -- Add --start and --end support for elastalert-test-rule -- Added ability to turn blacklist/whitelist files into queries for better performance -- Allow setting of OpsGenie priority -- Add ability to query the adjacent index if timestamp_field not used for index timestamping -- Add support for pagerduty v2 -- Add option to turn off .raw/.keyword field postfixing in new term rule -- Added --use-downloaded feature for elastalert-test-rule - -### Fixed -- Fixed a bug that caused num_hits in matches to sometimes be erroneously small -- Fixed an issue with HTTP Post alerter that could cause it to hang indefinitely -- Fixed some issues with string formatting for various alerters -- Fixed a couple of incorrect parts of the documentation - -# v0.1.32 - -### Added -- Add support for setting ES url prefix via environment var -- Add support for using native Slack fields in alerts - -### Fixed -- Fixed a bug that would could scrolling queries to sometimes terminate early - -# v0.1.31 - -### Added -- Added ability to add start date to new term rule - -### Fixed -- Fixed a bug in create_index which would try to delete a nonexistent index -- Apply filters to new term rule all terms query -- Support Elasticsearch 6 for new term rule -- Fixed is_enabled not working on rule changes - - -# v0.1.30 - -### Added -- Alerta alerter -- Added support for transitioning JIRA issues -- Option to recreate index in elastalert-create-index - -### Fixed -- Update jira_ custom fields before each alert if they were modified -- Use json instead of simplejson -- Allow for relative path for smtp_auth_file -- Fixed some grammar issues -- Better code formatting of index mappings -- Better formatting and size limit for HipChat HTML -- Fixed gif link in readme for kibana plugin -- Fixed elastalert-test-rule with Elasticsearch > 4 -- Added documentation for is_enabled option - -## v0.1.29 - -### Added -- Added a feature forget_keys to prevent realerting when using flatline with query_key -- Added a new alert_text_type, aggregation_summary_only - -### Fixed -- Fixed incorrect documentation about es_conn_timeout default - -## v0.1.28 - -### Added -- Added support for Stride formatting of simple HTML tags -- Added support for custom titles in Opsgenie alerts -- Added a denominator to percentage match based alerts - -### Fixed -- Fixed a bug with Stomp alerter connections -- Removed escaping of some characaters in Slack messages - -## v0.1.27 - -# Added -- Added support for a value other than in formatted alerts - -### Fixed -- Fixed a failed creation of elastalert indicies when using Elasticsearch 6 -- Truncate Telegram alerts to avoid API errors - -## v0.1.26 - -### Added 
-- Added support for Elasticsearch 6 -- Added support for mentions in Hipchat - -### Fixed -- Fixed an issue where a nested field lookup would crash if one of the intermediate fields was null - -## v0.1.25 - -### Fixed -- Fixed a bug causing new term rule to break unless you passed a start time -- Add a slight clarification on the localhost:9200 reported in es_debug_trace - -## v0.1.24 - -### Fixed -- Pinned pytest -- create-index reads index name from config.yaml -- top_count_keys now works for context on a flatline rule type -- Fixed JIRA behavior for issues with statuses that have spaces in the name - -## v0.1.22 - -### Added -- Added Stride alerter -- Allow custom string formatters for aggregation percentage -- Added a field to disable rules from config -- Added support for subaggregations for the metric rule type - -### Fixed -- Fixed a bug causing create-index to fail if missing config.yaml -- Fixed a bug when using ES5 with query_key and top_count_keys -- Allow enhancements to set and clear arbitrary JIRA fields -- Fixed a bug causing timestamps to be formatted in scientific notation -- Stop attempting to initialize alerters in debug mode -- Changed default alert ordering so that JIRA tickets end up in other alerts -- Fixed a bug when using Stomp alerter with complex query_key -- Fixed a bug preventing hipchat room ID from being an integer -- Fixed a bug causing duplicate alerts when using spike with alert_on_new_data -- Minor fixes to summary table formatting -- Fixed elastalert-test-rule when using new term rule type - -## v0.1.21 - -### Fixed -- Fixed an incomplete bug fix for preventing duplicate enhancement runs - -## v0.1.20 - -### Added -- Added support for client TLS keys - -### Fixed -- Fixed the formatting of summary tables in Slack -- Fixed ES_USE_SSL env variable -- Fixed the unique value count printed by new_term rule type -- Jira alerter no longer uses the non-existent json code formatter - -## v0.1.19 - -### Added -- Added support for populating JIRA fields via fields in the match -- Added support for using a TLS certificate file for SMTP connections -- Allow a custom suffix for non-analyzed Elasticsearch fields, like ".raw" or ".keyword" -- Added match_time to Elastalert alert documents in Elasticsearch - -### Fixed -- Fixed an error in the documentation for rule importing -- Prevent enhancements from re-running on retried alerts -- Fixed a bug when using custom timestamp formats and new term rule -- Lowered jira_bump_after_inactivity default to 0 days - -## v0.1.18 - -### Added -- Added a new alerter "post" based on "simple" which makes POSTS JSON to HTTP endpoints -- Added an option jira_bump_after_inacitivty to prevent ElastAlert commenting on active JIRA tickets - -### Removed -- Removed "simple" alerter, replaced by "post" - -## v0.1.17 - -### Added -- Added a --patience flag to allow Elastalert to wait for Elasticsearch to become available -- Allow custom PagerDuty alert titles via alert_subject - -## v0.1.16 - -### Fixed -- Fixed a bug where JIRA titles might not use query_key values -- Fixed a bug where flatline alerts don't respect query_key for realert -- Fixed a typo "twilio_accout_sid" - -### Added -- Added support for env variables in kibana4 dashboard links -- Added ca_certs option for custom CA support - -## v0.1.15 - -### Fixed -- Fixed a bug where Elastalert would crash on connection error during startup -- Fixed some typos in documentation -- Fixed a bug in metric bucket offset calculation -- Fixed a TypeError in Service Now alerter - -### Added -- 
Added support for compound compare key in change rules -- Added support for absolute paths in rule config imports -- Added Microsoft Teams alerter -- Added support for markdown in Slack alerts -- Added error codes to test script -- Added support for lists in email_from_field - - -## v0.1.14 - 2017-05-11 - -### Fixed -- Twilio alerter uses the from number appropriately -- Fixed a TypeError in SNS alerter -- Some changes to requirements.txt and setup.py -- Fixed a TypeError in new term rule - -### Added -- Set a custom pagerduty incident key -- Preserve traceback in most exceptions - -## v0.1.12 - 2017-04-21 - -### Fixed -- Fixed a bug causing filters to be ignored when using Elasticsearch 5 - - -## v0.1.11 - 2017-04-19 - -### Fixed -- Fixed an issue that would cause filters starting with "query" to sometimes throw errors in ES5 -- Fixed a bug with multiple versions of ES on different rules -- Fixed a possible KeyError when using use_terms_query with ES5 - -## v0.1.10 - 2017-04-17 - -### Fixed -- Fixed an AttributeError occuring with older versions of Elasticsearch library -- Made example rules more consistent and with unique names -- Fixed an error caused by a typo when es_username is used - -## v0.1.9 - 2017-04-14 - -### Added -- Added a changelog -- Added metric aggregation rule type -- Added percentage match rule type -- Added default doc style and improved the instructions -- Rule names will default to the filename -- Added import keyword in rules to include sections from other files -- Added email_from_field option to derive the recipient from a field in the match -- Added simple HTTP alerter -- Added Exotel SMS alerter -- Added a readme link to third party Kibana plugin -- Added option to use env variables to configure some settings -- Added duplicate hits count in log line - -### Fixed -- Fixed a bug in change rule where a boolean false would be ignored -- Clarify documentation on format of alert_text_args and alert_text_kw -- Fixed a bug preventing new silence stashes from being loaded after a rule has previous alerted -- Changed the default es_host in elastalert-test-rule to localhost -- Fixed a bug preventing ES <5.0 formatted queries working in elastalert-test-rule -- Fixed top_count_keys adding .raw on ES >5.0, uses .keyword instead -- Fixed a bug causing compound aggregation keys not to work -- Better error reporting for the Jira alerter -- AWS request signing now refreshes credentials, uses boto3 -- Support multiple ES versions on different rules -- Added documentation for percentage match rule type - -### Removed -- Removed a feature that would disable writeback_es on errors, causing various issues diff --git a/chart/elastalert2/.helmignore b/chart/elastalert2/.helmignore new file mode 100644 index 000000000..f0c131944 --- /dev/null +++ b/chart/elastalert2/.helmignore @@ -0,0 +1,21 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*~ +# Various IDEs +.project +.idea/ +*.tmproj diff --git a/chart/elastalert2/Chart.yaml b/chart/elastalert2/Chart.yaml new file mode 100644 index 000000000..813b4808c --- /dev/null +++ b/chart/elastalert2/Chart.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +description: Elastalert 2 is a simple framework for alerting on anomalies, spikes, or other patterns of interest from data in Elasticsearch. 
+name: elastalert2
+version: 2.0.2
+appVersion: 2.0.2
+home: https://github.com/jertel/elastalert2
+sources:
+- https://github.com/jertel/elastalert2
+maintainers:
+  - name: jertel
+    email: jertel@codesim.com
+engine: gotpl
diff --git a/chart/elastalert2/README.md b/chart/elastalert2/README.md
new file mode 100644
index 000000000..72173672b
--- /dev/null
+++ b/chart/elastalert2/README.md
@@ -0,0 +1,90 @@
+
+# Elastalert 2 Helm Chart for Kubernetes
+
+An Elastalert 2 helm chart is available in the jertel Helm repository, and can be installed into an existing Kubernetes cluster by following the instructions below.
+
+## Installing the Chart
+
+Add the jertel repository to your Helm configuration:
+
+```console
+helm repo add jertel https://helm.jertel.com
+```
+
+Next, install the chart with a release name, such as _elastalert2_:
+
+```console
+helm install elastalert2 jertel/elastalert2
+```
+
+The command deploys Elastalert 2 on the Kubernetes cluster in the default configuration. The [configuration](#configuration) section lists the parameters that can be configured during installation.
+
+See the comment in the default `values.yaml` for specifying a `writebackIndex` for ES 5.x.
+
+If necessary, open Dev Tools on Kibana and send the request below to avoid errors like `RequestError: TransportError(400, u'search_phase_execution_exception', u'No mapping found for [alert_time] in order to sort on')`:
+
+```
+PUT /elastalert/_mapping/elastalert
+{
+  "properties": {
+    "alert_time": {"type": "date"}
+  }
+}
+```
+
+## Uninstalling the Chart
+
+To uninstall/delete the Elastalert 2 deployment:
+
+```console
+helm delete elastalert2 --purge
+```
+
+The command removes all the Kubernetes components associated with the chart and deletes the release.
+
+## Configuration
+
+| Parameter                                     | Description                                                                                                                     | Default                          |
+|-----------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------|----------------------------------|
+| `image.repository`                            | docker image                                                                                                                    | jertel/elastalert2               |
+| `image.tag`                                   | docker image tag                                                                                                                | 2.0.2                            |
+| `image.pullPolicy`                            | image pull policy                                                                                                               | IfNotPresent                     |
+| `podAnnotations`                              | Annotations to be added to pods                                                                                                 | {}                               |
+| `command`                                     | command override for container                                                                                                  | `NULL`                           |
+| `args`                                        | args override for container                                                                                                     | `NULL`                           |
+| `replicaCount`                                | number of replicas to run                                                                                                       | 1                                |
+| `elasticsearch.host`                          | elasticsearch endpoint to use                                                                                                   | elasticsearch                    |
+| `elasticsearch.port`                          | elasticsearch port to use                                                                                                       | 9200                             |
+| `elasticsearch.useSsl`                        | whether or not to connect to es_host using SSL                                                                                  | False                            |
+| `elasticsearch.username`                      | Username for ES with basic auth                                                                                                 | `NULL`                           |
+| `elasticsearch.password`                      | Password for ES with basic auth                                                                                                 | `NULL`                           |
+| `elasticsearch.credentialsSecret`             | Specifies an existing secret to be used for the ES username/password auth                                                       | `NULL`                           |
+| `elasticsearch.credentialsSecretUsernameKey`  | The key in elasticsearch.credentialsSecret that stores the ES username                                                          | `NULL`                           |
+| `elasticsearch.credentialsSecretPasswordKey`  | The key in elasticsearch.credentialsSecret that stores the ES password                                                          | `NULL`                           |
+| `elasticsearch.verifyCerts`                   | whether or not to verify TLS certificates                                                                                       | True                             |
+| `elasticsearch.clientCert`                    | path to a PEM certificate to use as the client certificate                                                                      | /certs/client.pem                |
+| `elasticsearch.clientKey`                     | path to a private key file to use as the client key                                                                             | /certs/client-key.pem            |
+| `elasticsearch.caCerts`                       | path to a CA cert bundle
to use to verify SSL connections | /certs/ca.pem | +| `elasticsearch.certsVolumes` | certs volumes, required to mount ssl certificates when elasticsearch has tls enabled | `NULL` | +| `elasticsearch.certsVolumeMounts` | mount certs volumes, required to mount ssl certificates when elasticsearch has tls enabled | `NULL` | +| `extraConfigOptions` | Additional options to propagate to all rules, cannot be `alert`, `type`, `name` or `index` | `{}` | +| `secretConfigName` | name of the secret which holds the Elastalert config. **Note:** this will completely overwrite the generated config | `NULL` | +| `secretRulesName` | name of the secret which holds the Elastalert rules. **Note:** this will overwrite the generated rules | `NULL` | +| `secretRulesList` | a list of rules to enable from the secret | [] | +| `optEnv` | Additional pod environment variable definitions | [] | +| `extraVolumes` | Additional volume definitions | [] | +| `extraVolumeMounts` | Additional volumeMount definitions | [] | +| `serviceAccount.create` | Specifies whether a service account should be created. | `true` | +| `serviceAccount.name` | Service account to be used. If not set and `serviceAccount.create` is `true`, a name is generated using the fullname template | | +| `serviceAccount.annotations` | ServiceAccount annotations | | +| `podSecurityPolicy.create` | Create pod security policy resources | `false` | +| `resources` | Container resource requests and limits | {} | +| `rules` | Rule and alert configuration for Elastalert | {} example shown in values.yaml | +| `runIntervalMins` | Default interval between alert checks, in minutes | 1 | +| `realertIntervalMins` | Time between alarms for same rule, in minutes | `NULL` | +| `alertRetryLimitMins` | Time to retry failed alert deliveries, in minutes | 2880 (2 days) | +| `bufferTimeMins` | Default rule buffer time, in minutes | 15 | +| `writebackIndex` | Name or prefix of elastalert index(es) | elastalert | +| `nodeSelector` | Node selector for deployment | {} | +| `affinity` | Affinity specifications for the deployed pod(s) | {} | +| `tolerations` | Tolerations for deployment | [] | diff --git a/chart/elastalert2/templates/NOTES.txt b/chart/elastalert2/templates/NOTES.txt new file mode 100644 index 000000000..7b1c2985d --- /dev/null +++ b/chart/elastalert2/templates/NOTES.txt @@ -0,0 +1 @@ +1. Elastalert is now running against: {{ .Values.elasticsearch.host }}:{{ .Values.elasticsearch.port }} \ No newline at end of file diff --git a/chart/elastalert2/templates/_helpers.tpl b/chart/elastalert2/templates/_helpers.tpl new file mode 100644 index 000000000..2fbdad460 --- /dev/null +++ b/chart/elastalert2/templates/_helpers.tpl @@ -0,0 +1,36 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Expand the name of the chart. +*/}} +{{- define "elastalert.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. 
+*/}} +{{- define "elastalert.fullname" -}} +{{- if .Values.fullnameOverride -}} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- $name := default .Chart.Name .Values.nameOverride -}} +{{- if contains $name .Release.Name -}} +{{- .Release.Name | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{- end -}} +{{- end -}} + +{{/* +Create the name of the service account to use +*/}} +{{- define "elastalert.serviceAccountName" -}} +{{- if .Values.serviceAccount.create -}} + {{ default (include "elastalert.fullname" .) .Values.serviceAccount.name }} +{{- else -}} + {{ default "default" .Values.serviceAccount.name }} +{{- end -}} +{{- end -}} diff --git a/chart/elastalert2/templates/deployment.yaml b/chart/elastalert2/templates/deployment.yaml new file mode 100644 index 000000000..0d696a122 --- /dev/null +++ b/chart/elastalert2/templates/deployment.yaml @@ -0,0 +1,124 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ template "elastalert.fullname" . }} + labels: + app: {{ template "elastalert.name" . }} + chart: "{{ .Chart.Name }}-{{ .Chart.Version }}" + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} +spec: + selector: + matchLabels: + app: {{ template "elastalert.name" . }} + release: {{ .Release.Name }} + replicas: {{ .Values.replicaCount }} + revisionHistoryLimit: {{ .Values.revisionHistoryLimit }} + template: + metadata: + annotations: + checksum/config: {{ include (print $.Template.BasePath "/config.yaml") . | sha256sum }} + checksum/rules: {{ include (print $.Template.BasePath "/rules.yaml") . | sha256sum }} +{{- if .Values.podAnnotations }} +{{ toYaml .Values.podAnnotations | indent 8 }} +{{- end }} + labels: + name: {{ template "elastalert.fullname" . }}-elastalert + app: {{ template "elastalert.name" . }} + release: {{ .Release.Name }} + spec: + serviceAccountName: {{ include "elastalert.serviceAccountName" . 
}} + containers: + - name: elastalert + image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} +{{- if .Values.command }} + command: +{{ toYaml .Values.command | indent 10 }} +{{- end }} +{{- if .Values.args }} + args: +{{ toYaml .Values.args | indent 10 }} +{{- end }} + volumeMounts: + - name: config + mountPath: '/opt/config' + - name: rules + mountPath: '/opt/rules' +{{- if .Values.elasticsearch.certsVolumeMounts }} +{{ toYaml .Values.elasticsearch.certsVolumeMounts | indent 10 }} +{{- end }} +{{- if .Values.extraVolumeMounts }} +{{ toYaml .Values.extraVolumeMounts | indent 10 }} +{{- end }} + resources: +{{ toYaml .Values.resources | indent 12 }} + env: +{{- if .Values.elasticsearch.credentialsSecret }} +{{- if .Values.elasticsearch.credentialsSecretUsernameKey }} + - name: ES_USERNAME + valueFrom: + secretKeyRef: + name: {{ .Values.elasticsearch.credentialsSecret }} + key: {{ .Values.elasticsearch.credentialsSecretUsernameKey }} +{{- end }} +{{- if .Values.elasticsearch.credentialsSecretPasswordKey }} + - name: ES_PASSWORD + valueFrom: + secretKeyRef: + name: {{ .Values.elasticsearch.credentialsSecret }} + key: {{ .Values.elasticsearch.credentialsSecretPasswordKey }} +{{- end }} +{{- end }} +{{- if .Values.optEnv }} +{{ .Values.optEnv | toYaml | indent 10}} +{{- end }} + restartPolicy: Always +{{- if .Values.tolerations }} + tolerations: +{{ toYaml .Values.tolerations | indent 8 }} +{{- end }} +{{- if .Values.nodeSelector }} + nodeSelector: +{{ toYaml .Values.nodeSelector | indent 8 }} +{{- end }} +{{- if .Values.affinity }} + affinity: +{{ toYaml .Values.affinity | indent 8 }} +{{- end }} + volumes: + - name: rules +{{- if .Values.secretRulesName }} + secret: + secretName: {{ .Values.secretRulesName }} + items: +{{- range $key := .Values.secretRulesList }} + - key: {{ $key }} + path: {{ $key}}.yaml +{{- end }} +{{- else }} + configMap: + name: {{ template "elastalert.fullname" . }}-rules + items: +{{- range $key, $value := .Values.rules }} + - key: {{ $key }} + path: {{ $key}}.yaml +{{- end }} +{{- end }} + - name: config +{{- if .Values.secretConfigName }} + secret: + secretName: {{ .Values.secretConfigName }} +{{- else }} + configMap: + name: {{ template "elastalert.fullname" . }}-config +{{- end }} + items: + - key: elastalert_config + path: elastalert_config.yaml +{{- if .Values.elasticsearch.certsVolumes }} +{{ toYaml .Values.elasticsearch.certsVolumes | indent 8 }} +{{- end }} +{{- if .Values.extraVolumes }} +{{ toYaml .Values.extraVolumes | indent 8 }} +{{- end }} diff --git a/chart/elastalert2/templates/podsecuritypolicy.yaml b/chart/elastalert2/templates/podsecuritypolicy.yaml new file mode 100644 index 000000000..e37772035 --- /dev/null +++ b/chart/elastalert2/templates/podsecuritypolicy.yaml @@ -0,0 +1,39 @@ +{{- if .Values.podSecurityPolicy.create }} +apiVersion: policy/v1beta1 +kind: PodSecurityPolicy +metadata: + name: {{ template "elastalert.fullname" . }} + labels: + app: {{ template "elastalert.name" . }} + chart: "{{ .Chart.Name }}-{{ .Chart.Version }}" + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} +spec: + # Prevents running in privileged mode + privileged: false + # Required to prevent escalations to root. + allowPrivilegeEscalation: false + volumes: + - configMap + - secret + hostNetwork: false + hostIPC: false + hostPID: false + runAsUser: + rule: RunAsAny + seLinux: + rule: RunAsAny + supplementalGroups: + rule: MustRunAs + ranges: + # Forbid adding the root group. 
+ - min: 1 + max: 65535 + fsGroup: + rule: MustRunAs + ranges: + # Forbid adding the root group. + - min: 1 + max: 65535 + readOnlyRootFilesystem: false +{{- end }} diff --git a/chart/elastalert2/templates/role.yaml b/chart/elastalert2/templates/role.yaml new file mode 100644 index 000000000..93b9caddc --- /dev/null +++ b/chart/elastalert2/templates/role.yaml @@ -0,0 +1,20 @@ +{{- if .Values.podSecurityPolicy.create }} +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: {{ template "elastalert.fullname" . }} + labels: + app: {{ template "elastalert.name" . }} + chart: "{{ .Chart.Name }}-{{ .Chart.Version }}" + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} +rules: + - apiGroups: + - policy + resources: + - podsecuritypolicies + resourceNames: + - {{ template "elastalert.fullname" . }} + verbs: + - use +{{- end -}} diff --git a/chart/elastalert2/templates/rolebinding.yaml b/chart/elastalert2/templates/rolebinding.yaml new file mode 100644 index 000000000..67a69d1f9 --- /dev/null +++ b/chart/elastalert2/templates/rolebinding.yaml @@ -0,0 +1,18 @@ +{{- if .Values.podSecurityPolicy.create }} +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: {{ template "elastalert.fullname" . }} + labels: + app: {{ template "elastalert.name" . }} + chart: "{{ .Chart.Name }}-{{ .Chart.Version }}" + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: {{ template "elastalert.fullname" . }} +subjects: + - kind: ServiceAccount + name: {{ template "elastalert.serviceAccountName" . }} +{{- end -}} diff --git a/chart/elastalert2/templates/rules.yaml b/chart/elastalert2/templates/rules.yaml new file mode 100644 index 000000000..1e4afd457 --- /dev/null +++ b/chart/elastalert2/templates/rules.yaml @@ -0,0 +1,14 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ template "elastalert.fullname" . }}-rules + labels: + app: {{ template "elastalert.name" . }} + chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} +data: +{{- range $key, $value := .Values.rules }} +{{ $key | indent 2}}: |- +{{ $value | indent 4}} +{{- end }} diff --git a/chart/elastalert2/templates/serviceaccount.yaml b/chart/elastalert2/templates/serviceaccount.yaml new file mode 100644 index 000000000..dc1e08c52 --- /dev/null +++ b/chart/elastalert2/templates/serviceaccount.yaml @@ -0,0 +1,15 @@ +{{- if .Values.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "elastalert.serviceAccountName" . }} + labels: + app: {{ template "elastalert.name" . }} + chart: "{{ .Chart.Name }}-{{ .Chart.Version }}" + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} + {{- with .Values.serviceAccount.annotations }} + annotations: + {{- toYaml . 
| nindent 4 }}
+  {{- end }}
+{{- end -}}
diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml
new file mode 100644
index 000000000..c3a24acbb
--- /dev/null
+++ b/chart/elastalert2/values.yaml
@@ -0,0 +1,228 @@
+# number of replicas to run
+replicaCount: 1
+
+# number of helm release revisions to retain
+revisionHistoryLimit: 5
+
+# Default interval between alert checks against the elasticsearch datasource, in minutes
+runIntervalMins: 1
+
+# Default rule buffer duration, in minutes
+bufferTimeMins: 15
+
+# Amount of time to retry and deliver failed alerts (1440 minutes per day)
+alertRetryLimitMins: 2880
+
+# Default time before realerting, in minutes
+realertIntervalMins: ""
+
+# For ES 5: The name of the index which stores elastalert 2 statuses, typically elastalert_status
+# For ES 6: The prefix of the names of indices which store elastalert 2 statuses, typically elastalert
+#
+writebackIndex: elastalert
+
+image:
+  # docker image
+  repository: jertel/elastalert2
+  # docker image tag
+  tag: 2.0.2
+  pullPolicy: IfNotPresent
+
+resources: {}
+
+# Annotations to be added to pods
+podAnnotations: {}
+
+elasticsearch:
+  # elasticsearch endpoint e.g. (svc.namespace||svc)
+  host: elasticsearch
+  # elasticsearch port
+  port: 9200
+  # whether or not to connect to es_host using TLS
+  useSsl: "False"
+  # Username if authenticating to ES with basic auth
+  username: ""
+  # Password if authenticating to ES with basic auth
+  password: ""
+  # Specifies an existing secret to be used for the ES username/password
+  credentialsSecret: ""
+  # The key in elasticsearch.credentialsSecret that stores the ES username
+  credentialsSecretUsernameKey: ""
+  # The key in elasticsearch.credentialsSecret that stores the ES password
+  credentialsSecretPasswordKey: ""
+  # whether or not to verify TLS certificates
+  verifyCerts: "True"
+  # Enable certificate based authentication
+  # path to a PEM certificate to use as the client certificate
+  # clientCert: "/certs/client.pem"
+  # path to a private key file to use as the client key
+  # clientKey: "/certs/client-key.pem"
+  # path to a CA cert bundle to use to verify SSL connections
+  # caCerts: "/certs/ca.pem"
+  # # certs volumes, required to mount ssl certificates when elasticsearch has tls enabled
+  # certsVolumes:
+  # - name: es-certs
+  #   secret:
+  #     defaultMode: 420
+  #     secretName: es-certs
+  # # mount certs volumes, required to mount ssl certificates when elasticsearch has tls enabled
+  # certsVolumeMounts:
+  # - name: es-certs
+  #   mountPath: /certs
+  #   readOnly: true
+
+# Optional env variables for the pod
+optEnv: []
+
+extraConfigOptions: {}
  # # Options to propagate to all rules, e.g. 
a common slack_webhook_url or kibana_url
  # # Please note at the time of implementing this value, it will not work for required_locals
  # # Which MUST be set at the rule level, these are: ['alert', 'type', 'name', 'index']
  # generate_kibana_link: true
  # kibana_url: https://kibana.yourdomain.com
  # slack_webhook_url: dummy
+
+# To load Elastalert 2 config via secret, uncomment the line below
+# secretConfigName: elastalert-config-secret
+
+# Example of a secret config
+
+#apiVersion: v1
+#kind: Secret
+#metadata:
+#  name: elastalert-config-secret
+#type: Opaque
+#stringData:
+#  elastalert_config: |-
+#    rules_folder: /opt/rules
+#    scan_subdirectories: false
+#    run_every:
+#      minutes: 1
+#    buffer_time:
+#      minutes: 15
+#    es_host: elasticsearch
+#    es_port: 9200
+#    writeback_index: elastalert
+#    use_ssl: False
+#    verify_certs: True
+#    alert_time_limit:
+#      minutes: 2880
+#    slack_webhook_url: https://hooks.slack.com/services/xxxx
+#    slack_channel_override: '#alerts'
+
+
+# To load Elastalert 2 rules via secret, uncomment the line below
+#secretRulesName: elastalert-rules-secret
+
+# Additionally, you must specify which rules to load from the secret
+#secretRulesList: [ "rule_1", "rule_2" ]
+
+# Example of secret rules
+
+#apiVersion: v1
+#kind: Secret
+#metadata:
+#  name: elastalert-rules-secret
+#  namespace: elastic-system
+#type: Opaque
+#stringData:
+#  rule_1: |-
+#    name: Rule 1
+#    type: frequency
+#    index: index1-*
+#    num_events: 3
+#    timeframe:
+#      minutes: 1
+#    alert:
+#    - "slack"
+#  rule_2: |-
+#    name: Rule 2
+#    type: frequency
+#    index: index2-*
+#    num_events: 5
+#    timeframe:
+#      minutes: 10
+#    alert:
+#    - "slack"
+
+# Command and args override for container e.g. (https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/)
+# command: ["YOUR_CUSTOM_COMMAND"]
+# args: ["YOUR", "CUSTOM", "ARGS"]
+
+# rule configurations e.g. (http://elastalert2.readthedocs.io/en/latest/)
+rules: {}
  # deadman_slack: |-
  #   ---
  #   name: Deadman Switch Slack
  #   type: frequency
  #   index: containers-*
  #   num_events: 3
  #   timeframe:
  #     minutes: 3
  #   filter:
  #   - term:
  #       message: "deadmanslack"
  #   alert:
  #   - "slack"
  #   slack:
  #   slack_webhook_url: dummy
  # deadman_pagerduty: |-
  #   ---
  #   name: Deadman Switch PagerDuty
  #   type: frequency
  #   index: containers-*
  #   num_events: 3
  #   timeframe:
  #     minutes: 3
  #   filter:
  #   - term:
  #       message: "deadmanpd"
  #   alert:
  #   - "pagerduty"
  #   pagerduty:
  #   pagerduty_service_key: dummy
  #   pagerduty_client_name: Elastalert Deadman Switch
+
+serviceAccount:
+  # Specifies whether a service account should be created
+  create: true
+  # Annotations to add to the service account
+  annotations: {}
+  # The name of the service account to use. 
+ # If not set and create is true, a name is generated using the fullname template + name: + +# Enable pod security policy +# https://kubernetes.io/docs/concepts/policy/pod-security-policy/ +podSecurityPolicy: + create: false + +# Support using node selectors and tolerations +# nodeSelector: +# "node-role.kubernetes.io/infra_worker": "true" +nodeSelector: {} + +# Specify node affinity or anti-affinity specifications +affinity: {} + +# tolerations: +# - key: "node_role" +# operator: "Equal" +# value: "infra_worker" +# effect: "NoSchedule" +tolerations: [] + +extraVolumes: [] + # - name: smtp-auth + # secret: + # secretName: elastalert-smtp-auth + # items: + # - key: smtp_auth.yaml + # path: smtp_auth.yaml + # mode: 0400 + +extraVolumeMounts: [] + # - name: smtp-auth + # mountPath: /opt/config-smtp/smtp_auth.yaml + # subPath: smtp_auth.yaml + # readOnly: true diff --git a/setup.py b/setup.py index 5abdd4360..058ca5313 100644 --- a/setup.py +++ b/setup.py @@ -7,15 +7,13 @@ base_dir = os.path.dirname(__file__) setup( - name='elastalert', - version='0.2.4', + name='elastalert2', + version='2.0.2', description='Runs custom filters on Elasticsearch and alerts on matches', - author='Quentin Long', - author_email='qlo@yelp.com', setup_requires='setuptools', - license='Copyright 2014 Yelp', + license='Apache 2.0', classifiers=[ - 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.9', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', ], From 358a11c7ff7f6e6aa2a04a1298c13645b8527d75 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 14:30:20 -0400 Subject: [PATCH 0131/1065] Fix typographical errors. --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 70ea5231a..b899d3a55 100644 --- a/README.md +++ b/README.md @@ -38,13 +38,13 @@ docker build . -t elastalert ## Kubernetes -See the Helm chart [README.md](chart/elastalert/README.md) for information on installing this application into an existing Kubernetes cluster. +See the Helm chart [README.md](chart/elastalert2/README.md) for information on installing this application into an existing Kubernetes cluster. ## Releases As Elastalert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme. It's up to the maintainers to determine when the project is ready for a release, however, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][6] and let us know. -Maintainers, when creating a new release, following the procedure below: +Maintainers, when creating a new release, follow the procedure below: 1. Determine an appropriate new version number in the format _a.b.c_, using the following guidelines: - The major version (a) should not change. From 57ba69e45c7c4773d287538ca554001722b702cd Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 14:51:08 -0400 Subject: [PATCH 0132/1065] Add new issue template to help prevent unnecessary issues from being created. --- .github/ISSUE_TEMPLATE | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE diff --git a/.github/ISSUE_TEMPLATE b/.github/ISSUE_TEMPLATE new file mode 100644 index 000000000..0c580a744 --- /dev/null +++ b/.github/ISSUE_TEMPLATE @@ -0,0 +1,3 @@ +PLEASE READ: The majority of topics are better suited for the [Discussion forum](https://github.com/jertel/elastalert2/discussions). 
Please search the discussions area first, for keywords that could be associated with the problem you are experiencing. If you do not see an existing discussion, please open a new discussion and include sufficient details for someone in the community to help you. + +If you are confident you have discovered a legitimate issue, attach logs and reproduction steps to this issue. Failure to provide sufficient information will likely cause this issue to go stale and eventually be deleted. \ No newline at end of file From 0fc105e1fc91126098026398acdcb54cb5694fc2 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 14:52:02 -0400 Subject: [PATCH 0133/1065] Add new issue template to help prevent unnecessary issues from being created. --- .github/ISSUE_TEMPLATE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE b/.github/ISSUE_TEMPLATE index 0c580a744..81f621e05 100644 --- a/.github/ISSUE_TEMPLATE +++ b/.github/ISSUE_TEMPLATE @@ -1,3 +1,3 @@ -PLEASE READ: The majority of topics are better suited for the [Discussion forum](https://github.com/jertel/elastalert2/discussions). Please search the discussions area first, for keywords that could be associated with the problem you are experiencing. If you do not see an existing discussion, please open a new discussion and include sufficient details for someone in the community to help you. +PLEASE READ: The majority of topics are better suited for the Discussion forum. You can access this area by clicking The Discussions link above. Please search the discussions area first, for keywords that could be associated with the problem you are experiencing. If you do not see an existing discussion, please open a new discussion and include sufficient details for someone in the community to help you. If you are confident you have discovered a legitimate issue, attach logs and reproduction steps to this issue. Failure to provide sufficient information will likely cause this issue to go stale and eventually be deleted. 
\ No newline at end of file From 592d226550d222e3793de3a006db7afa7297fcc5 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 14:54:23 -0400 Subject: [PATCH 0134/1065] Correct helm repo instructions --- chart/elastalert2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chart/elastalert2/README.md b/chart/elastalert2/README.md index 72173672b..a6bbc1755 100644 --- a/chart/elastalert2/README.md +++ b/chart/elastalert2/README.md @@ -8,7 +8,7 @@ An Elastalert 2 helm chart is available in the jertel Helm repository, and can b Add the jertel repository to your Helm configuration: ```console -helm repo add codesim https://helm.jertel.com +helm repo add jertel https://helm.jertel.com ``` Next, install the chart with a release name, such as _elastalert2_: From 9f7f4706abb49e6b90a7dda9a59072b802fca62e Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 14:56:05 -0400 Subject: [PATCH 0135/1065] Correct version in Helm README --- chart/elastalert2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chart/elastalert2/README.md b/chart/elastalert2/README.md index a6bbc1755..a3dd5c912 100644 --- a/chart/elastalert2/README.md +++ b/chart/elastalert2/README.md @@ -47,7 +47,7 @@ The command removes all the Kubernetes components associated with the chart and | Parameter | Description | Default | |----------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|---------------------------------| | `image.repository` | docker image | jertel/elastalert-docker | -| `image.tag` | docker image tag | 2,0.2 | +| `image.tag` | docker image tag | 2.0.2 | | `image.pullPolicy` | image pull policy | IfNotPresent | | `podAnnotations` | Annotations to be added to pods | {} | | `command` | command override for container | `NULL` | From a4383afc4160714970dc2b6b3479fe8d34efa31d Mon Sep 17 00:00:00 2001 From: Pavlo Bondar <123rpv@gmail.com> Date: Wed, 23 Dec 2020 16:58:09 +0200 Subject: [PATCH 0136/1065] Bearer token authorization. --- docs/source/ruletypes.rst | 7 +++++++ docs/source/running_elastalert.rst | 2 ++ elastalert/__init__.py | 1 + elastalert/config.py | 1 + elastalert/create_index.py | 4 ++++ elastalert/schema.yaml | 1 + elastalert/util.py | 13 +++++++++++-- requirements.txt | 2 +- setup.py | 1 + 9 files changed, 29 insertions(+), 3 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index ff3763712..4493a3c4b 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -38,6 +38,8 @@ Rule Configuration Cheat Sheet +--------------------------------------------------------------+ | | ``es_password`` (string, no default) | | +--------------------------------------------------------------+ | +| ``es_bearer`` (string, no default) | | ++--------------------------------------------------------------+ | | ``es_url_prefix`` (string, no default) | | +--------------------------------------------------------------+ | | ``es_send_get_body_as`` (string, default "GET") | | @@ -284,6 +286,11 @@ es_password ``es_password``: basic-auth password for connecting to ``es_host``. (Optional, string, no default) The environment variable ``ES_PASSWORD`` will override this field. +es_bearer +^^^^^^^^^^^ + +``es_bearer``: bearer-token authorization for connecting to ``es_host``. (Optional, string, no default) The environment variable ``ES_BEARER`` will override this field. 
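+
+Example usage in a rule or config file, where the token value shown is only a placeholder::
+
+    es_bearer: "AbCdEf0123456789"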
+ es_url_prefix ^^^^^^^^^^^^^ diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index 7fdf1eeba..b85cba60c 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -64,6 +64,8 @@ Next, open up config.yaml.example. In it, you will find several configuration op ``es_password``: Optional; basic-auth password for connecting to ``es_host``. +``es_bearer``: Optional; bearer token authorization for connecting to ``es_host``. If bearer token is specified, login and password are ignored. + ``es_url_prefix``: Optional; URL prefix for the Elasticsearch endpoint. ``es_send_get_body_as``: Optional; Method for querying Elasticsearch - ``GET``, ``POST`` or ``source``. The default is ``GET`` diff --git a/elastalert/__init__.py b/elastalert/__init__.py index 55bfdb32f..6e5420e82 100644 --- a/elastalert/__init__.py +++ b/elastalert/__init__.py @@ -24,6 +24,7 @@ def __init__(self, conf): ca_certs=conf['ca_certs'], connection_class=RequestsHttpConnection, http_auth=conf['http_auth'], + headers=conf['headers'], timeout=conf['es_conn_timeout'], send_get_body_as=conf['send_get_body_as'], client_cert=conf['client_cert'], diff --git a/elastalert/config.py b/elastalert/config.py index 5ae9a26e6..42dc64f6e 100644 --- a/elastalert/config.py +++ b/elastalert/config.py @@ -16,6 +16,7 @@ # Settings that can be derived from ENV variables env_settings = {'ES_USE_SSL': 'use_ssl', + 'ES_BEARER': 'es_bearer', 'ES_PASSWORD': 'es_password', 'ES_USERNAME': 'es_username', 'ES_HOST': 'es_host', diff --git a/elastalert/create_index.py b/elastalert/create_index.py index a0858da70..d18a8d913 100644 --- a/elastalert/create_index.py +++ b/elastalert/create_index.py @@ -152,6 +152,7 @@ def main(): parser.add_argument('--port', default=os.environ.get('ES_PORT', None), type=int, help='Elasticsearch port') parser.add_argument('--username', default=os.environ.get('ES_USERNAME', None), help='Elasticsearch username') parser.add_argument('--password', default=os.environ.get('ES_PASSWORD', None), help='Elasticsearch password') + parser.add_argument('--bearer', default=os.environ.get('ES_BEARER', None), help='Elasticsearch bearer token') parser.add_argument('--url-prefix', help='Elasticsearch URL prefix') parser.add_argument('--no-auth', action='store_const', const=True, help='Suppress prompt for basic auth') parser.add_argument('--ssl', action='store_true', default=env('ES_USE_SSL', None), help='Use TLS') @@ -197,6 +198,7 @@ def main(): port = args.port if args.port else data.get('es_port') username = args.username if args.username else data.get('es_username') password = args.password if args.password else data.get('es_password') + bearer = args.bearer if args.bearer else data.get('es_bearer') url_prefix = args.url_prefix if args.url_prefix is not None else data.get('es_url_prefix', '') use_ssl = args.ssl if args.ssl is not None else data.get('use_ssl') verify_certs = args.verify_certs if args.verify_certs is not None else data.get('verify_certs') is not False @@ -211,6 +213,7 @@ def main(): else: username = args.username if args.username else None password = args.password if args.password else None + bearer = args.bearer if args.bearer else None aws_region = args.aws_region host = args.host if args.host else input('Enter Elasticsearch host: ') port = args.port if args.port else int(input('Enter Elasticsearch port: ')) @@ -255,6 +258,7 @@ def main(): verify_certs=verify_certs, connection_class=RequestsHttpConnection, http_auth=http_auth, + headers=bearer, 
url_prefix=url_prefix, send_get_body_as=send_get_body_as, client_cert=client_cert, diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 1241315dc..7629e2a16 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -178,6 +178,7 @@ properties: verify_certs: {type: boolean} es_username: {type: string} es_password: {type: string} + es_bearer: {type: string} use_strftime_index: {type: boolean} # Optional Settings diff --git a/elastalert/util.py b/elastalert/util.py index bbb0600ff..bc64d9a8e 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -319,10 +319,12 @@ def elasticsearch_client(conf): es_conn_conf = build_es_conn_config(conf) auth = Auth() es_conn_conf['http_auth'] = auth(host=es_conn_conf['es_host'], - username=es_conn_conf['es_username'], - password=es_conn_conf['es_password'], + username=None if es_conn_conf['es_bearer'] else es_conn_conf['es_username'], + password=None if es_conn_conf['es_bearer'] else es_conn_conf['es_password'], aws_region=es_conn_conf['aws_region'], profile_name=es_conn_conf['profile']) + if es_conn_conf['es_bearer']: + es_conn_conf['headers'] = {"Authorization": "Bearer " + es_conn_conf['es_bearer']} return ElasticSearchClient(es_conn_conf) @@ -341,8 +343,10 @@ def build_es_conn_config(conf): parsed_conf['http_auth'] = None parsed_conf['es_username'] = None parsed_conf['es_password'] = None + parsed_conf['es_bearer'] = None parsed_conf['aws_region'] = None parsed_conf['profile'] = None + parsed_conf['headers'] = None parsed_conf['es_host'] = os.environ.get('ES_HOST', conf['es_host']) parsed_conf['es_port'] = int(os.environ.get('ES_PORT', conf['es_port'])) parsed_conf['es_url_prefix'] = '' @@ -356,6 +360,11 @@ def build_es_conn_config(conf): parsed_conf['es_username'] = conf['es_username'] parsed_conf['es_password'] = conf['es_password'] + if os.environ.get('ES_BEARER'): + parsed_conf['es_bearer'] = os.environ.get('ES_BEARER') + elif 'es_bearer' in conf: + parsed_conf['es_bearer'] = conf['es_bearer'] + if 'aws_region' in conf: parsed_conf['aws_region'] = conf['aws_region'] diff --git a/requirements.txt b/requirements.txt index 9c32052d0..0deab0ee0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,7 @@ croniter>=0.3.16 elasticsearch>=7.0.0 envparse>=0.2.0 exotel>=0.1.3 -jira>=1.0.10,<1.0.15 +jira>=2.0.0 jsonschema>=3.0.2 mock>=2.0.0 prison>=0.1.2 diff --git a/setup.py b/setup.py index 2845836a7..324b30a7c 100644 --- a/setup.py +++ b/setup.py @@ -44,6 +44,7 @@ 'python-dateutil>=2.6.0,<2.7.0', 'PyYAML>=3.12', 'requests>=2.10.0', + 'tzlocal<3.0', 'stomp.py>=4.1.17', 'texttable>=0.8.8', 'twilio>=6.0.0,<6.1', From 6e1eba21564cd12392e59dfbfa76e044b22dee31 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 20:08:29 -0400 Subject: [PATCH 0137/1065] Create python-publish.yml --- .github/workflows/python-publish.yml | 31 ++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 .github/workflows/python-publish.yml diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml new file mode 100644 index 000000000..8c173da23 --- /dev/null +++ b/.github/workflows/python-publish.yml @@ -0,0 +1,31 @@ +# This workflow will upload a Python Package using Twine when a release is created +# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries + +name: Upload Python Package + +on: + release: + types: [created] + +jobs: + deploy: + + runs-on: ubuntu-latest + + steps: + - 
uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.x' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install setuptools wheel twine + - name: Build and publish + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + run: | + python setup.py sdist bdist_wheel + twine upload dist/* From 8a554265669e803ab2e9f75ad7efb0b1f8df4973 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 20:17:07 -0400 Subject: [PATCH 0138/1065] Update release procedure and scripts --- .github/workflows/{release.yml => upload_chart.yml} | 4 ++-- README.md | 13 ++++++++++--- 2 files changed, 12 insertions(+), 5 deletions(-) rename .github/workflows/{release.yml => upload_chart.yml} (92%) diff --git a/.github/workflows/release.yml b/.github/workflows/upload_chart.yml similarity index 92% rename from .github/workflows/release.yml rename to .github/workflows/upload_chart.yml index 5f51356b8..7cba508b6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/upload_chart.yml @@ -1,8 +1,8 @@ -name: upload-chart +name: upload_chart on: release: - types: [published] + types: [created] jobs: build: diff --git a/README.md b/README.md index b899d3a55..b55fcd883 100644 --- a/README.md +++ b/README.md @@ -62,8 +62,12 @@ Maintainers, when creating a new release, follow the procedure below: `- PR/commit message #000 - @committer` - Check the box to 'Create a discussion for this release'. - Save the draft. -5. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. -6. Publish the release. +5. Verify that artifacts have been published: + - Python PIP package was [published][9] successfully. + - Helm chart has been [published][10] successfully. + - Docker Hub image was [published][8] successfully. +6. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. +7. Publish the release. ## License @@ -76,4 +80,7 @@ Elastalert is licensed under the [Apache License, Version 2.0][5]. 
[4]: https://elastalert2.readthedocs.io/en/latest/ruletypes.html#alerts [5]: http://www.apache.org/licenses/LICENSE-2 [6]: https://github.com/jertel/elastalert2/discussions -[7]: https://github.com/jertel/elastalert2/releases/new \ No newline at end of file +[7]: https://github.com/jertel/elastalert2/releases/new +[8]: https://hub.docker.com/r/jertel/elastalert2/builds +[9]: https://github.com/jertel/elastalert2/actions/workflows/python-publish.yml +[10]: https://github.com/jertel/elastalert2/actions/workflows/upload_chart.yml \ No newline at end of file From aa6bbf5516e1586636247629f08336c3ef86a165 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 20:18:00 -0400 Subject: [PATCH 0139/1065] Update release procedure and scripts --- .github/workflows/python-publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 8c173da23..0b003bac8 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -1,7 +1,7 @@ # This workflow will upload a Python Package using Twine when a release is created # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries -name: Upload Python Package +name: upload_python_package on: release: From 3778040d3f1f1c75aaeeb680f9d1bf87d3f2d96b Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 20:26:13 -0400 Subject: [PATCH 0140/1065] Split out release procedure from README into its own file --- README.md | 29 ----------------------------- RELEASE.md | 28 ++++++++++++++++++++++++++++ 2 files changed, 28 insertions(+), 29 deletions(-) create mode 100644 RELEASE.md diff --git a/README.md b/README.md index b55fcd883..c8fa89e12 100644 --- a/README.md +++ b/README.md @@ -40,35 +40,6 @@ docker build . -t elastalert See the Helm chart [README.md](chart/elastalert2/README.md) for information on installing this application into an existing Kubernetes cluster. -## Releases - -As Elastalert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme. It's up to the maintainers to determine when the project is ready for a release, however, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][6] and let us know. - -Maintainers, when creating a new release, follow the procedure below: - -1. Determine an appropriate new version number in the format _a.b.c_, using the following guidelines: - - The major version (a) should not change. - - The minor version (b) should be incremented if a new feature has been added or if a bug fix will have a significant user-impact. Reset the patch version to zero if the minor version is incremented. - - The patch version (c) should be incremented when low-impact bugs are fixed, or security vulnerabilities are patched. -2. Ensure the following are updated _before_ publishing/tagging the new release: - - [setup.py](setup.py): Match the version to the new release version - - [Chart.yaml](chart/elastalert2/Chart.yaml): Match chart version and the app version to the new release version (typically keep them in sync) - - [values.yaml](chart/elastalert2/values.yaml): Match the default image version to the new release version. - - [README.md](chart/elastalert2/README.md): Match the default image version to the new release version. -3. 
Double-check that the Docker image successfully built the latest image. -4. Create a [new][7] release. - - The title (and tag) of the release will be the same value as the new version determined in step 1. - - The description of the release will contain a bulleted list of all merged pull requests, in the following format: - `- PR/commit message #000 - @committer` - - Check the box to 'Create a discussion for this release'. - - Save the draft. -5. Verify that artifacts have been published: - - Python PIP package was [published][9] successfully. - - Helm chart has been [published][10] successfully. - - Docker Hub image was [published][8] successfully. -6. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. -7. Publish the release. - ## License Elastalert is licensed under the [Apache License, Version 2.0][5]. diff --git a/RELEASE.md b/RELEASE.md new file mode 100644 index 000000000..c43ee69b7 --- /dev/null +++ b/RELEASE.md @@ -0,0 +1,28 @@ +# Releases + +As Elastalert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme. It's up to the maintainers to determine when the project is ready for a release, however, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][6] and let us know. + +Maintainers, when creating a new release, follow the procedure below: + +1. Determine an appropriate new version number in the format _a.b.c_, using the following guidelines: + - The major version (a) should not change. + - The minor version (b) should be incremented if a new feature has been added or if a bug fix will have a significant user-impact. Reset the patch version to zero if the minor version is incremented. + - The patch version (c) should be incremented when low-impact bugs are fixed, or security vulnerabilities are patched. +2. Ensure the following are updated _before_ publishing/tagging the new release: + - [setup.py](setup.py): Match the version to the new release version + - [Chart.yaml](chart/elastalert2/Chart.yaml): Match chart version and the app version to the new release version (typically keep them in sync) + - [values.yaml](chart/elastalert2/values.yaml): Match the default image version to the new release version. + - [README.md](chart/elastalert2/README.md): Match the default image version to the new release version. +3. Double-check that the Docker image successfully built the latest image. +4. Create a [new][7] release. + - The title (and tag) of the release will be the same value as the new version determined in step 1. + - The description of the release will contain a bulleted list of all merged pull requests, in the following format: + `- PR/commit message #000 - @committer` + - Check the box to 'Create a discussion for this release'. + - Save the draft. +5. Verify that artifacts have been published: + - Python PIP package was [published][9] successfully. + - Helm chart has been [published][10] successfully. + - Docker Hub image was [published][8] successfully. +6. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. +7. Publish the release. 
\ No newline at end of file From df805fc32a7db45d14fc86dacfce2193d23963ec Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 20:27:10 -0400 Subject: [PATCH 0141/1065] Split out release procedure from README into its own file --- README.md | 3 --- RELEASE.md | 6 +++++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index c8fa89e12..f06ccb263 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,3 @@ Elastalert is licensed under the [Apache License, Version 2.0][5]. [5]: http://www.apache.org/licenses/LICENSE-2 [6]: https://github.com/jertel/elastalert2/discussions [7]: https://github.com/jertel/elastalert2/releases/new -[8]: https://hub.docker.com/r/jertel/elastalert2/builds -[9]: https://github.com/jertel/elastalert2/actions/workflows/python-publish.yml -[10]: https://github.com/jertel/elastalert2/actions/workflows/upload_chart.yml \ No newline at end of file diff --git a/RELEASE.md b/RELEASE.md index c43ee69b7..0323a852b 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -25,4 +25,8 @@ Maintainers, when creating a new release, follow the procedure below: - Helm chart has been [published][10] successfully. - Docker Hub image was [published][8] successfully. 6. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. -7. Publish the release. \ No newline at end of file +7. Publish the release. + +[8]: https://hub.docker.com/r/jertel/elastalert2/builds +[9]: https://github.com/jertel/elastalert2/actions/workflows/python-publish.yml +[10]: https://github.com/jertel/elastalert2/actions/workflows/upload_chart.yml \ No newline at end of file From 4180fd43ad57f119513c49c2e906336480a7e052 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 20:29:14 -0400 Subject: [PATCH 0142/1065] Split out release procedure from README into its own file --- README.md | 3 +-- RELEASE.md | 22 +++++++++++++--------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index f06ccb263..463b238bb 100644 --- a/README.md +++ b/README.md @@ -50,5 +50,4 @@ Elastalert is licensed under the [Apache License, Version 2.0][5]. [3]: https://elastalert2.readthedocs.io/ [4]: https://elastalert2.readthedocs.io/en/latest/ruletypes.html#alerts [5]: http://www.apache.org/licenses/LICENSE-2 -[6]: https://github.com/jertel/elastalert2/discussions -[7]: https://github.com/jertel/elastalert2/releases/new + diff --git a/RELEASE.md b/RELEASE.md index 0323a852b..91cdffe9d 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,6 +1,6 @@ # Releases -As Elastalert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme. It's up to the maintainers to determine when the project is ready for a release, however, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][6] and let us know. +As Elastalert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme. It's up to the maintainers to determine when the project is ready for a release, however, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][5] and let us know. 
Maintainers, when creating a new release, follow the procedure below: @@ -14,19 +14,23 @@ Maintainers, when creating a new release, follow the procedure below: - [values.yaml](chart/elastalert2/values.yaml): Match the default image version to the new release version. - [README.md](chart/elastalert2/README.md): Match the default image version to the new release version. 3. Double-check that the Docker image successfully built the latest image. -4. Create a [new][7] release. +4. Create a [new][1] release. - The title (and tag) of the release will be the same value as the new version determined in step 1. - The description of the release will contain a bulleted list of all merged pull requests, in the following format: - `- PR/commit message #000 - @committer` + `- change description #PR - @committer` + Ex: + `- Added new Foobar alerts #12345 - @jertel` - Check the box to 'Create a discussion for this release'. - Save the draft. 5. Verify that artifacts have been published: - - Python PIP package was [published][9] successfully. - - Helm chart has been [published][10] successfully. - - Docker Hub image was [published][8] successfully. + - Python PIP package was [published][2] successfully. + - Helm chart has been [published][3] successfully. + - Docker Hub image was [published][4] successfully. 6. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. 7. Publish the release. -[8]: https://hub.docker.com/r/jertel/elastalert2/builds -[9]: https://github.com/jertel/elastalert2/actions/workflows/python-publish.yml -[10]: https://github.com/jertel/elastalert2/actions/workflows/upload_chart.yml \ No newline at end of file +[1]: https://github.com/jertel/elastalert2/releases/new +[2]: https://hub.docker.com/r/jertel/elastalert2/builds +[3]: https://github.com/jertel/elastalert2/actions/workflows/python-publish.yml +[4]: https://github.com/jertel/elastalert2/actions/workflows/upload_chart.yml +[5]: https://github.com/jertel/elastalert2/discussions \ No newline at end of file From d47867d9f687f30399548c974c82715685babf15 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 20:30:13 -0400 Subject: [PATCH 0143/1065] Split out release procedure from README into its own file --- RELEASE.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/RELEASE.md b/RELEASE.md index 91cdffe9d..b54189daf 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -23,9 +23,9 @@ Maintainers, when creating a new release, follow the procedure below: - Check the box to 'Create a discussion for this release'. - Save the draft. 5. Verify that artifacts have been published: - - Python PIP package was [published][2] successfully. - - Helm chart has been [published][3] successfully. - - Docker Hub image was [published][4] successfully. + - Python PIP package was [published][3] successfully. + - Helm chart has been [published][4] successfully. + - Docker Hub image was [published][2] successfully. 6. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. 7. Publish the release. 
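For step 2 of the procedure above, a minimal sketch of the version fields that must stay in sync, using the 2.0.3 bump from the next patch as the example (the combined snippet below is illustrative only; the fields live in the two files named in the comments):

```yaml
# chart/elastalert2/Chart.yaml -- chart and app versions match the release tag
version: 2.0.3
appVersion: 2.0.3

# chart/elastalert2/values.yaml -- the default image tag matches the release tag
image:
  repository: jertel/elastalert2
  tag: 2.0.3
```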
From 14a4c93a5800be4998205a3c4e10cdfed33b3f92 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 20:34:00 -0400 Subject: [PATCH 0144/1065] Prepare release 2.0.3 to verify publish actions --- RELEASE.md | 2 +- chart/elastalert2/Chart.yaml | 4 ++-- chart/elastalert2/README.md | 2 +- chart/elastalert2/values.yaml | 2 +- setup.py | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/RELEASE.md b/RELEASE.md index b54189daf..fe9756616 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -25,7 +25,7 @@ Maintainers, when creating a new release, follow the procedure below: 5. Verify that artifacts have been published: - Python PIP package was [published][3] successfully. - Helm chart has been [published][4] successfully. - - Docker Hub image was [published][2] successfully. + - Docker Hub image was [tagged][2] successfully. 6. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. 7. Publish the release. diff --git a/chart/elastalert2/Chart.yaml b/chart/elastalert2/Chart.yaml index 813b4808c..4d469c10c 100644 --- a/chart/elastalert2/Chart.yaml +++ b/chart/elastalert2/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: Elastalert 2 is a simple framework for alerting on anomalies, spikes, or other patterns of interest from data in Elasticsearch. name: elastalert2 -version: 2.0.2 -appVersion: 2.0.2 +version: 2.0.3 +appVersion: 2.0.3 home: https://github.com/jertel/elastalert2 sources: - https://github.com/jertel/elastalert2 diff --git a/chart/elastalert2/README.md b/chart/elastalert2/README.md index a3dd5c912..86132ff55 100644 --- a/chart/elastalert2/README.md +++ b/chart/elastalert2/README.md @@ -47,7 +47,7 @@ The command removes all the Kubernetes components associated with the chart and | Parameter | Description | Default | |----------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|---------------------------------| | `image.repository` | docker image | jertel/elastalert-docker | -| `image.tag` | docker image tag | 2.0.2 | +| `image.tag` | docker image tag | 2.0.3 | | `image.pullPolicy` | image pull policy | IfNotPresent | | `podAnnotations` | Annotations to be added to pods | {} | | `command` | command override for container | `NULL` | diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml index c3a24acbb..d54f54bd5 100644 --- a/chart/elastalert2/values.yaml +++ b/chart/elastalert2/values.yaml @@ -25,7 +25,7 @@ image: # docker image repository: jertel/elastalert2 # docker image tag - tag: 2.0.2 + tag: 2.0.3 pullPolicy: IfNotPresent resources: {} diff --git a/setup.py b/setup.py index 058ca5313..eab968325 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ base_dir = os.path.dirname(__file__) setup( name='elastalert2', - version='2.0.2', + version='2.0.3', description='Runs custom filters on Elasticsearch and alerts on matches', setup_requires='setuptools', license='Apache 2.0', From 92f84f7a8340bedf8d3cb94e27381ac015300aae Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 24 Apr 2021 22:25:25 -0400 Subject: [PATCH 0145/1065] Use consistent description for Helm Chart and Python PIP library --- chart/elastalert2/Chart.yaml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/chart/elastalert2/Chart.yaml b/chart/elastalert2/Chart.yaml index 4d469c10c..23f24baf7 100644 --- a/chart/elastalert2/Chart.yaml +++ b/chart/elastalert2/Chart.yaml @@ -1,5 
+1,5 @@
 apiVersion: v1
-description: Elastalert 2 is a simple framework for alerting on anomalies, spikes, or other patterns of interest from data in Elasticsearch.
+description: Automated rule-based alerting for Elasticsearch
 name: elastalert2
 version: 2.0.3
 appVersion: 2.0.3
diff --git a/setup.py b/setup.py
index eab968325..6c3f620ad 100644
--- a/setup.py
+++ b/setup.py
@@ -9,7 +9,7 @@
 setup(
     name='elastalert2',
     version='2.0.3',
-    description='Runs custom filters on Elasticsearch and alerts on matches',
+    description='Automated rule-based alerting for Elasticsearch',
     setup_requires='setuptools',
     license='Apache 2.0',
     classifiers=[
From 59a1e56f7fec37f56672c453a52f75aff628ce0c Mon Sep 17 00:00:00 2001
From: StribPav <123rpv@gmail.com>
Date: Mon, 26 Apr 2021 01:03:51 +0300
Subject: [PATCH 0146/1065] Delete extra line

---
 setup.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/setup.py b/setup.py
index 2967f54b6..6c3f620ad 100644
--- a/setup.py
+++ b/setup.py
@@ -45,7 +45,6 @@
         'python-dateutil>=2.6.0,<2.7.0',
         'PyYAML>=5.1',
         'requests>=2.10.0',
-        'tzlocal<3.0',
         'stomp.py>=4.1.17',
         'texttable>=0.8.8',
         'twilio>=6.0.0,<6.1',
From 74c04bca9e4cb5d9947b5bde82d0597bd992944b Mon Sep 17 00:00:00 2001
From: Cameron Duff
Date: Mon, 26 Apr 2021 08:08:26 +1000
Subject: [PATCH 0147/1065] Remove 'twilio_from_number' from required options of Twilio Alerter

---
 elastalert/alerts.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index db3988442..ea904f2d3 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -1468,14 +1468,14 @@ def get_info(self):
 
 class TwilioAlerter(Alerter):
-    required_options = frozenset(['twilio_account_sid', 'twilio_auth_token', 'twilio_to_number', 'twilio_from_number'])
+    required_options = frozenset(['twilio_account_sid', 'twilio_auth_token', 'twilio_to_number'])
 
     def __init__(self, rule):
         super(TwilioAlerter, self).__init__(rule)
         self.twilio_account_sid = self.rule['twilio_account_sid']
         self.twilio_auth_token = self.rule['twilio_auth_token']
         self.twilio_to_number = self.rule['twilio_to_number']
-        self.twilio_from_number = self.rule['twilio_from_number']
+        self.twilio_from_number = self.rule.get('twilio_from_number')
 
     def alert(self, matches):
         client = TwilioClient(self.twilio_account_sid, self.twilio_auth_token)
From 5b327ba0234726d645ed8d2d159a4e3ea12243ab Mon Sep 17 00:00:00 2001
From: Cameron Duff
Date: Mon, 26 Apr 2021 08:09:56 +1000
Subject: [PATCH 0148/1065] Retrieve new options for twilio alerter

---
 elastalert/alerts.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index ea904f2d3..7f9da1eb2 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -1476,6 +1476,8 @@ def __init__(self, rule):
         self.twilio_auth_token = self.rule['twilio_auth_token']
         self.twilio_to_number = self.rule['twilio_to_number']
         self.twilio_from_number = self.rule.get('twilio_from_number')
+        self.twilio_message_service_sid = self.rule.get('twilio_message_service_sid')
+        self.twilio_use_copilot = self.rule.get('twilio_use_copilot', False)
 
     def alert(self, matches):
         client = TwilioClient(self.twilio_account_sid, self.twilio_auth_token)
From 9bdfd0e6976ad6d3fc2a3c8e821994a409b68b29 Mon Sep 17 00:00:00 2001
From: Cameron Duff
Date: Mon, 26 Apr 2021 08:10:52 +1000
Subject: [PATCH 0149/1065] Implement Twilio Copilot Alerting

Maintain compatibility with Twilio SMS API, validate new settings
conditionally using the 'twilio_use_copilot' flag. 
Throws EAException in case of an incorrect combination of settings.

---
 elastalert/alerts.py | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index 7f9da1eb2..76aa60ab3 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -1483,10 +1483,20 @@ def alert(self, matches):
         client = TwilioClient(self.twilio_account_sid, self.twilio_auth_token)
 
         try:
-            client.messages.create(body=self.rule['name'],
-                                   to=self.twilio_to_number,
-                                   from_=self.twilio_from_number)
+            if self.twilio_use_copilot:
+                if self.twilio_message_service_sid == None:
+                    raise EAException("Twilio Copilot requires the 'twilio_message_service_sid' option")
+
+                client.messages.create(body=self.rule['name'],
+                                       to=self.twilio_to_number,
+                                       messaging_service_sid=self.twilio_message_service_sid)
+            else:
+                if self.twilio_from_number == None:
+                    raise EAException("Twilio SMS requires the 'twilio_from_number' option")
+
+                client.messages.create(body=self.rule['name'],
+                                       to=self.twilio_to_number,
+                                       from_=self.twilio_from_number)
 
         except TwilioRestException as e:
             raise EAException("Error posting to twilio: %s" % e)
From 93776b3962051b828342c9f524e4136de220da2d Mon Sep 17 00:00:00 2001
From: Cameron Duff
Date: Mon, 26 Apr 2021 08:12:08 +1000
Subject: [PATCH 0150/1065] fix comparisons to None for Travis CI and remove trailing whitespace

---
 elastalert/alerts.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index 76aa60ab3..7d09272bc 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -1484,19 +1484,19 @@ def alert(self, matches):
 
         try:
             if self.twilio_use_copilot:
-                if self.twilio_message_service_sid == None:
+                if self.twilio_message_service_sid is None:
                     raise EAException("Twilio Copilot requires the 'twilio_message_service_sid' option")
 
                 client.messages.create(body=self.rule['name'],
                                        to=self.twilio_to_number,
                                        messaging_service_sid=self.twilio_message_service_sid)
             else:
-                if self.twilio_from_number == None:
+                if self.twilio_from_number is None:
                     raise EAException("Twilio SMS requires the 'twilio_from_number' option")
 
                 client.messages.create(body=self.rule['name'],
                                        to=self.twilio_to_number,
-                                       from_=self.twilio_from_number) 
+                                       from_=self.twilio_from_number)
 
         except TwilioRestException as e:
             raise EAException("Error posting to twilio: %s" % e)
From 60cff59b739182b910a2243a32cafbaa3d53b561 Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Mon, 26 Apr 2021 07:05:38 +0100
Subject: [PATCH 0151/1065] Add a changelog to keep track of version differences

I thought this might be useful to keep track of changes over time. I am
happy to maintain it, but ideally we could encourage PR authors to add
their changes as they go.

This might also be useful to get an idea of the appropriate version
change, depending on the significance of the included changes. 
--- CHANGELOG.md | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..660ce7312 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,11 @@ +# 2.0.4 (unreleased) + +## Breaking changes + +## New features +- [Add support for statsd metrics][0] +- [Add support for multiple imports of rules via recursive import][0] + +## Other changes + +[0]: https://github.com/jertel/elastalert2/pull/83 From cf12800a57c783cf1d02cc4e5abb2387d45c390d Mon Sep 17 00:00:00 2001 From: Cameron Duff Date: Mon, 26 Apr 2021 16:51:31 +1000 Subject: [PATCH 0152/1065] document updated options to twilio alerter --- docs/source/ruletypes.rst | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index cb4d0196d..0ce990482 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2009,9 +2009,12 @@ The alerter has one optional argument: Twilio ~~~~~~ -Twilio alerter will trigger an incident to a mobile phone as sms from your twilio phone number. Alert name will arrive as sms once this option is chosen. +Twilio alerter will trigger an incident to a mobile phone as an sms from your twilio phone number. The sms will contain the alert name. You may use either twilio SMS or twilio copilot +to send the message, controlled by the ``twilio_use_copilot`` option. -The alerter requires the following option: +Note that when twilio copilot *is* used the ``twilio_message_service_sid`` option is required. Likewise, when *not* using twilio copilot, the ``twilio_from_number`` option is required. + +The alerter requires the following options: ``twilio_account_sid``: This is sid of your twilio account. @@ -2019,7 +2022,13 @@ The alerter requires the following option: ``twilio_to_number``: The phone number where you would like send the notification. -``twilio_from_number``: Your twilio phone number from which message will be sent. +Either one of + * ``twilio_from_number``: Your twilio phone number from which message will be sent. + * ``twilio_message_service_sid``: The SID of your twilio message service. + +Optional: + +``twilio_use_copilot``: Whether or not to use twilio copilot, False by default. Splunk On-Call (Formerly VictorOps) From 82518e8be832fcf423054b1369086dec8237a341 Mon Sep 17 00:00:00 2001 From: Cameron Duff Date: Mon, 26 Apr 2021 18:24:55 +1000 Subject: [PATCH 0153/1065] add new twilio settings to schema --- elastalert/schema.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index ce23645ae..e474cbc62 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -327,6 +327,8 @@ properties: twilio_auth_token: {type: string} twilio_to_number: {type: string} twilio_from_number: {type: string} + twilio_message_service_sid: {type: string} + twilio_use_copilot: {type: boolean} ### VictorOps victorops_api_key: {type: string} From 607d90e4bcae4e020d213e211e1ea8aa3ff48e50 Mon Sep 17 00:00:00 2001 From: Cameron Duff Date: Mon, 26 Apr 2021 19:05:28 +1000 Subject: [PATCH 0154/1065] add example usage of twilio alerter configuration --- docs/source/ruletypes.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 0ce990482..0a407f4b8 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2030,6 +2030,22 @@ Optional: ``twilio_use_copilot``: Whether or not to use twilio copilot, False by default. 
+Example usage:: + + alert: + - twilio # With Copilot + twilio_use_copilot: True + twilio_to_number: "0123456789" + twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345" + twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" + twilio_message_service_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" + + - twilio # With Legacy SMS + twilio_use_copilot: False + twilio_to_number: "0123456789" + twilio_from_number: "9876543210" + twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345" + twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" Splunk On-Call (Formerly VictorOps) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From fb7d11631267e98b66a1f33c03df9fe93e276947 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Mon, 26 Apr 2021 08:43:18 -0400 Subject: [PATCH 0155/1065] Switch to use github actions instead of DockerHub for building images, to avoid needing to git clone the repo --- .github/workflows/publish_image.yml | 44 +++++++++++++++++++++++++++++ Dockerfile | 11 ++++---- 2 files changed, 50 insertions(+), 5 deletions(-) create mode 100644 .github/workflows/publish_image.yml diff --git a/.github/workflows/publish_image.yml b/.github/workflows/publish_image.yml new file mode 100644 index 000000000..ffcd8c494 --- /dev/null +++ b/.github/workflows/publish_image.yml @@ -0,0 +1,44 @@ +name: publish_image + +on: + push: + # Publish `master` as Docker `latest` image. + branches: + - master + + tags: + - 2.* + +env: + IMAGE_NAME: elastalert2 + +jobs: + push: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Publish image + run: docker build . --file Dockerfile --tag $IMAGE_NAME + + - name: Log into registry + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin + + - name: Push image + run: | + IMAGE_ID=docker.pkg.github.com/${{ github.repository }}/$IMAGE_NAME + + # Change all uppercase to lowercase + IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]') + + # Strip git ref prefix from version + VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,') + + # Use Docker `latest` tag convention + [ "$VERSION" == "master" ] && VERSION=latest + + echo IMAGE_ID=$IMAGE_ID + echo VERSION=$VERSION + + docker tag $IMAGE_NAME $IMAGE_ID:$VERSION + docker push $IMAGE_ID:$VERSION \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 9b6924ce3..3ace09dc6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,14 +1,15 @@ FROM python:alpine as builder -LABEL description="Elastalert 2 suitable for Kubernetes and Helm" +LABEL description="Elastalert 2 Official Image" LABEL maintainer="Jason Ertel (jertel at codesim.com)" RUN apk --update upgrade && \ - apk add git && \ - rm -rf /var/cache/apk/* + rm -rf /var/cache/apk/* && \ + mkdir -p /tmp/elastalert + +COPY . 
/tmp/elastalert RUN mkdir -p /opt/elastalert && \ - git clone https://github.com/jertel/elastalert2 /tmp/elastalert && \ cd /tmp/elastalert && \ pip install setuptools wheel && \ python setup.py sdist bdist_wheel @@ -18,7 +19,7 @@ FROM python:alpine COPY --from=builder /tmp/elastalert/dist/*.tar.gz /tmp/ RUN apk --update upgrade && \ - apk add gcc libffi-dev musl-dev python3-dev openssl-dev tzdata libmagic cargo && \ + apk add gcc libffi-dev musl-dev python3-dev openssl-dev tzdata libmagic cargo jq curl && \ pip install /tmp/*.tar.gz && \ apk del gcc libffi-dev musl-dev python3-dev openssl-dev cargo && \ rm -rf /var/cache/apk/* From 281fcbb7f57071acef94035d356b85038fb19a44 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Mon, 26 Apr 2021 08:44:08 -0400 Subject: [PATCH 0156/1065] Switch to use github actions instead of DockerHub for building images, to avoid needing to git clone the repo --- .github/workflows/publish_image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish_image.yml b/.github/workflows/publish_image.yml index ffcd8c494..aa7a57fee 100644 --- a/.github/workflows/publish_image.yml +++ b/.github/workflows/publish_image.yml @@ -18,7 +18,7 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Publish image + - name: Build image run: docker build . --file Dockerfile --tag $IMAGE_NAME - name: Log into registry From 92af6397b477d564ce10147ac92de4954d437455 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Mon, 26 Apr 2021 09:01:28 -0400 Subject: [PATCH 0157/1065] Push to both GitHub registry and Docker Hub registry --- .github/workflows/publish_image.yml | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish_image.yml b/.github/workflows/publish_image.yml index aa7a57fee..086ca5420 100644 --- a/.github/workflows/publish_image.yml +++ b/.github/workflows/publish_image.yml @@ -11,6 +11,7 @@ on: env: IMAGE_NAME: elastalert2 + DOCKER_REPO: jertel/elastalert2 jobs: push: @@ -21,9 +22,12 @@ jobs: - name: Build image run: docker build . --file Dockerfile --tag $IMAGE_NAME - - name: Log into registry + - name: Log into GitHub Registry run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin + - name: Log into Docker Registry + run: echo "${{ secrets.DOCKER_TOKEN }}" | docker login -u ${{ secrets.DOCKER_USERNAME }} --password-stdin + - name: Push image run: | IMAGE_ID=docker.pkg.github.com/${{ github.repository }}/$IMAGE_NAME @@ -40,5 +44,10 @@ jobs: echo IMAGE_ID=$IMAGE_ID echo VERSION=$VERSION + # Push to GitHub Package docker tag $IMAGE_NAME $IMAGE_ID:$VERSION - docker push $IMAGE_ID:$VERSION \ No newline at end of file + docker push $IMAGE_ID:$VERSION + + # Push to Docker Hub + docker tag $IMAGE_NAME $DOCKER_REPO:$VERSION + docker push $DOCKER_REPO:$VERSION \ No newline at end of file From b2ad5d1306d225961097537c78983067afff2c54 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Mon, 26 Apr 2021 09:42:57 -0400 Subject: [PATCH 0158/1065] Add note to README about the latest tag for container images --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 463b238bb..a108bed08 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,8 @@ The current status of the CI workflow: If you're interested in a pre-built Docker image check out the [elastalert2][2] project on Docker Hub. +Be aware that the `latest` tag of the image represents the latest commit into the master branch. 
If you prefer to upgrade more slowly you will need utilize a versioned tag, such as `2.0.3` instead. + A properly configured elastalert_config.json file must be mounted into the container during startup of the container. Use the [example file][1] provided as a template, and once saved locally to a file such as `/tmp/elastalert.yaml`, run the container as follows: ```bash @@ -42,7 +44,7 @@ See the Helm chart [README.md](chart/elastalert2/README.md) for information on i ## License -Elastalert is licensed under the [Apache License, Version 2.0][5]. +Elastalert 2 is licensed under the [Apache License, Version 2.0][5]. [0]: https://github.com/yelp/elastalert [1]: https://github.com/jertel/elastalert2/blob/master/config.yaml.example From 75da39f60443c30333e0f06747c7d914b2b6571d Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Mon, 26 Apr 2021 09:55:11 -0400 Subject: [PATCH 0159/1065] Add missing config.yaml for Helm chart by providing more precise .gitignore rule --- .gitignore | 2 +- chart/elastalert2/templates/config.yaml | 49 +++++++++++++++++++++++++ 2 files changed, 50 insertions(+), 1 deletion(-) create mode 100644 chart/elastalert2/templates/config.yaml diff --git a/.gitignore b/.gitignore index 269474d73..badcb7a3c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ -config.yaml +/config.yaml .tox/ .coverage .idea/* diff --git a/chart/elastalert2/templates/config.yaml b/chart/elastalert2/templates/config.yaml new file mode 100644 index 000000000..d4f3d27ab --- /dev/null +++ b/chart/elastalert2/templates/config.yaml @@ -0,0 +1,49 @@ +{{- if not .Values.secretConfigName }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ template "elastalert.fullname" . }}-config + labels: + app: {{ template "elastalert.name" . }} + chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} + release: {{ .Release.Name }} + heritage: {{ .Release.Service }} +data: + elastalert_config: |- + --- + rules_folder: /opt/rules + scan_subdirectories: false + run_every: + minutes: {{ .Values.runIntervalMins }} +{{- if .Values.realertIntervalMins }} + realert: + minutes: {{ .Values.realertIntervalMins }} +{{- end }} + buffer_time: + minutes: {{ .Values.bufferTimeMins }} + es_host: {{ .Values.elasticsearch.host }} + es_port: {{ .Values.elasticsearch.port }} +{{- if .Values.elasticsearch.username }} + es_username: {{ .Values.elasticsearch.username }} +{{- end }} +{{- if .Values.elasticsearch.password }} + es_password: {{ .Values.elasticsearch.password }} +{{- end }} + writeback_index: {{ .Values.writebackIndex }} + use_ssl: {{ .Values.elasticsearch.useSsl }} + verify_certs: {{ .Values.elasticsearch.verifyCerts }} +{{- if .Values.elasticsearch.clientCert }} + client_cert: {{ .Values.elasticsearch.clientCert }} +{{- end }} +{{- if .Values.elasticsearch.clientKey }} + client_key: {{ .Values.elasticsearch.clientKey }} +{{- end }} +{{- if .Values.elasticsearch.caCerts }} + ca_certs: {{ .Values.elasticsearch.caCerts }} +{{- end }} + alert_time_limit: + minutes: {{ .Values.alertRetryLimitMins }} +{{- if .Values.extraConfigOptions }} +{{ toYaml .Values.extraConfigOptions | indent 4 }} +{{- end }} +{{- end }} From 91eb952094b864d7480300027b551b862cf7956f Mon Sep 17 00:00:00 2001 From: Rob Rankin Date: Mon, 26 Apr 2021 14:35:01 +0000 Subject: [PATCH 0160/1065] Pin Python elasticsearch to 7.0.0 --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index d495170d2..48490346e 100644 --- a/requirements.txt +++ b/requirements.txt @@ 
-5,7 +5,7 @@ boto3>=1.4.4 cffi>=1.11.5 configparser>=3.5.0 croniter>=0.3.16 -elasticsearch>=7.0.0,<8.0.0 +elasticsearch==7.0.0 envparse>=0.2.0 exotel>=0.1.3 Jinja2==2.11.3 diff --git a/setup.py b/setup.py index 2436ed79f..c88e770ae 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,7 @@ 'boto3>=1.4.4', 'configparser>=3.5.0', 'croniter>=0.3.16', - 'elasticsearch>=7.0.0,<8.0.0', + 'elasticsearch==7.0.0', 'envparse>=0.2.0', 'exotel>=0.1.3', 'jira>=2.0.0', From 04a330788924890f47befe4aec2789455e1a427e Mon Sep 17 00:00:00 2001 From: Cameron Duff Date: Tue, 27 Apr 2021 09:07:00 +1000 Subject: [PATCH 0161/1065] update twilio module --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index d495170d2..e2ebc5e81 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,5 +22,5 @@ requests>=2.10.0 stomp.py>=4.1.17 texttable>=0.8.8 statsd-tags==3.2.1.post1 -twilio>=6.0.0,<6.1 +twilio>=6.0.0,<6.58 tzlocal<3.0 diff --git a/setup.py b/setup.py index 2436ed79f..eb56bee90 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ 'requests>=2.10.0', 'stomp.py>=4.1.17', 'texttable>=0.8.8', - 'twilio>=6.0.0,<6.1', + 'twilio>=6.0.0,<6.58', 'cffi>=1.11.5', 'statsd-tags==3.2.1.post1', 'tzlocal<3.0' From 61b7479cad04d1150c5695e7067dcc4be9da7deb Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Tue, 27 Apr 2021 22:23:06 +0900 Subject: [PATCH 0162/1065] Update pylint requirement from <2.8 to <2.9 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 5ceb8762a..66bcc3861 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,7 +4,7 @@ flake8 m2r2 pluggy>=0.12.0 pre-commit -pylint<2.8 +pylint<2.9 pytest<3.7.0 setuptools sphinx_rtd_theme From a1103fcc0bdc99165789ec48874909449d7ef3ab Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Tue, 27 Apr 2021 23:25:14 +0900 Subject: [PATCH 0163/1065] python-dateutil <2.7.0,>=2.6.0 to >=2.6.0,<2.9.0 --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 48490346e..17bf18273 100644 --- a/requirements.txt +++ b/requirements.txt @@ -16,7 +16,7 @@ prison>=0.1.2 prometheus_client>=0.10.1 py-zabbix>=1.1.3 PyStaticConfiguration>=0.10.3 -python-dateutil>=2.6.0,<2.7.0 +python-dateutil>=2.6.0,<2.9.0 PyYAML>=5.1 requests>=2.10.0 stomp.py>=4.1.17 diff --git a/setup.py b/setup.py index c88e770ae..8fa352cf9 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,7 @@ 'prometheus_client>=0.10.1', 'py-zabbix>=1.1.3', 'PyStaticConfiguration>=0.10.3', - 'python-dateutil>=2.6.0,<2.7.0', + 'python-dateutil>=2.6.0,<2.9.0', 'PyYAML>=5.1', 'requests>=2.10.0', 'stomp.py>=4.1.17', From 5d3c7ba3729f9034aed319b090a631e1a03dd81e Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Tue, 27 Apr 2021 12:18:42 -0400 Subject: [PATCH 0164/1065] Removing change to ES lib due to potential conflicts --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 44fbd055b..4870f4042 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,7 @@ 'boto3>=1.4.4', 'configparser>=3.5.0', 'croniter>=0.3.16', - 'elasticsearch>=7.5.0', + 'elasticsearch==7.0.0', 'envparse>=0.2.0', 'exotel>=0.1.3', 'jira>=2.0.0', From b07d31dce0de20896dac4bcdb1a858c6f9f9777c Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Tue, 27 Apr 2021 12:28:57 -0400 Subject: [PATCH 0165/1065] Add new ssl_show_warn option to documentation and example files 
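
The new option complements the existing TLS settings: with verify_certs set
to False, the Elasticsearch client can log a certificate warning on every
query, and ssl_show_warn: False silences those warnings. A minimal sketch of
the option in an Elastalert config, based on the documentation changes below
(the host and port values are illustrative):

    es_host: elasticsearch.example.com
    es_port: 9200
    use_ssl: True
    verify_certs: False
    ssl_show_warn: False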
--- config.yaml.example | 3 +++ docs/source/elastalert.rst | 2 ++ docs/source/ruletypes.rst | 2 ++ docs/source/running_elastalert.rst | 2 ++ 4 files changed, 9 insertions(+) diff --git a/config.yaml.example b/config.yaml.example index 973a3e9d5..36fd1b12d 100644 --- a/config.yaml.example +++ b/config.yaml.example @@ -42,6 +42,9 @@ es_port: 9200 # Verify TLS certificates #verify_certs: True +# Show TLS or certificate related warnings +#ssl_show_warn: True + # GET request with body is the default option for Elasticsearch. # If it fails for some reason, you can pass 'GET', 'POST' or 'source'. # See http://elasticsearch-py.readthedocs.io/en/master/connection.html?highlight=send_get_body_as#transport diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 6b5bfe31f..163084f5c 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -132,6 +132,8 @@ The environment variable ``ES_USE_SSL`` will override this field. ``verify_certs``: Optional; whether or not to verify TLS certificates; set to ``True`` or ``False``. The default is ``True``. +``ssl_show_warn``: Optional; suppress TLS and certificate related warnings; set to ``True`` or ``False``. The default is ``True``. + ``client_cert``: Optional; path to a PEM certificate to use as the client certificate. ``client_key``: Optional; path to a private key file to use as the client key. diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 296e42ab4..19d0d5474 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -34,6 +34,8 @@ Rule Configuration Cheat Sheet +--------------------------------------------------------------+ | | ``verify_certs`` (boolean, default True) | | +--------------------------------------------------------------+ | +| ``ssl_show_warn`` (boolean, default True) | | ++--------------------------------------------------------------+ | | ``es_username`` (string, no default) | | +--------------------------------------------------------------+ | | ``es_password`` (string, no default) | | diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index 683f23543..1ea32b15e 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -57,6 +57,8 @@ Next, open up config.yaml.example. In it, you will find several configuration op ``verify_certs``: Optional; whether or not to verify TLS certificates; set to ``True`` or ``False``. The default is ``True`` +``ssl_show_warn``: Optional; suppress TLS and certificate related warnings; set to ``True`` or ``False``. The default is ``True``. 
+ ``client_cert``: Optional; path to a PEM certificate to use as the client certificate ``client_key``: Optional; path to a private key file to use as the client key From 36414559e213169e2fea59aee67b037ce4145a6d Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Tue, 27 Apr 2021 12:44:19 -0400 Subject: [PATCH 0166/1065] Update changelog with changes for 2.0.4 --- CHANGELOG.md | 34 ++++++++++++++++++++++++++++++---- 1 file changed, 30 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 660ce7312..8d7a59d7b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,11 +1,37 @@ -# 2.0.4 (unreleased) +# Upcoming release ## Breaking changes +- None ## New features -- [Add support for statsd metrics][0] -- [Add support for multiple imports of rules via recursive import][0] +- TBD - [#000](https://github.com/jertel/elastalert2/pull/000) - @some_elastic_contributor_tbd ## Other changes +- None -[0]: https://github.com/jertel/elastalert2/pull/83 +# 2.0.4 + +## Breaking changes +- None + +## New features +- Update python-dateutil requirement from <2.7.0,>=2.6.0 to >=2.6.0,<2.9.0 - [#96](https://github.com/jertel/elastalert2/pull/96) - @nsano-rururu +- Update pylint requirement from <2.8 to <2.9 - [#95](https://github.com/jertel/elastalert2/pull/95) - @nsano-rururu +- Pin ES library to 7.0.0 due to upcoming newer library conflicts - [#90](https://github.com/jertel/elastalert2/pull/90) - @robrankin +- Re-introduce CHANGELOG.md to project - [#88](https://github.com/jertel/elastalert2/pull/88) - @ferozsalam +- Add option for suppressing TLS warnings - [#87](https://github.com/jertel/elastalert2/pull/87) - @alvarolmedo +- Add support for Twilio Copilot - [#86](https://github.com/jertel/elastalert2/pull/86) - @cdmastercom +- Support bearer token authentication with ES - [#85](https://github.com/jertel/elastalert2/pull/85) - @StribPav +- Add support for statsd metrics - [#83](https://github.com/jertel/elastalert2/pull/83) - @eladamitpxi +- Add support for multiple imports of rules via recursive import - [#83](https://github.com/jertel/elastalert2/pull/83) - @eladamitpxi +- Specify search size of 0 to improve efficiency of searches - [#82](https://github.com/jertel/elastalert2/pull/82) - @clyfish +- Add alert handler to create Datadog events - [#81](https://github.com/jertel/elastalert2/pull/81) - @3vanlock + +## Other changes + +- Added missing Helm chart config.yaml template file. +- Update .gitignore with more precise rule for /config.yaml file. +- Now publishing container images to both DockerHub and to GitHub Packages for redundancy. +- Container images are now built and published via GitHub actions instead of relying on DockerHub's automated builds. +- Update PIP library description and Helm chart description to be consistent. +- Continue updates to change references from _Elastalert_ to _Elastalert 2_ \ No newline at end of file From 60e5c6782aa8d3397047a1f3db922796da65a292 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Tue, 27 Apr 2021 12:57:22 -0400 Subject: [PATCH 0167/1065] Update contribution guidelines to include more specific instructions; Rename RELEASES.md to CONTRIBUTING.md for consistency with the general community --- README.md | 10 +++------- RELEASE.md | 36 ------------------------------------ 2 files changed, 3 insertions(+), 43 deletions(-) delete mode 100644 RELEASE.md diff --git a/README.md b/README.md index a108bed08..2d8555c70 100644 --- a/README.md +++ b/README.md @@ -5,21 +5,17 @@ but become mostly stale when the Yelp team ceased using Elastalert. 
Elastalert 2 is backwards compatible with the original Elastalert rules. +![CI Workflow](https://github.com/jertel/elastalert/workflows/master_build_test/badge.svg) + ## Documentation Documentation, including an FAQ, for Elastalert 2 can be found on [readthedocs.com][3]. This is the place to start if you're not familiar with Elastalert at all. The full list of platforms that Elastalert can fire alerts into can be found [in the documentation][4]. - ## Contributing -PRs are welcome, but must include tests, when possible. PRs will not be merged if they do not pass -the automated CI workflows. - -The current status of the CI workflow: - -![CI Workflow](https://github.com/jertel/elastalert/workflows/master_build_test/badge.svg) +Please see our [contributing guidelines](CONTRIBUTING.md). ## Docker diff --git a/RELEASE.md b/RELEASE.md deleted file mode 100644 index fe9756616..000000000 --- a/RELEASE.md +++ /dev/null @@ -1,36 +0,0 @@ -# Releases - -As Elastalert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme. It's up to the maintainers to determine when the project is ready for a release, however, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][5] and let us know. - -Maintainers, when creating a new release, follow the procedure below: - -1. Determine an appropriate new version number in the format _a.b.c_, using the following guidelines: - - The major version (a) should not change. - - The minor version (b) should be incremented if a new feature has been added or if a bug fix will have a significant user-impact. Reset the patch version to zero if the minor version is incremented. - - The patch version (c) should be incremented when low-impact bugs are fixed, or security vulnerabilities are patched. -2. Ensure the following are updated _before_ publishing/tagging the new release: - - [setup.py](setup.py): Match the version to the new release version - - [Chart.yaml](chart/elastalert2/Chart.yaml): Match chart version and the app version to the new release version (typically keep them in sync) - - [values.yaml](chart/elastalert2/values.yaml): Match the default image version to the new release version. - - [README.md](chart/elastalert2/README.md): Match the default image version to the new release version. -3. Double-check that the Docker image successfully built the latest image. -4. Create a [new][1] release. - - The title (and tag) of the release will be the same value as the new version determined in step 1. - - The description of the release will contain a bulleted list of all merged pull requests, in the following format: - `- change description #PR - @committer` - Ex: - `- Added new Foobar alerts #12345 - @jertel` - - Check the box to 'Create a discussion for this release'. - - Save the draft. -5. Verify that artifacts have been published: - - Python PIP package was [published][3] successfully. - - Helm chart has been [published][4] successfully. - - Docker Hub image was [tagged][2] successfully. -6. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. -7. Publish the release. 
-
-[1]: https://github.com/jertel/elastalert2/releases/new
-[2]: https://hub.docker.com/r/jertel/elastalert2/builds
-[3]: https://github.com/jertel/elastalert2/actions/workflows/python-publish.yml
-[4]: https://github.com/jertel/elastalert2/actions/workflows/upload_chart.yml
-[5]: https://github.com/jertel/elastalert2/discussions
\ No newline at end of file

From 1f0025c3498d58f8f8d47c36c752e9521b4e61c5 Mon Sep 17 00:00:00 2001
From: Jason Ertel
Date: Tue, 27 Apr 2021 13:02:27 -0400
Subject: [PATCH 0168/1065] Update repo with new release version

---
 CONTRIBUTING.md               | 46 +++++++++++++++++++++++++++++++++++
 README.md                     |  2 +-
 chart/elastalert2/Chart.yaml  |  4 +--
 chart/elastalert2/README.md   |  2 +-
 chart/elastalert2/values.yaml |  2 +-
 setup.py                      |  2 +-
 6 files changed, 52 insertions(+), 6 deletions(-)
 create mode 100644 CONTRIBUTING.md

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 000000000..219945f6f
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,46 @@
+# Contributing to Elastalert 2
+
+## Introduction
+
+PRs are welcome, but must include tests, when possible. PRs will not be merged if they do not pass
+the automated CI workflows.
+
+Before submitting the PR, review that you have included the following changes, where applicable:
+- Documentation: If you're adding new functionality, any new configuration options should be documented appropriately in the docs/ folder.
+- Helm Chart: If your new feature introduces settings, consider adding those to the Helm chart [README.md](chart/elastalert2/README.md) and [values.yaml](chart/elastalert2/values.yaml)
+- Examples: If your new feature includes new configuration options, review the [Example config file](config.yaml.example) to see if they should be added there for consistency with other configuration options.
+- Change log: Describe your contribution to the appropriate section(s) for the _Upcoming release_, in the [CHANGELOG.md](CHANGELOG.md) file.
+
+## Releases
+
+As Elastalert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme. It's up to the maintainers to determine when the project is ready for a release, however, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][5] and let us know.
+
+Maintainers, when creating a new release, follow the procedure below:
+
+1. Determine an appropriate new version number in the format _a.b.c_, using the following guidelines:
+   - The major version (a) should not change.
+   - The minor version (b) should be incremented if a new feature has been added or if a bug fix will have a significant user-impact. Reset the patch version to zero if the minor version is incremented.
+   - The patch version (c) should be incremented when low-impact bugs are fixed, or security vulnerabilities are patched.
+2. Ensure the following are updated _before_ publishing/tagging the new release:
+   - [setup.py](setup.py): Match the version to the new release version
+   - [Chart.yaml](chart/elastalert2/Chart.yaml): Match chart version and the app version to the new release version (typically keep them in sync)
+   - [values.yaml](chart/elastalert2/values.yaml): Match the default image version to the new release version.
+   - [README.md](chart/elastalert2/README.md): Match the default image version to the new release version.
+   - [CHANGELOG.md](CHANGELOG.md): This must contain all PRs and any other relevant notes about this release.
+3. Create a [new][1] release.
+   - The title (and tag) of the release will be the same value as the new version determined in step 1.
+   - Paste the new version change notes from CHANGELOG.md into the description field.
+   - Check the box to 'Create a discussion for this release'.
+   - Save the draft.
+4. Verify that artifacts have been published:
+   - Python PIP package was [published][3] successfully.
+   - Helm chart has been [published][4] successfully.
+   - Container image was [built and published][2] successfully.
+5. Wait a minimum of a few hours for community feedback in case someone notices a problem with the upcoming release.
+6. Publish the release.
+
+[1]: https://github.com/jertel/elastalert2/releases/new
+[2]: https://github.com/jertel/elastalert2/actions/workflows/publish_image.yml
+[3]: https://github.com/jertel/elastalert2/actions/workflows/python-publish.yml
+[4]: https://github.com/jertel/elastalert2/actions/workflows/upload_chart.yml
+[5]: https://github.com/jertel/elastalert2/discussions
\ No newline at end of file
diff --git a/README.md b/README.md
index 2d8555c70..b1a778293 100644
--- a/README.md
+++ b/README.md
@@ -21,7 +21,7 @@ Please see our [contributing guidelines](CONTRIBUTING.md).
 
 If you're interested in a pre-built Docker image check out the [elastalert2][2] project on Docker Hub.
 
-Be aware that the `latest` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need to utilize a versioned tag, such as `2.0.3` instead.
+Be aware that the `latest` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need to utilize a versioned tag, such as `2.0.4` instead.
 
 A properly configured elastalert_config.json file must be mounted into the container during startup of the container.
Use the [example file][1] provided as a template, and once saved locally to a file such as `/tmp/elastalert.yaml`, run the container as follows: diff --git a/chart/elastalert2/Chart.yaml b/chart/elastalert2/Chart.yaml index 23f24baf7..2e1a05f8e 100644 --- a/chart/elastalert2/Chart.yaml +++ b/chart/elastalert2/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: Automated rule-based alerting for Elasticsearch name: elastalert2 -version: 2.0.3 -appVersion: 2.0.3 +version: 2.0.4 +appVersion: 2.0.4 home: https://github.com/jertel/elastalert2 sources: - https://github.com/jertel/elastalert2 diff --git a/chart/elastalert2/README.md b/chart/elastalert2/README.md index 86132ff55..3591e516c 100644 --- a/chart/elastalert2/README.md +++ b/chart/elastalert2/README.md @@ -47,7 +47,7 @@ The command removes all the Kubernetes components associated with the chart and | Parameter | Description | Default | |----------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|---------------------------------| | `image.repository` | docker image | jertel/elastalert-docker | -| `image.tag` | docker image tag | 2.0.3 | +| `image.tag` | docker image tag | 2.0.4 | | `image.pullPolicy` | image pull policy | IfNotPresent | | `podAnnotations` | Annotations to be added to pods | {} | | `command` | command override for container | `NULL` | diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml index d54f54bd5..e2d839877 100644 --- a/chart/elastalert2/values.yaml +++ b/chart/elastalert2/values.yaml @@ -25,7 +25,7 @@ image: # docker image repository: jertel/elastalert2 # docker image tag - tag: 2.0.3 + tag: 2.0.4 pullPolicy: IfNotPresent resources: {} diff --git a/setup.py b/setup.py index 4870f4042..c1dcc7bcb 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ base_dir = os.path.dirname(__file__) setup( name='elastalert2', - version='2.0.3', + version='2.0.4', description='Automated rule-based alerting for Elasticsearch', setup_requires='setuptools', license='Apache 2.0', From 6b1f9b5fb7106849e570bb1c526117dd2b1f8ff8 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Tue, 27 Apr 2021 13:02:51 -0400 Subject: [PATCH 0169/1065] Update repo with new release version --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 219945f6f..46643b88a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -35,7 +35,7 @@ Maintainers, when creating a new release, follow the procedure below: 4. Verify that artifacts have been published: - Python PIP package was [published][3] successfully. - Helm chart has been [published][4] successfully. - - Container image was [built and published][2] successfully. + - Container image was [published][2] successfully. 5. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. 6. Publish the release. 
From 0dbfbb87599cb537f3cfb12cab64c298e600f1b1 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Tue, 27 Apr 2021 13:17:21 -0400 Subject: [PATCH 0170/1065] Clarify contribution guidelines; improve action triggers for releases --- .github/workflows/python-publish.yml | 5 +++-- .github/workflows/upload_chart.yml | 5 +++-- CONTRIBUTING.md | 5 +---- 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 0b003bac8..f36aec395 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -4,8 +4,9 @@ name: upload_python_package on: - release: - types: [created] + push: + tags: + - 2.* jobs: deploy: diff --git a/.github/workflows/upload_chart.yml b/.github/workflows/upload_chart.yml index 7cba508b6..ba29b719f 100644 --- a/.github/workflows/upload_chart.yml +++ b/.github/workflows/upload_chart.yml @@ -1,8 +1,9 @@ name: upload_chart on: - release: - types: [created] + push: + tags: + - 2.* jobs: build: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 46643b88a..52c1dddae 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -27,17 +27,14 @@ Maintainers, when creating a new release, follow the procedure below: - [values.yaml](chart/elastalert2/values.yaml): Match the default image version to the new release version. - [README.md](chart/elastalert2/README.md): Match the default image version to the new release version. - [CHANGELOG.md](CHANGELOG.md): This must contain all PRs and any other relevent notes about this release. -3. Create a [new][1] release. +3. Publish a [new][1] release. - The title (and tag) of the release will be the same value as the new version determined in step 1. - Paste the new version change notes from CHANGELOG.md into the description field. - Check the box to 'Create a discussion for this release'. - - Save the draft. 4. Verify that artifacts have been published: - Python PIP package was [published][3] successfully. - Helm chart has been [published][4] successfully. - Container image was [published][2] successfully. -5. Wait a minimum of a few hours for community feedback in case someone notices a problem with the the upcoming release. -6. Publish the release. [1]: https://github.com/jertel/elastalert2/releases/new [2]: https://github.com/jertel/elastalert2/actions/workflows/publish_image.yml From 06818877e2e25144f3c3bfa35362081f9d9c4130 Mon Sep 17 00:00:00 2001 From: Greg Mackinnon Date: Thu, 29 Apr 2021 15:14:05 -0400 Subject: [PATCH 0171/1065] python:slim-buster Dockerfile, and canonical file paths --- Dockerfile | 38 ++++++++++----------- Dockerfile-alpine | 37 ++++++++++++++++++++ README.md | 10 +++--- chart/elastalert2/templates/config.yaml | 2 +- chart/elastalert2/templates/deployment.yaml | 7 ++-- chart/elastalert2/values.yaml | 4 +-- 6 files changed, 68 insertions(+), 30 deletions(-) create mode 100644 Dockerfile-alpine diff --git a/Dockerfile b/Dockerfile index 3ace09dc6..1f0144e7a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,8 @@ -FROM python:alpine as builder +FROM python:slim-buster as builder LABEL description="Elastalert 2 Official Image" LABEL maintainer="Jason Ertel (jertel at codesim.com)" -RUN apk --update upgrade && \ - rm -rf /var/cache/apk/* && \ - mkdir -p /tmp/elastalert - COPY . 
/tmp/elastalert RUN mkdir -p /opt/elastalert && \ @@ -14,23 +10,27 @@ RUN mkdir -p /opt/elastalert && \ pip install setuptools wheel && \ python setup.py sdist bdist_wheel -FROM python:alpine +FROM python:slim-buster COPY --from=builder /tmp/elastalert/dist/*.tar.gz /tmp/ -RUN apk --update upgrade && \ - apk add gcc libffi-dev musl-dev python3-dev openssl-dev tzdata libmagic cargo jq curl && \ - pip install /tmp/*.tar.gz && \ - apk del gcc libffi-dev musl-dev python3-dev openssl-dev cargo && \ - rm -rf /var/cache/apk/* - -RUN mkdir -p /opt/elastalert && \ - echo "#!/bin/sh" >> /opt/elastalert/run.sh && \ - echo "set -e" >> /opt/elastalert/run.sh && \ - echo "elastalert-create-index --config /opt/config/elastalert_config.yaml" >> /opt/elastalert/run.sh && \ - echo "elastalert --config /opt/config/elastalert_config.yaml \"\$@\"" >> /opt/elastalert/run.sh && \ - chmod +x /opt/elastalert/run.sh - +RUN apt-get update && apt-get -y upgrade &&\ + apt-get install -y tzdata cargo libmagic1 jq curl &&\ + rm -rf /var/lib/apt/lists/* &&\ + pip install /tmp/*.tar.gz &&\ + rm -rf /tmp/* &&\ + mkdir -p /opt/elastalert &&\ + echo "#!/bin/sh" >> /opt/elastalert/run.sh &&\ + echo "set -e" >> /opt/elastalert/run.sh &&\ + echo "elastalert-create-index --config /opt/elastalert/config.yaml" \ + >> /opt/elastalert/run.sh &&\ + echo "elastalert --config /opt/elastalert/config.yaml \"\$@\"" \ + >> /opt/elastalert/run.sh &&\ + chmod +x /opt/elastalert/run.sh &&\ + useradd -u 1000 -M -b /opt/elastalert -s /sbin/nologin \ + -c "ElastAlert User" elastalert + +USER elastalert ENV TZ "UTC" WORKDIR /opt/elastalert diff --git a/Dockerfile-alpine b/Dockerfile-alpine new file mode 100644 index 000000000..3ace09dc6 --- /dev/null +++ b/Dockerfile-alpine @@ -0,0 +1,37 @@ +FROM python:alpine as builder + +LABEL description="Elastalert 2 Official Image" +LABEL maintainer="Jason Ertel (jertel at codesim.com)" + +RUN apk --update upgrade && \ + rm -rf /var/cache/apk/* && \ + mkdir -p /tmp/elastalert + +COPY . /tmp/elastalert + +RUN mkdir -p /opt/elastalert && \ + cd /tmp/elastalert && \ + pip install setuptools wheel && \ + python setup.py sdist bdist_wheel + +FROM python:alpine + +COPY --from=builder /tmp/elastalert/dist/*.tar.gz /tmp/ + +RUN apk --update upgrade && \ + apk add gcc libffi-dev musl-dev python3-dev openssl-dev tzdata libmagic cargo jq curl && \ + pip install /tmp/*.tar.gz && \ + apk del gcc libffi-dev musl-dev python3-dev openssl-dev cargo && \ + rm -rf /var/cache/apk/* + +RUN mkdir -p /opt/elastalert && \ + echo "#!/bin/sh" >> /opt/elastalert/run.sh && \ + echo "set -e" >> /opt/elastalert/run.sh && \ + echo "elastalert-create-index --config /opt/config/elastalert_config.yaml" >> /opt/elastalert/run.sh && \ + echo "elastalert --config /opt/config/elastalert_config.yaml \"\$@\"" >> /opt/elastalert/run.sh && \ + chmod +x /opt/elastalert/run.sh + +ENV TZ "UTC" + +WORKDIR /opt/elastalert +ENTRYPOINT ["/opt/elastalert/run.sh"] diff --git a/README.md b/README.md index b1a778293..4f07595d3 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Elastalert 2 Elastalert 2 is the supported fork of [Elastalert][0], which had been maintained by the Yelp team -but become mostly stale when the Yelp team ceased using Elastalert. +but become mostly stale when the Yelp team ceased using Elastalert. Elastalert 2 is backwards compatible with the original Elastalert rules. 
@@ -23,14 +23,15 @@ If you're interested in a pre-built Docker image check out the [elastalert2][2] Be aware that the `latest` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need utilize a versioned tag, such as `2.0.4` instead. -A properly configured elastalert_config.json file must be mounted into the container during startup of the container. Use the [example file][1] provided as a template, and once saved locally to a file such as `/tmp/elastalert.yaml`, run the container as follows: +A properly configured config.yaml file must be mounted into the container during startup of the container. Use the [example file][1] provided as a template, and once saved locally to a file such as `/tmp/elastalert.yaml`, run the container as follows: ```bash -docker run -d -v /tmp/elastalert.yaml:/opt/config/elastalert_config.yaml jertel/elastalert2 +docker run -d -v /tmp/elastalert.yaml:/opt/elastalert/config.yaml jertel/elastalert2 ``` To build the image locally, install Docker and then run the following command: -``` + +```bash docker build . -t elastalert ``` @@ -48,4 +49,3 @@ Elastalert 2 is licensed under the [Apache License, Version 2.0][5]. [3]: https://elastalert2.readthedocs.io/ [4]: https://elastalert2.readthedocs.io/en/latest/ruletypes.html#alerts [5]: http://www.apache.org/licenses/LICENSE-2 - diff --git a/chart/elastalert2/templates/config.yaml b/chart/elastalert2/templates/config.yaml index d4f3d27ab..bf844aa88 100644 --- a/chart/elastalert2/templates/config.yaml +++ b/chart/elastalert2/templates/config.yaml @@ -11,7 +11,7 @@ metadata: data: elastalert_config: |- --- - rules_folder: /opt/rules + rules_folder: /opt/elastalert/rules scan_subdirectories: false run_every: minutes: {{ .Values.runIntervalMins }} diff --git a/chart/elastalert2/templates/deployment.yaml b/chart/elastalert2/templates/deployment.yaml index 0d696a122..47a3faf8d 100644 --- a/chart/elastalert2/templates/deployment.yaml +++ b/chart/elastalert2/templates/deployment.yaml @@ -42,9 +42,10 @@ spec: {{- end }} volumeMounts: - name: config - mountPath: '/opt/config' + mountPath: '/opt/elastalert/config.yaml' + subPath: config.yaml - name: rules - mountPath: '/opt/rules' + mountPath: '/opt/elastalert/rules' {{- if .Values.elasticsearch.certsVolumeMounts }} {{ toYaml .Values.elasticsearch.certsVolumeMounts | indent 10 }} {{- end }} @@ -115,7 +116,7 @@ spec: {{- end }} items: - key: elastalert_config - path: elastalert_config.yaml + path: config.yaml {{- if .Values.elasticsearch.certsVolumes }} {{ toYaml .Values.elasticsearch.certsVolumes | indent 8 }} {{- end }} diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml index e2d839877..702e7542b 100644 --- a/chart/elastalert2/values.yaml +++ b/chart/elastalert2/values.yaml @@ -94,7 +94,7 @@ extraConfigOptions: {} #type: Opaque #stringData: # elastalert_config: |- -# rules_folder: /opt/rules +# rules_folder: /opt/elastalert/rules # scan_subdirectories: false # run_every: # minutes: 1 @@ -223,6 +223,6 @@ extraVolumes: [] extraVolumeMounts: [] # - name: smtp-auth - # mountPath: /opt/config-smtp/smtp_auth.yaml + # mountPath: /opt/elastalert/config-smtp/smtp_auth.yaml # subPath: smtp_auth.yaml # readOnly: true From 3e85eef03e15e18250ac491c5a7925aac8ea7e0a Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Thu, 29 Apr 2021 16:15:42 -0400 Subject: [PATCH 0172/1065] Prevent deepcopy of Jinja Template object --- elastalert/test_rule.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) 
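
Note: copy.deepcopy() can fail on a rule that carries a compiled jinja2
Template, because the template object holds references (such as its
environment) that do not copy cleanly. The fix below detaches the template
before copying and re-attaches it to both dicts afterward. A minimal sketch
of the same pattern, using an illustrative rule dict:

    import copy

    from jinja2 import Template

    rule_yaml = {'name': 'example_rule', 'jinja_template': Template('{{ num_hits }} hits')}

    # Detach the non-copyable Template before the deepcopy
    template = rule_yaml.get('jinja_template')
    rule_yaml['jinja_template'] = None

    copied_rule = copy.deepcopy(rule_yaml)

    # Restore the original Template onto both the source and the copy
    rule_yaml['jinja_template'] = template
    copied_rule['jinja_template'] = template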
diff --git a/elastalert/test_rule.py b/elastalert/test_rule.py index af1eaa497..d019eadde 100644 --- a/elastalert/test_rule.py +++ b/elastalert/test_rule.py @@ -417,7 +417,18 @@ def run_rule_test(self): with open(args.json, 'r') as data_file: self.data = json.loads(data_file.read()) else: - hits = self.test_file(copy.deepcopy(rule_yaml), args) + # Temporarily remove the jinja_template, if it exists, to avoid deepcopy issues + template = rule_yaml.get("jinja_template") + rule_yaml["jinja_template"] = None + + # Copy the rule object without the template in it + copied_rule = copy.deepcopy(rule_yaml) + + # Set the template back onto the original rule object and the newly copied object + rule_yaml["jinja_template"] = template + copied_rule["jinja_template"] = template + + hits = self.test_file(copied_rule, args) if hits and args.formatted_output: self.formatted_output['results'] = json.loads(json.dumps(hits)) if hits and args.save: From 2422bedc10b56dc78be029920ab0ad797fee4ba2 Mon Sep 17 00:00:00 2001 From: Greg Mackinnon Date: Thu, 29 Apr 2021 16:26:15 -0400 Subject: [PATCH 0173/1065] version and documentation updates. --- CHANGELOG.md | 13 +++++++++++++ chart/elastalert2/Chart.yaml | 4 ++-- chart/elastalert2/values.yaml | 2 +- setup.py | 2 +- 4 files changed, 17 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8d7a59d7b..52c2de1b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,19 @@ ## Other changes - None +# 2.0.5 + +## Breaking changes +- Default base path changed to `/opt/elastalert` in the Dockerfile and in Helm charts. + +## New features + +## Other changes +- Dockerfile base image changed to python/slim-buster to take advantage of pre-build python wheels and accelerate build times. +- Dockerfile now creates and runs as a non-root user "elastalert". +- tmp files and dev packages removed from the final container image. +- Documentation updates in support of the modified container base path. 
+ # 2.0.4 ## Breaking changes diff --git a/chart/elastalert2/Chart.yaml b/chart/elastalert2/Chart.yaml index 2e1a05f8e..f7f7dfbd8 100644 --- a/chart/elastalert2/Chart.yaml +++ b/chart/elastalert2/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: Automated rule-based alerting for Elasticsearch name: elastalert2 -version: 2.0.4 -appVersion: 2.0.4 +version: 2.0.5 +appVersion: 2.0.5 home: https://github.com/jertel/elastalert2 sources: - https://github.com/jertel/elastalert2 diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml index 702e7542b..bd721dd29 100644 --- a/chart/elastalert2/values.yaml +++ b/chart/elastalert2/values.yaml @@ -25,7 +25,7 @@ image: # docker image repository: jertel/elastalert2 # docker image tag - tag: 2.0.4 + tag: 2.0.5 pullPolicy: IfNotPresent resources: {} diff --git a/setup.py b/setup.py index c1dcc7bcb..dc6c8acf1 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ base_dir = os.path.dirname(__file__) setup( name='elastalert2', - version='2.0.4', + version='2.0.5', description='Automated rule-based alerting for Elasticsearch', setup_requires='setuptools', license='Apache 2.0', From e878590f69567c04a263076c11cde07031e76e8f Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Thu, 29 Apr 2021 16:44:46 -0400 Subject: [PATCH 0174/1065] Clarified that only maintainers need to be concerned with the release process --- CONTRIBUTING.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 52c1dddae..03d7f82e7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -13,6 +13,8 @@ Before submitting the PR review that you have included the following changes, wh ## Releases +This section is only applicable to "maintainers". PR "contributors" do not need to follow the below procedure. + As Elastalert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme. It's up to the maintainers to determine when the project is ready for a release, however, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][5] and let us know. Maintainers, when creating a new release, follow the procedure below: From b569460036a1a2f40b3320272161b4956e06d01a Mon Sep 17 00:00:00 2001 From: Greg Mackinnon Date: Thu, 29 Apr 2021 20:15:22 -0400 Subject: [PATCH 0175/1065] updated CHANGELOG.md - Dockerfile base image marked as breaking change. --- CHANGELOG.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 52c2de1b1..cef0b144c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,14 +10,15 @@ - None # 2.0.5 - ## Breaking changes +- Dockerfile Base image changed from python/alpine to python/slim-buster to take advantage of pre-build python wheels and accelerate build times. - Default base path changed to `/opt/elastalert` in the Dockerfile and in Helm charts. ## New features +- None ## Other changes -- Dockerfile base image changed to python/slim-buster to take advantage of pre-build python wheels and accelerate build times. +- Dockerfile base image changed to python/slim-buster - Dockerfile now creates and runs as a non-root user "elastalert". - tmp files and dev packages removed from the final container image. - Documentation updates in support of the modified container base path. 
From 26c6e59e0b27c1919861058a62533a88e0832d6b Mon Sep 17 00:00:00 2001 From: Greg Mackinnon Date: Thu, 29 Apr 2021 20:39:42 -0400 Subject: [PATCH 0176/1065] Reverting version tags --- CHANGELOG.md | 5 ++--- chart/elastalert2/Chart.yaml | 4 ++-- chart/elastalert2/values.yaml | 2 +- setup.py | 2 +- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cef0b144c..b4dfd8386 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,16 +9,15 @@ ## Other changes - None -# 2.0.5 +# 2.x.x ## Breaking changes -- Dockerfile Base image changed from python/alpine to python/slim-buster to take advantage of pre-build python wheels and accelerate build times. +- Dockerfile Base image changed from `python/alpine` to `python/slim-buster` to take advantage of pre-build python wheels and accelerate build times. - Default base path changed to `/opt/elastalert` in the Dockerfile and in Helm charts. ## New features - None ## Other changes -- Dockerfile base image changed to python/slim-buster - Dockerfile now creates and runs as a non-root user "elastalert". - tmp files and dev packages removed from the final container image. - Documentation updates in support of the modified container base path. diff --git a/chart/elastalert2/Chart.yaml b/chart/elastalert2/Chart.yaml index f7f7dfbd8..2e1a05f8e 100644 --- a/chart/elastalert2/Chart.yaml +++ b/chart/elastalert2/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: Automated rule-based alerting for Elasticsearch name: elastalert2 -version: 2.0.5 -appVersion: 2.0.5 +version: 2.0.4 +appVersion: 2.0.4 home: https://github.com/jertel/elastalert2 sources: - https://github.com/jertel/elastalert2 diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml index bd721dd29..702e7542b 100644 --- a/chart/elastalert2/values.yaml +++ b/chart/elastalert2/values.yaml @@ -25,7 +25,7 @@ image: # docker image repository: jertel/elastalert2 # docker image tag - tag: 2.0.5 + tag: 2.0.4 pullPolicy: IfNotPresent resources: {} diff --git a/setup.py b/setup.py index dc6c8acf1..c1dcc7bcb 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ base_dir = os.path.dirname(__file__) setup( name='elastalert2', - version='2.0.5', + version='2.0.4', description='Automated rule-based alerting for Elasticsearch', setup_requires='setuptools', license='Apache 2.0', From 5bbd7df775b228045c800db0a5ee36a256ee5680 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 30 Apr 2021 21:23:05 +0900 Subject: [PATCH 0177/1065] Add support for AWS SES --- docs/source/ruletypes.rst | 59 +++++++++++++++++++++ elastalert/alerts.py | 105 ++++++++++++++++++++++++++++++++++++++ elastalert/loaders.py | 3 +- 3 files changed, 166 insertions(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 19d0d5474..bd145aeab 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1735,6 +1735,65 @@ Example usage:: Environment: '$VAR' # environment variable Message: { field: message } # field in the first match +AWS SES +~~~~~~~ + +The AWS SES alerter is similar to Email alerter but uses AWS SES to send emails. The AWS SES alerter can use AWS credentials +from the rule yaml, standard AWS config files or environment variables. + +AWS SES requires one option: + +``ses_email``: An address or list of addresses to sent the alert to. + +``ses_from_addr``: This sets the From header in the email. + +Optional: + +``ses_aws_access_key``: An access key to connect to AWS SES with. 
+
+``ses_aws_secret_access_key``: The secret key associated with the access key.
+
+``ses_aws_region``: The AWS region in which the AWS SES resource is located. Default is us-east-1.
+
+``ses_aws_profile``: The AWS profile to use. If none specified, the default will be used.
+
+``ses_email_reply_to``: This sets the Reply-To header in the email.
+
+``ses_cc``: This adds the CC emails to the list of recipients. By default, this is left empty.
+
+``ses_bcc``: This adds the BCC emails to the list of recipients but they do not show up in the email message. By default, this is left empty.
+
+Example usage when not using aws_profile::
+
+    alert:
+      - "ses"
+    ses_aws_access_key_id: "XXXXXXXXXXXXXXXXXX"
+    ses_aws_secret_access_key: "YYYYYYYYYYYYYYYYYYYY"
+    ses_aws_region: "us-east-1"
+    ses_from_addr: "xxxx1@xxx.com"
+    ses_email: "xxxx1@xxx.com"
+
+Example usage when using aws_profile::
+
+    # Create ~/.aws/credentials
+
+    [default]
+    aws_access_key_id = xxxxxxxxxxxxxxxxxxxx
+    aws_secret_access_key = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
+
+    # Create ~/.aws/config
+
+    [default]
+    region = us-east-1
+
+    # alert rule setting
+
+    alert:
+      - "ses"
+    ses_aws_profile: "default"
+    ses_from_addr: "xxxx1@xxx.com"
+    ses_email: "xxxx1@xxx.com"
+
 AWS SNS
 ~~~~~~~
diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index 7d09272bc..baa16e90d 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -2274,3 +2274,108 @@ def alert(self, matches):
 
     def get_info(self):
         return {'type': 'datadog'}
+
+
+class SesAlerter(Alerter):
+    """ Sends an email alert using AWS SES """
+    required_options = frozenset(['ses_email', 'ses_from_addr'])
+
+    def __init__(self, *args):
+        super(SesAlerter, self).__init__(*args)
+
+        self.aws_access_key_id = self.rule.get('ses_aws_access_key_id')
+        self.aws_secret_access_key = self.rule.get('ses_aws_secret_access_key')
+        self.aws_region = self.rule.get('ses_aws_region', 'us-east-1')
+        self.aws_profile = self.rule.get('ses_aws_profile', '')
+
+        self.from_addr = self.rule.get('ses_from_addr')
+
+        # Convert email to a list if it isn't already
+        if isinstance(self.rule['ses_email'], str):
+            self.rule['ses_email'] = [self.rule['ses_email']]
+
+        # If there is a cc then also convert it to a list if it isn't
+        cc = self.rule.get('ses_cc')
+        if cc and isinstance(cc, str):
+            self.rule['ses_cc'] = [self.rule['ses_cc']]
+
+        # If there is a bcc then also convert it to a list if it isn't
+        bcc = self.rule.get('ses_bcc')
+        if bcc and isinstance(bcc, str):
+            self.rule['ses_bcc'] = [self.rule['ses_bcc']]
+
+        # If there is an email_reply_to then also convert it to a list if it isn't
+        reply_to = self.rule.get('ses_email_reply_to')
+        if reply_to and isinstance(reply_to, str):
+            self.rule['ses_email_reply_to'] = [self.rule['ses_email_reply_to']]
+
+        add_suffix = self.rule.get('ses_email_add_domain')
+        if add_suffix and not add_suffix.startswith('@'):
+            self.rule['ses_email_add_domain'] = '@' + add_suffix
+
+    def alert(self, matches):
+        body = self.create_alert_body(matches)
+
+        to_addr = self.rule['ses_email']
+        if 'ses_email_from_field' in self.rule:
+            recipient = lookup_es_key(matches[0], self.rule['ses_email_from_field'])
+            if isinstance(recipient, str):
+                if '@' in recipient:
+                    to_addr = [recipient]
+                elif 'ses_email_add_domain' in self.rule:
+                    to_addr = [recipient + self.rule['ses_email_add_domain']]
+            elif isinstance(recipient, list):
+                to_addr = recipient
+                if 'ses_email_add_domain' in self.rule:
+                    to_addr = [name + self.rule['ses_email_add_domain'] for name in to_addr]
+
+        if self.aws_profile != '':
+            session
= boto3.Session(profile_name=self.aws_profile) + else: + session = boto3.Session( + aws_access_key_id=self.aws_access_key_id, + aws_secret_access_key=self.aws_secret_access_key, + region_name=self.aws_region + ) + + client = session.client('ses') + try: + client.send_email( + Source=self.from_addr, + Destination={ + 'ToAddresses': to_addr, + 'CcAddresses': self.rule.get('ses_cc', []), + 'BccAddresses': self.rule.get('ses_bcc', []) + }, + Message={ + 'Subject': { + 'Charset': 'UTF-8', + 'Data': self.create_title(matches), + }, + 'Body': { + 'Text': { + 'Charset': 'UTF-8', + 'Data': body, + } + } + }, + ReplyToAddresses=self.rule.get('ses_email_reply_to', [])) + except Exception as e: + raise EAException("Error sending ses: %s" % (e,)) + + elastalert_logger.info("Sent ses to %s" % (to_addr,)) + + def create_default_title(self, matches): + subject = 'Elastalert2: %s' % (self.rule['name']) + + # If the rule has a query_key, add that value plus timestamp to subject + if 'query_key' in self.rule: + qk = matches[0].get(self.rule['query_key']) + if qk: + subject += ' - %s' % (qk) + + return subject + + def get_info(self): + return {'type': 'ses', + 'recipients': self.rule['ses_email']} diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 0cbd0d26c..67e2d9a89 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -86,7 +86,8 @@ class RulesLoader(object): 'discord': alerts.DiscordAlerter, 'dingtalk': alerts.DingTalkAlerter, 'chatwork': alerts.ChatworkAlerter, - 'datadog': alerts.DatadogAlerter + 'datadog': alerts.DatadogAlerter, + 'ses': alerts.SesAlerter } # A partial ordering of alert types. Relative order will be preserved in the resulting alerts list From f7bfdb2c57710dd18d2fde21562c6a5c17f9d552 Mon Sep 17 00:00:00 2001 From: Greg Mackinnon Date: Fri, 30 Apr 2021 09:05:00 -0400 Subject: [PATCH 0178/1065] removed system packages from dockerfile --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 1f0144e7a..eb384bf2f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,7 +15,8 @@ FROM python:slim-buster COPY --from=builder /tmp/elastalert/dist/*.tar.gz /tmp/ RUN apt-get update && apt-get -y upgrade &&\ - apt-get install -y tzdata cargo libmagic1 jq curl &&\ + #apt-get install -y tzdata cargo libmagic1 jq curl &&\ + apt-get -y autoremove &&\ rm -rf /var/lib/apt/lists/* &&\ pip install /tmp/*.tar.gz &&\ rm -rf /tmp/* &&\ From 7c36d42e92466d75958e91be1b5f74b057717dce Mon Sep 17 00:00:00 2001 From: Greg Mackinnon Date: Fri, 30 Apr 2021 11:15:21 -0400 Subject: [PATCH 0179/1065] CHANGELOG update --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b4dfd8386..5994c5238 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ # 2.x.x ## Breaking changes - Dockerfile Base image changed from `python/alpine` to `python/slim-buster` to take advantage of pre-build python wheels and accelerate build times. +- System packages removed from the Dockerfile: All dev packages, cargo, libmagic, jq, curl. Image size reduced to 244Mb. - Default base path changed to `/opt/elastalert` in the Dockerfile and in Helm charts. 
## New features From 36bc17cc4e2bbb8c5741c2b858141f335b187fc3 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 1 May 2021 09:22:18 -0400 Subject: [PATCH 0180/1065] Remove old alpine Dockerfile (this can always be pulled from git history if someone needs it); Allow custom UID/GID for elastalert user in Dockerfile; Re-add jq and curl to Dockerfile --- CHANGELOG.md | 20 +++++++++++--------- Dockerfile | 35 ++++++++++++++++++++--------------- Dockerfile-alpine | 37 ------------------------------------- 3 files changed, 31 insertions(+), 61 deletions(-) delete mode 100644 Dockerfile-alpine diff --git a/CHANGELOG.md b/CHANGELOG.md index 5994c5238..16a7554fd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -# Upcoming release +# Template ## Breaking changes - None @@ -9,19 +9,21 @@ ## Other changes - None -# 2.x.x +# Upcoming Release + ## Breaking changes -- Dockerfile Base image changed from `python/alpine` to `python/slim-buster` to take advantage of pre-build python wheels and accelerate build times. -- System packages removed from the Dockerfile: All dev packages, cargo, libmagic, jq, curl. Image size reduced to 244Mb. -- Default base path changed to `/opt/elastalert` in the Dockerfile and in Helm charts. +- Dockerfile refactor for performance and size improvements - [#102](https://github.com/jertel/elastalert2/pull/102) - @jgregmac + - Dockerfile base image changed from `python/alpine` to `python/slim-buster` to take advantage of pre-build python wheels, accelerate build times, and reduce image size. If you have customized an image, based on jertel/elastalert2, you may need to make adjustments. + - Default base path changed to `/opt/elastalert` in the Dockerfile and in Helm charts. Update your volume binds accordingly. + - Dockerfile now runs as a non-root user "elastalert". Ensure your volumes are accessible by this non-root user. + - System packages removed from the Dockerfile: All dev packages, cargo, libmagic. Image size reduced to 250Mb. + - `tmp` files and dev packages removed from the final container image. ## New features -- None +- Added support for alerting via Amazon Simple Email System (SES) - [#105](https://github.com/jertel/elastalert2/pull/105) - @nsano-rururu ## Other changes -- Dockerfile now creates and runs as a non-root user "elastalert". -- tmp files and dev packages removed from the final container image. -- Documentation updates in support of the modified container base path. 
+- Fix issue with testing alerts that contain Jinja templates - [#101](https://github.com/jertel/elastalert2/pull/101) - @jertel # 2.0.4 diff --git a/Dockerfile b/Dockerfile index eb384bf2f..268cf78a1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,26 +12,31 @@ RUN mkdir -p /opt/elastalert && \ FROM python:slim-buster +ARG GID=1000 +ARG UID=1000 +ARG USERNAME=elastalert + COPY --from=builder /tmp/elastalert/dist/*.tar.gz /tmp/ -RUN apt-get update && apt-get -y upgrade &&\ - #apt-get install -y tzdata cargo libmagic1 jq curl &&\ - apt-get -y autoremove &&\ - rm -rf /var/lib/apt/lists/* &&\ - pip install /tmp/*.tar.gz &&\ - rm -rf /tmp/* &&\ - mkdir -p /opt/elastalert &&\ - echo "#!/bin/sh" >> /opt/elastalert/run.sh &&\ - echo "set -e" >> /opt/elastalert/run.sh &&\ +RUN apt-get update && apt-get -y upgrade && \ + apt-get install -y jq curl && \ + apt-get -y autoremove && \ + rm -rf /var/lib/apt/lists/* && \ + pip install /tmp/*.tar.gz && \ + rm -rf /tmp/* && \ + mkdir -p /opt/elastalert && \ + echo "#!/bin/sh" >> /opt/elastalert/run.sh && \ + echo "set -e" >> /opt/elastalert/run.sh && \ echo "elastalert-create-index --config /opt/elastalert/config.yaml" \ - >> /opt/elastalert/run.sh &&\ + >> /opt/elastalert/run.sh && \ echo "elastalert --config /opt/elastalert/config.yaml \"\$@\"" \ - >> /opt/elastalert/run.sh &&\ - chmod +x /opt/elastalert/run.sh &&\ - useradd -u 1000 -M -b /opt/elastalert -s /sbin/nologin \ - -c "ElastAlert User" elastalert + >> /opt/elastalert/run.sh && \ + chmod +x /opt/elastalert/run.sh && \ + groupadd -g ${GID} ${USERNAME} && \ + useradd -u ${UID} -g ${GID} -M -b /opt/elastalert -s /sbin/nologin \ + -c "Elastalert2 User" ${USERNAME} -USER elastalert +USER ${USERNAME} ENV TZ "UTC" WORKDIR /opt/elastalert diff --git a/Dockerfile-alpine b/Dockerfile-alpine deleted file mode 100644 index 3ace09dc6..000000000 --- a/Dockerfile-alpine +++ /dev/null @@ -1,37 +0,0 @@ -FROM python:alpine as builder - -LABEL description="Elastalert 2 Official Image" -LABEL maintainer="Jason Ertel (jertel at codesim.com)" - -RUN apk --update upgrade && \ - rm -rf /var/cache/apk/* && \ - mkdir -p /tmp/elastalert - -COPY . 
/tmp/elastalert - -RUN mkdir -p /opt/elastalert && \ - cd /tmp/elastalert && \ - pip install setuptools wheel && \ - python setup.py sdist bdist_wheel - -FROM python:alpine - -COPY --from=builder /tmp/elastalert/dist/*.tar.gz /tmp/ - -RUN apk --update upgrade && \ - apk add gcc libffi-dev musl-dev python3-dev openssl-dev tzdata libmagic cargo jq curl && \ - pip install /tmp/*.tar.gz && \ - apk del gcc libffi-dev musl-dev python3-dev openssl-dev cargo && \ - rm -rf /var/cache/apk/* - -RUN mkdir -p /opt/elastalert && \ - echo "#!/bin/sh" >> /opt/elastalert/run.sh && \ - echo "set -e" >> /opt/elastalert/run.sh && \ - echo "elastalert-create-index --config /opt/config/elastalert_config.yaml" >> /opt/elastalert/run.sh && \ - echo "elastalert --config /opt/config/elastalert_config.yaml \"\$@\"" >> /opt/elastalert/run.sh && \ - chmod +x /opt/elastalert/run.sh - -ENV TZ "UTC" - -WORKDIR /opt/elastalert -ENTRYPOINT ["/opt/elastalert/run.sh"] From e5cb295bca1ea3f0429b127ad314e444981a896a Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 1 May 2021 09:32:26 -0400 Subject: [PATCH 0181/1065] Auto tag images with major version so users can follow along latest release without pulling unreleased commits --- .github/workflows/publish_image.yml | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/.github/workflows/publish_image.yml b/.github/workflows/publish_image.yml index 086ca5420..1b2e9d5b8 100644 --- a/.github/workflows/publish_image.yml +++ b/.github/workflows/publish_image.yml @@ -50,4 +50,14 @@ jobs: # Push to Docker Hub docker tag $IMAGE_NAME $DOCKER_REPO:$VERSION - docker push $DOCKER_REPO:$VERSION \ No newline at end of file + docker push $DOCKER_REPO:$VERSION + + if [[ "$VERSION" == "2."* ]]; then + # Push to GitHub Package + docker tag $IMAGE_NAME $IMAGE_ID:2 + docker push $IMAGE_ID:2 + + # Push to Docker Hub + docker tag $IMAGE_NAME $DOCKER_REPO:2 + docker push $DOCKER_REPO:2 + fi \ No newline at end of file From da813b7c7f31b9bfa513ecba07d973156643c290 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 1 May 2021 09:33:34 -0400 Subject: [PATCH 0182/1065] Auto tag images with major version so users can follow along latest release without pulling unreleased commits --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 4f07595d3..8c9484d86 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ Please see our [contributing guidelines](CONTRIBUTING.md). If you're interested in a pre-built Docker image check out the [elastalert2][2] project on Docker Hub. -Be aware that the `latest` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need utilize a versioned tag, such as `2.0.4` instead. +Be aware that the `latest` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need utilize a versioned tag, such as `2.0.4` instead, or `2` if you are comfortable with always using the latest released version of Elastalert2. A properly configured config.yaml file must be mounted into the container during startup of the container. 
Use the [example file][1] provided as a template, and once saved locally to a file such as `/tmp/elastalert.yaml`, run the container as follows:

From aa8695d6b140979db0041415f33ffc42ee9b27c8 Mon Sep 17 00:00:00 2001
From: Jason Ertel
Date: Sat, 1 May 2021 09:44:34 -0400
Subject: [PATCH 0183/1065] Correct license URL

---
 README.md | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 8c9484d86..09b28f947 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@
 The full list of platforms that Elastalert can fire alerts into can be found [in the documentation][4].
 
 ## Contributing
 
-Please see our [contributing guidelines](CONTRIBUTING.md).
+Please see our [contributing guidelines][6].
 
 ## Docker
 
@@ -37,7 +37,7 @@
 docker build . -t elastalert
 
 ## Kubernetes
 
-See the Helm chart [README.md](chart/elastalert2/README.md) for information on installing this application into an existing Kubernetes cluster.
+See the Helm chart [README.md][7] for information on installing this application into an existing Kubernetes cluster.
 
 ## License
 
@@ -48,4 +48,6 @@
 Elastalert 2 is licensed under the [Apache License, Version 2.0][5].
 
 [2]: https://hub.docker.com/r/jertel/elastalert2
 [3]: https://elastalert2.readthedocs.io/
 [4]: https://elastalert2.readthedocs.io/en/latest/ruletypes.html#alerts
-[5]: http://www.apache.org/licenses/LICENSE-2
+[5]: https://www.apache.org/licenses/LICENSE-2.0
+[6]: https://github.com/jertel/elastalert2/blob/master/CONTRIBUTING.md
+[7]: https://github.com/jertel/elastalert2/blob/master/chart/elastalert2/README.md
\ No newline at end of file

From c7bd38764c65175817b3b2b55ccd3175b26acb00 Mon Sep 17 00:00:00 2001
From: Jason Ertel
Date: Sat, 1 May 2021 09:58:23 -0400
Subject: [PATCH 0184/1065] Update all references of Elastalert to be
 consistent with the documented casing of ElastAlert

---
 CHANGELOG.md                          |  2 +-
 CONTRIBUTING.md                       |  4 ++--
 Dockerfile                            |  6 +++---
 README.md                             | 16 ++++++++--------
 chart/elastalert2/README.md           | 14 +++++++-------
 chart/elastalert2/templates/NOTES.txt |  2 +-
 chart/elastalert2/values.yaml         |  6 +++---
 docs/source/elastalert.rst            |  4 ++--
 docs/source/ruletypes.rst             | 18 +++++++++---------
 elastalert/alerts.py                  |  2 +-
 tests/alerts_test.py                  |  8 ++++----
 11 files changed, 41 insertions(+), 41 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 16a7554fd..673df3172 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -50,4 +50,4 @@
 - Now publishing container images to both DockerHub and to GitHub Packages for redundancy.
 - Container images are now built and published via GitHub actions instead of relying on DockerHub's automated builds.
 - Update PIP library description and Helm chart description to be consistent.
-- Continue updates to change references from _Elastalert_ to _Elastalert 2_
\ No newline at end of file
+- Continue updates to change references from _ElastAlert_ to _ElastAlert 2_
\ No newline at end of file
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 03d7f82e7..9e2162c1b 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,4 +1,4 @@
-# Contributing to Elastalert 2
+# Contributing to ElastAlert 2
 
 ## Introduction
 
@@ -15,7 +15,7 @@ Before submitting the PR review that you have included the following changes, wh
 
 This section is only applicable to "maintainers". PR "contributors" do not need to follow the below procedure.
 
-As Elastalert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme.
It's up to the maintainers to determine when the project is ready for a release, however, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][5] and let us know.
+As ElastAlert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme. It's up to the maintainers to determine when the project is ready for a release, however, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][5] and let us know.
 
 Maintainers, when creating a new release, follow the procedure below:
 
diff --git a/Dockerfile b/Dockerfile
index 268cf78a1..3d7dfb15a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,7 @@
 FROM python:slim-buster as builder
 
-LABEL description="Elastalert 2 Official Image"
-LABEL maintainer="Jason Ertel (jertel at codesim.com)"
+LABEL description="ElastAlert 2 Official Image"
+LABEL maintainer="Jason Ertel"
 
 COPY . /tmp/elastalert
 
@@ -34,7 +34,7 @@ RUN apt-get update && apt-get -y upgrade && \
     chmod +x /opt/elastalert/run.sh && \
     groupadd -g ${GID} ${USERNAME} && \
     useradd -u ${UID} -g ${GID} -M -b /opt/elastalert -s /sbin/nologin \
-    -c "Elastalert2 User" ${USERNAME}
+    -c "ElastAlert 2 User" ${USERNAME}
 
 USER ${USERNAME}
 ENV TZ "UTC"
diff --git a/README.md b/README.md
index 09b28f947..20f9531f0 100644
--- a/README.md
+++ b/README.md
@@ -1,17 +1,17 @@
-# Elastalert 2
+# ElastAlert 2
 
-Elastalert 2 is the supported fork of [Elastalert][0], which had been maintained by the Yelp team
-but become mostly stale when the Yelp team ceased using Elastalert.
+ElastAlert 2 is the supported fork of [ElastAlert][0], which had been maintained by the Yelp team
+but became mostly stale when the Yelp team ceased using ElastAlert.
 
-Elastalert 2 is backwards compatible with the original Elastalert rules.
+ElastAlert 2 is backwards compatible with the original ElastAlert rules.
 
 ![CI Workflow](https://github.com/jertel/elastalert/workflows/master_build_test/badge.svg)
 
 ## Documentation
 
-Documentation, including an FAQ, for Elastalert 2 can be found on [readthedocs.com][3]. This is the place to start if you're not familiar with Elastalert at all.
+Documentation, including an FAQ, for ElastAlert 2 can be found on [readthedocs.com][3]. This is the place to start if you're not familiar with ElastAlert 2 at all.
 
-The full list of platforms that Elastalert can fire alerts into can be found [in the documentation][4].
+The full list of platforms that ElastAlert 2 can fire alerts into can be found [in the documentation][4].
 
 ## Contributing
 
@@ -21,7 +21,7 @@ Please see our [contributing guidelines][6].
 
 If you're interested in a pre-built Docker image check out the [elastalert2][2] project on Docker Hub.
 
-Be aware that the `latest` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need to utilize a versioned tag, such as `2.0.4` instead, or `2` if you are comfortable with always using the latest released version of Elastalert2.
+Be aware that the `latest` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need to utilize a versioned tag, such as `2.0.4` instead, or `2` if you are comfortable with always using the latest released version of ElastAlert 2.
 
 A properly configured config.yaml file must be mounted into the container during startup of the container.
Use the [example file][1] provided as a template, and once saved locally to a file such as `/tmp/elastalert.yaml`, run the container as follows: @@ -41,7 +41,7 @@ See the Helm chart [README.md][7] for information on installing this application ## License -Elastalert 2 is licensed under the [Apache License, Version 2.0][5]. +ElastAlert 2 is licensed under the [Apache License, Version 2.0][5]. [0]: https://github.com/yelp/elastalert [1]: https://github.com/jertel/elastalert2/blob/master/config.yaml.example diff --git a/chart/elastalert2/README.md b/chart/elastalert2/README.md index 3591e516c..bb73c7192 100644 --- a/chart/elastalert2/README.md +++ b/chart/elastalert2/README.md @@ -1,7 +1,7 @@ -# Elastalert 2 Helm Chart for Kubernetes +# ElastAlert 2 Helm Chart for Kubernetes -An Elastalert 2 helm chart is available in the jertel Helm repository, and can be installed into an existing Kubernetes cluster by following the instructions below. +An ElastAlert 2 helm chart is available in the jertel Helm repository, and can be installed into an existing Kubernetes cluster by following the instructions below. ## Installing the Chart @@ -17,7 +17,7 @@ Next, install the chart with a release name, such as _elastalert2_: helm install elastalert2 jertel/elastalert2 ``` -The command deploys Elastalert 2 on the Kubernetes cluster in the default configuration. The [configuration](#configuration) section lists the parameters that can be configured during installation. +The command deploys ElastAlert 2 on the Kubernetes cluster in the default configuration. The [configuration](#configuration) section lists the parameters that can be configured during installation. See the comment in the default `values.yaml` for specifying a `writebackIndex` for ES 5.x. @@ -34,7 +34,7 @@ PUT /elastalert/_mapping/elastalert ## Uninstalling the Chart -To uninstall/delete the Elastalert 2 deployment: +To uninstall/delete the ElastAlert 2 deployment: ```console helm delete elastalert2 --purge @@ -68,8 +68,8 @@ The command removes all the Kubernetes components associated with the chart and | `elasticsearch.certsVolumes` | certs volumes, required to mount ssl certificates when elasticsearch has tls enabled | `NULL` | | `elasticsearch.certsVolumeMounts` | mount certs volumes, required to mount ssl certificates when elasticsearch has tls enabled | `NULL` | | `extraConfigOptions` | Additional options to propagate to all rules, cannot be `alert`, `type`, `name` or `index` | `{}` | -| `secretConfigName` | name of the secret which holds the Elastalert config. **Note:** this will completely overwrite the generated config | `NULL` | -| `secretRulesName` | name of the secret which holds the Elastalert rules. **Note:** this will overwrite the generated rules | `NULL` | +| `secretConfigName` | name of the secret which holds the ElastAlert config. **Note:** this will completely overwrite the generated config | `NULL` | +| `secretRulesName` | name of the secret which holds the ElastAlert rules. 
**Note:** this will overwrite the generated rules | `NULL` | | `secretRulesList` | a list of rules to enable from the secret | [] | | `optEnv` | Additional pod environment variable definitions | [] | | `extraVolumes` | Additional volume definitions | [] | @@ -79,7 +79,7 @@ The command removes all the Kubernetes components associated with the chart and | `serviceAccount.annotations` | ServiceAccount annotations | | | `podSecurityPolicy.create` | Create pod security policy resources | `false` | | `resources` | Container resource requests and limits | {} | -| `rules` | Rule and alert configuration for Elastalert | {} example shown in values.yaml | +| `rules` | Rule and alert configuration for ElastAlert 2 | {} example shown in values.yaml | | `runIntervalMins` | Default interval between alert checks, in minutes | 1 | | `realertIntervalMins` | Time between alarms for same rule, in minutes | `NULL` | | `alertRetryLimitMins` | Time to retry failed alert deliveries, in minutes | 2880 (2 days) | diff --git a/chart/elastalert2/templates/NOTES.txt b/chart/elastalert2/templates/NOTES.txt index 7b1c2985d..5557d351e 100644 --- a/chart/elastalert2/templates/NOTES.txt +++ b/chart/elastalert2/templates/NOTES.txt @@ -1 +1 @@ -1. Elastalert is now running against: {{ .Values.elasticsearch.host }}:{{ .Values.elasticsearch.port }} \ No newline at end of file +1. ElastAlert 2 is now running against: {{ .Values.elasticsearch.host }}:{{ .Values.elasticsearch.port }} \ No newline at end of file diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml index 702e7542b..46a6c9fa2 100644 --- a/chart/elastalert2/values.yaml +++ b/chart/elastalert2/values.yaml @@ -82,7 +82,7 @@ extraConfigOptions: {} # kibana_url: https://kibana.yourdomain.com # slack_webhook_url: dummy -# To load Elastalert 2 config via secret, uncomment the line below +# To load ElastAlert 2 config via secret, uncomment the line below # secretConfigName: elastalert-config-secret # Example of a secret config @@ -111,7 +111,7 @@ extraConfigOptions: {} # slack_channel_override: '#alerts' -# To load Elastalert's rules via secret, uncomment the line below +# To load ElastAlert's rules via secret, uncomment the line below #secretRulesName: elastalert-rules-secret # Additionally, you must specificy which rules to load from the secret @@ -181,7 +181,7 @@ rules: {} # - "pagerduty" # pagerduty: # pagerduty_service_key: dummy - # pagerduty_client_name: Elastalert Deadman Switch + # pagerduty_client_name: ElastAlert Deadman Switch serviceAccount: # Specifies whether a service account should be created diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 163084f5c..9e141d053 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -1,5 +1,5 @@ -ElastAlert - Easy & Flexible Alerting With Elasticsearch -******************************************************** +ElastAlert 2 - Automated rule-based alerting for Elasticsearch +************************************************************** ElastAlert is a simple framework for alerting on anomalies, spikes, or other patterns of interest from data in Elasticsearch. 
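To make the configuration discussed above concrete, the following is a minimal sketch of the kind of rule file this framework consumes. Every value in it (the rule name, index pattern, query, thresholds, and recipient address) is an illustrative placeholder, not something taken from these patches:

```yaml
# Illustrative sketch only; all values below are placeholders.
name: Example error spike
type: frequency          # alert when too many matching events arrive
index: logstash-*        # assumed index pattern
num_events: 50           # fire once 50 matches occur...
timeframe:
  minutes: 5             # ...within any 5-minute window
filter:
- query:
    query_string:
      query: "level: ERROR"
alert:
- "email"
email:
- "ops@example.com"
```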
diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index bd145aeab..f2e4e23ce 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -376,7 +376,7 @@ Then, assuming an aggregation window of 10 minutes, if you receive the following This should result in 2 alerts: One containing alice's two events, sent at ``2016-09-20T00:10:00`` and one containing bob's one event sent at ``2016-09-20T00:16:00`` -For aggregations, there can sometimes be a large number of documents present in the viewing medium (email, jira ticket, etc..). If you set the ``summary_table_fields`` field, Elastalert will provide a summary of the specified fields from all the results. +For aggregations, there can sometimes be a large number of documents present in the viewing medium (email, jira ticket, etc..). If you set the ``summary_table_fields`` field, ElastAlert 2 will provide a summary of the specified fields from all the results. For example, if you wish to summarize the usernames and event_types that appear in the documents so that you can see the most relevant fields at a quick glance, you can set:: @@ -384,7 +384,7 @@ For example, if you wish to summarize the usernames and event_types that appear - my_data.username - my_data.event_type -Then, for the same sample data shown above listing alice and bob's events, Elastalert will provide the following summary table in the alert medium:: +Then, for the same sample data shown above listing alice and bob's events, ElastAlert 2 will provide the following summary table in the alert medium:: +------------------+--------------------+ | my_data.username | my_data.event_type | @@ -2006,7 +2006,7 @@ Optional: ``pagerduty_incident_key``: If not set PagerDuty will trigger a new incident for each alert sent. If set to a unique string per rule PagerDuty will identify the incident that this event should be applied. If there's no open (i.e. unresolved) incident with this key, a new one will be created. If there's already an open incident with a matching key, this event will be appended to that incident's log. -``pagerduty_incident_key_args``: If set, and ``pagerduty_incident_key`` is a formattable string, Elastalert will format the incident key based on the provided array of fields from the rule or match. +``pagerduty_incident_key_args``: If set, and ``pagerduty_incident_key`` is a formattable string, ElastAlert 2 will format the incident key based on the provided array of fields from the rule or match. ``pagerduty_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerDuty. Set this option using ``hostname:port`` if you need to use a proxy. @@ -2020,21 +2020,21 @@ See https://developer.pagerduty.com/docs/events-api-v2/trigger-events/ ``pagerduty_v2_payload_class``: Sets the class of the payload. (the event type in PagerDuty) -``pagerduty_v2_payload_class_args``: If set, and ``pagerduty_v2_payload_class`` is a formattable string, Elastalert will format the class based on the provided array of fields from the rule or match. +``pagerduty_v2_payload_class_args``: If set, and ``pagerduty_v2_payload_class`` is a formattable string, ElastAlert 2 will format the class based on the provided array of fields from the rule or match. ``pagerduty_v2_payload_component``: Sets the component of the payload. 
(what program/interface/etc the event came from)
 
-``pagerduty_v2_payload_component_args``: If set, and ``pagerduty_v2_payload_component`` is a formattable string, Elastalert will format the component based on the provided array of fields from the rule or match.
+``pagerduty_v2_payload_component_args``: If set, and ``pagerduty_v2_payload_component`` is a formattable string, ElastAlert 2 will format the component based on the provided array of fields from the rule or match.
 
 ``pagerduty_v2_payload_group``: Sets the logical grouping (e.g. app-stack)
 
-``pagerduty_v2_payload_group_args``: If set, and ``pagerduty_v2_payload_group`` is a formattable string, Elastalert will format the group based on the provided array of fields from the rule or match.
+``pagerduty_v2_payload_group_args``: If set, and ``pagerduty_v2_payload_group`` is a formattable string, ElastAlert 2 will format the group based on the provided array of fields from the rule or match.
 
 ``pagerduty_v2_payload_severity``: Sets the severity of the page. (defaults to `critical`, valid options: `critical`, `error`, `warning`, `info`)
 
 ``pagerduty_v2_payload_source``: Sets the source of the event, preferably the hostname or fqdn.
 
-``pagerduty_v2_payload_source_args``: If set, and ``pagerduty_v2_payload_source`` is a formattable string, Elastalert will format the source based on the provided array of fields from the rule or match.
+``pagerduty_v2_payload_source_args``: If set, and ``pagerduty_v2_payload_source`` is a formattable string, ElastAlert 2 will format the source based on the provided array of fields from the rule or match.
 
 ``pagerduty_v2_payload_custom_details``: List of keys:values to use as the content of the custom_details payload. Example - ip:clientip will map the value from the clientip index of Elasticsearch to JSON key named ip.
 
@@ -2420,8 +2420,8 @@ Required:
 
 ``zbx_sender_host``: The address where zabbix server is running.
 
 ``zbx_sender_port``: The port where zabbix server is listenning.
 
-``zbx_host``: This field setup the host in zabbix that receives the value sent by Elastalert.
-``zbx_key``: This field setup the key in the host that receives the value sent by Elastalert.
+``zbx_host``: This field sets up the host in Zabbix that receives the value sent by ElastAlert 2.
+``zbx_key``: This field sets up the key in the host that receives the value sent by ElastAlert 2.
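As a sketch of how the four required Zabbix options above might be combined in a rule file (the server address, port, host name, and item key below are placeholders, not values drawn from this patch):

```yaml
# Hypothetical snippet; point these at a real Zabbix server and item key.
alert:
- "zabbix"
zbx_sender_host: "zabbix.example.com"   # address where the Zabbix server runs
zbx_sender_port: 10051                  # port the Zabbix server listens on
zbx_host: "web-01"                      # Zabbix host that receives the value
zbx_key: "elastalert.matches"           # item key on that host
```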
Discord diff --git a/elastalert/alerts.py b/elastalert/alerts.py index baa16e90d..646323d87 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2366,7 +2366,7 @@ def alert(self, matches): elastalert_logger.info("Sent ses to %s" % (to_addr,)) def create_default_title(self, matches): - subject = 'Elastalert2: %s' % (self.rule['name']) + subject = 'ElastAlert 2: %s' % (self.rule['name']) # If the rule has a query_key, add that value plus timestamp to subject if 'query_key' in self.rule: diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 4f56ad1a7..01afdb7f7 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -2119,7 +2119,7 @@ def test_alerta_no_auth(ea): 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], 'alerta_event': "ProbeUP", 'alerta_group': "Health", - 'alerta_origin': "Elastalert", + 'alerta_origin': "ElastAlert 2", 'alerta_severity': "debug", 'alerta_text': "Probe %(hostname)s is UP at %(logdate)s GMT", 'alerta_value': "UP", @@ -2143,7 +2143,7 @@ def test_alerta_no_auth(ea): alert.alert([match]) expected_data = { - "origin": "Elastalert", + "origin": "ElastAlert 2", "resource": "elastalert", "severity": "debug", "service": ["elastalert"], @@ -2217,7 +2217,7 @@ def test_alerta_new_style(ea): 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], 'alerta_event': "ProbeUP", 'alerta_group': "Health", - 'alerta_origin': "Elastalert", + 'alerta_origin': "ElastAlert 2", 'alerta_severity': "debug", 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", 'alerta_value': "UP", @@ -2241,7 +2241,7 @@ def test_alerta_new_style(ea): alert.alert([match]) expected_data = { - "origin": "Elastalert", + "origin": "ElastAlert 2", "resource": "elastalert", "severity": "debug", "service": ["elastalert"], From ef4cbcb440621878bb17a44ced3dc7c8d9d766bb Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 1 May 2021 10:06:58 -0400 Subject: [PATCH 0185/1065] Improve Docker build instructions --- CHANGELOG.md | 1 + README.md | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 673df3172..c67b00740 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ ## Other changes - Fix issue with testing alerts that contain Jinja templates - [#101](https://github.com/jertel/elastalert2/pull/101) - @jertel +- Updated all references of Elastalert to use the mixed case ElastAlert, as that is the most prevalent formatting found in the documentation. # 2.0.4 diff --git a/README.md b/README.md index 20f9531f0..15f34a9a2 100644 --- a/README.md +++ b/README.md @@ -29,10 +29,10 @@ A properly configured config.yaml file must be mounted into the container during docker run -d -v /tmp/elastalert.yaml:/opt/elastalert/config.yaml jertel/elastalert2 ``` -To build the image locally, install Docker and then run the following command: +To build the image locally run the following command: ```bash -docker build . -t elastalert +docker build . 
-t elastalert2
 ```
 
 ## Kubernetes
 
From 97cd7fd355736d9d1bb53ea7602cca1a504151ac Mon Sep 17 00:00:00 2001
From: fberrez_
Date: Mon, 3 May 2021 16:28:42 +0200
Subject: [PATCH 0186/1065] docs: add limit_execution

---
 docs/source/ruletypes.rst | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index f2e4e23ce..f100866d3 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -50,7 +50,9 @@ Rule Configuration Cheat Sheet
 +--------------------------------------------------------------+           |
 |  ``es_send_get_body_as`` (string, default "GET")             |           |
 +--------------------------------------------------------------+           |
-|  ``aggregation`` (time, no default)                          |           |
+|  ``aggregation`` (time, no default)                          |           |
++--------------------------------------------------------------+           |
+|  ``limit_execution`` (string, no default)                    |           |
 +--------------------------------------------------------------+           |
 |  ``description`` (string, default empty string)              |           |
 +--------------------------------------------------------------+           |
@@ -400,6 +402,15 @@ Then, for the same sample data shown above listing alice and bob's events, Elast
 past events will result in different alerts than if elastalert had been running while those events occured. This
 behavior can be changed by setting ``aggregate_by_match_time``.
 
+limit_execution
+^^^^^^^^^^^^^^^
+
+``limit_execution``: This option allows you to activate the rule during a limited period of time. This uses the cron format.
+
+For example, if you wish to activate the rule from Monday to Friday, between 10am and 6pm::
+
+    limit_execution: "* 10-18 * * 1-5"
+
 aggregate_by_match_time
 ^^^^^^^^^^^^^^^^^^^^^^^
 
From ceb4af3d62740e35cea61a258c9d993060a3c79d Mon Sep 17 00:00:00 2001
From: fberrez
Date: Mon, 3 May 2021 16:28:42 +0200
Subject: [PATCH 0187/1065] docs: add limit_execution

---
 docs/source/ruletypes.rst | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index f2e4e23ce..f100866d3 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -50,7 +50,9 @@ Rule Configuration Cheat Sheet
 +--------------------------------------------------------------+           |
 |  ``es_send_get_body_as`` (string, default "GET")             |           |
 +--------------------------------------------------------------+           |
-|  ``aggregation`` (time, no default)                          |           |
+|  ``aggregation`` (time, no default)                          |           |
++--------------------------------------------------------------+           |
+|  ``limit_execution`` (string, no default)                    |           |
 +--------------------------------------------------------------+           |
 |  ``description`` (string, default empty string)              |           |
 +--------------------------------------------------------------+           |
@@ -400,6 +402,15 @@ Then, for the same sample data shown above listing alice and bob's events, Elast
 past events will result in different alerts than if elastalert had been running while those events occured. This
 behavior can be changed by setting ``aggregate_by_match_time``.
 
+limit_execution
+^^^^^^^^^^^^^^^
+
+``limit_execution``: This option allows you to activate the rule during a limited period of time. This uses the cron format.
+
+For example, if you wish to activate the rule from Monday to Friday, between 10am and 6pm::
+
+    limit_execution: "* 10-18 * * 1-5"
+
 aggregate_by_match_time
 ^^^^^^^^^^^^^^^^^^^^^^^
 
From db1ef5ff7fdc60531ca6f318a8e7bae4e9503144 Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Wed, 5 May 2021 00:15:23 +0900
Subject: [PATCH 0188/1065] Added test code to alerts.py

ChatworkAlerter
DingTalkAlerter
DingTalkAlerter
GitterAlerter
GoogleChatAlerter
LineNotifyAlerter
MattermostAlerter
PagerTreeAlerter
ServiceNowAlerter
TelegramAlerter
VictorOpsAlerter
---
 tests/alerts_test.py | 1020 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 1020 insertions(+)

diff --git a/tests/alerts_test.py b/tests/alerts_test.py
index 4f56ad1a7..20f5bad7c 100644
--- a/tests/alerts_test.py
+++ b/tests/alerts_test.py
@@ -3,6 +3,8 @@
 import datetime
 import json
 import subprocess
+import re
+import uuid
 
 import mock
 import pytest
@@ -11,17 +13,28 @@
 from elastalert.alerts import AlertaAlerter
 from elastalert.alerts import Alerter
 from elastalert.alerts import BasicMatchString
+from elastalert.alerts import ChatworkAlerter
 from elastalert.alerts import CommandAlerter
 from elastalert.alerts import DatadogAlerter
+from elastalert.alerts import DingTalkAlerter
+from elastalert.alerts import DiscordAlerter
 from elastalert.alerts import EmailAlerter
+from elastalert.alerts import GitterAlerter
+from elastalert.alerts import GoogleChatAlerter
 from elastalert.alerts import HTTPPostAlerter
 from elastalert.alerts import JiraAlerter
 from elastalert.alerts import JiraFormattedMatchString
+from elastalert.alerts import LineNotifyAlerter
+from elastalert.alerts import MattermostAlerter
 from elastalert.alerts import MsTeamsAlerter
 from elastalert.alerts import PagerDutyAlerter
+from elastalert.alerts import PagerTreeAlerter
+from elastalert.alerts import ServiceNowAlerter
 from elastalert.alerts import SlackAlerter
+from elastalert.alerts import TelegramAlerter
 from elastalert.loaders import FileRulesLoader
 from elastalert.opsgenie import OpsGenieAlerter
+from elastalert.alerts import VictorOpsAlerter
 from elastalert.util import ts_add
 from elastalert.util import ts_now
@@ -2333,3 +2346,1010 @@ def test_datadog_alerter():
     )
     actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
     assert expected_data == actual_data
+
+
+def test_pagertree():
+    rule = {
+        'name': 'Test PagerTree Rule',
+        'type': 'any',
+        'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx',
+        'alert': []
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = PagerTreeAlerter(rule)
+    match = {
+        '@timestamp': '2021-01-01T00:00:00',
+        'somefield': 'foobarbaz'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        'event_type': 'create',
+        'Id': str(uuid.uuid4()),
+        'Title': 'Test PagerTree Rule',
+        'Description': 'Test PagerTree Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'
+    }
+
+    mock_post_request.assert_called_once_with(
+        rule['pagertree_integration_url'],
+        data=mock.ANY,
+        headers={'content-type': 'application/json'},
+        proxies=None
+    )
+
+    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
+    uuid4hex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I)
+    match = uuid4hex.match(actual_data['Id'])
+    assert bool(match) is True
+    assert expected_data["event_type"] == actual_data['event_type']
+    assert expected_data["Title"] == actual_data['Title']
+    assert
expected_data["Description"] == actual_data['Description'] + + +def test_line_notify(): + rule = { + 'name': 'Test LineNotify Rule', + 'type': 'any', + 'linenotify_access_token': 'xxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = LineNotifyAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'message': 'Test LineNotify Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + + mock_post_request.assert_called_once_with( + 'https://notify-api.line.me/api/notify', + data=mock.ANY, + headers={ + 'Content-Type': 'application/x-www-form-urlencoded', + 'Authorization': 'Bearer {}'.format('xxxxx') + } + ) + + actual_data = mock_post_request.call_args_list[0][1]['data'] + assert expected_data == actual_data + + +def test_gitter_msg_level_default(): + rule = { + 'name': 'Test Gitter Rule', + 'type': 'any', + 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GitterAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'level': 'error' + } + + mock_post_request.assert_called_once_with( + rule['gitter_webhook_url'], + mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][0][1]) + assert expected_data == actual_data + assert 'error' in actual_data['level'] + + +def test_gitter_msg_level_info(): + rule = { + 'name': 'Test Gitter Rule', + 'type': 'any', + 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', + 'gitter_msg_level': 'info', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GitterAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'level': 'info' + } + + mock_post_request.assert_called_once_with( + rule['gitter_webhook_url'], + mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][0][1]) + assert expected_data == actual_data + assert 'info' in actual_data['level'] + + +def test_gitter_msg_level_error(): + rule = { + 'name': 'Test Gitter Rule', + 'type': 'any', + 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', + 'gitter_msg_level': 'error', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GitterAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'level': 'error' + } + + mock_post_request.assert_called_once_with( + rule['gitter_webhook_url'], + mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + + actual_data = 
json.loads(mock_post_request.call_args_list[0][0][1]) + assert expected_data == actual_data + assert 'error' in actual_data['level'] + + +def test_chatwork(): + rule = { + 'name': 'Test Chatwork Rule', + 'type': 'any', + 'chatwork_apikey': 'xxxx1', + 'chatwork_room_id': 'xxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ChatworkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + } + + mock_post_request.assert_called_once_with( + 'https://api.chatwork.com/v2/rooms/xxxx2/messages', + params=mock.ANY, + headers={'X-ChatWorkToken': 'xxxx1'}, + proxies=None, + auth=None + ) + + actual_data = mock_post_request.call_args_list[0][1]['params'] + assert expected_data == actual_data + + +def test_telegram(): + rule = { + 'name': 'Test Telegram Rule', + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'chat_id': rule['telegram_room_id'], + 'text': '⚠ *Test Telegram Rule* ⚠ ```\nTest Telegram Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n ```', + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + mock_post_request.assert_called_once_with( + 'https://api.telegram.org/botxxxxx1/sendMessage', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_service_now(): + rule = { + 'name': 'Test ServiceNow Rule', + 'type': 'any', + 'username': 'ServiceNow username', + 'password': 'ServiceNow password', + 'servicenow_rest_url': 'https://xxxxxxxxxx', + 'short_description': 'ServiceNow short_description', + 'comments': 'ServiceNow comments', + 'assignment_group': 'ServiceNow assignment_group', + 'category': 'ServiceNow category', + 'subcategory': 'ServiceNow subcategory', + 'cmdb_ci': 'ServiceNow cmdb_ci', + 'caller_id': 'ServiceNow caller_id', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ServiceNowAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'description': 'Test ServiceNow Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'short_description': rule['short_description'], + 'comments': rule['comments'], + 'assignment_group': rule['assignment_group'], + 'category': rule['category'], + 'subcategory': rule['subcategory'], + 'cmdb_ci': rule['cmdb_ci'], + 'caller_id': rule['caller_id'] + } + + mock_post_request.assert_called_once_with( + rule['servicenow_rest_url'], + auth=(rule['username'], rule['password']), + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + data=mock.ANY, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def 
test_victor_ops(): + rule = { + 'name': 'Test VictorOps Rule', + 'type': 'any', + 'victorops_api_key': 'xxxx1', + 'victorops_routing_key': 'xxxx2', + 'victorops_message_type': 'INFO', + 'victorops_entity_display_name': 'no entity display name', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = VictorOpsAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'message_type': rule['victorops_message_type'], + 'entity_display_name': rule['victorops_entity_display_name'], + 'monitoring_tool': 'ElastAlert', + 'state_message': 'Test VictorOps Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + + mock_post_request.assert_called_once_with( + 'https://alert.victorops.com/integrations/generic/20131114/alert/xxxx1/xxxx2', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_google_chat_basic(): + rule = { + 'name': 'Test GoogleChat Rule', + 'type': 'any', + 'googlechat_webhook_url': 'http://xxxxxxx', + 'googlechat_format': 'basic', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GoogleChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'text': 'Test GoogleChat Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + + mock_post_request.assert_called_once_with( + rule['googlechat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_google_chat_card(): + rule = { + 'name': 'Test GoogleChat Rule', + 'type': 'any', + 'googlechat_webhook_url': 'http://xxxxxxx', + 'googlechat_format': 'card', + 'googlechat_header_title': 'xxxx1', + 'googlechat_header_subtitle': 'xxxx2', + 'googlechat_header_image': 'http://xxxx/image.png', + 'googlechat_footer_kibanalink': 'http://xxxxx/kibana', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GoogleChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'cards': [{ + 'header': { + 'title': rule['googlechat_header_title'], + 'subtitle': rule['googlechat_header_subtitle'], + 'imageUrl': rule['googlechat_header_image'] + }, + 'sections': [ + { + 'widgets': [{ + "textParagraph": { + 'text': 'Test GoogleChat Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + }] + }, + { + 'widgets': [{ + 'buttons': [{ + 'textButton': { + 'text': 'VISIT KIBANA', + 'onClick': { + 'openLink': { + 'url': rule['googlechat_footer_kibanalink'] + } + } + } + }] + }] + } + ]} + ] + } + + mock_post_request.assert_called_once_with( + rule['googlechat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_discord(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 
'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'discord_embed_footer': 'footer', + 'discord_embed_icon_url': 'http://xxxx/image.png', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'color': 0xffffff, + 'footer': { + 'text': 'footer', + 'icon_url': 'http://xxxx/image.png' + } + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_discord_not_footer(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_dingtalk_text(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'text', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'text', + 'text': {'content': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'} + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_dingtalk_markdown(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'markdown', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with 
mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'markdown', + 'markdown': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_dingtalk_single_action_card(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'single_action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'singleTitle': rule['dingtalk_single_title'], + 'singleURL': rule['dingtalk_single_url'] + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_dingtalk_action_card(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'dingtalk_btn_orientation': '1', + 'dingtalk_btns': [ + { + 'title': 'test1', + 'actionURL': 'https://xxxxx0/' + }, + { + 'title': 'test2', + 'actionURL': 'https://xxxxx1/' + } + ], + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'btnOrientation': rule['dingtalk_btn_orientation'], + 'btns': rule['dingtalk_btns'] + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_Mattermost_alert_text_only(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 
'danger', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_Mattermost_not_alert_text_only(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'exclude_fields', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [] + } + ], + 'text': 'Test Mattermost Rule\n\n', + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + print(actual_data) + assert expected_data == actual_data + + +def test_Mattermost_msg_fields(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_msg_fields': [ + { + 'title': 'Stack', + 'value': "{0} {1}", + 'short': False, + 'args': ["type", "msg.status_code"] + }, + { + 'title': 'Name', + 'value': 'static field', + 'short': False + } + ], + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [ + {'title': 'Stack', 'value': ' ', 'short': False}, + {'title': 'Name', 'value': 'static field', 'short': False} + ], + 'text': 'Test Mattermost Rule\n\n' + } + ], 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_Mattermost_icon_url_override(): + rule = { + 'name': 
'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_icon_url_override': 'http://xxxx/icon.png', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], + 'username': 'elastalert', + 'icon_url': 'http://xxxx/icon.png' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_Mattermost_channel_override(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_channel_override': 'test channel', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], + 'username': 'elastalert', + 'channel': 'test channel' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_Mattermost_ignore_ssl_errors(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_ignore_ssl_errors': True, + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=False, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data From 
ffb096e28eecbd9933973cbfc7baec292e50ce23 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Wed, 5 May 2021 07:59:34 -0400 Subject: [PATCH 0189/1065] Add documentation to the tutorial on disabling rules --- .gitignore | 1 + docs/source/running_elastalert.rst | 8 ++++++++ 2 files changed, 9 insertions(+) diff --git a/.gitignore b/.gitignore index badcb7a3c..702514108 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,4 @@ build/ my_rules *.swp *~ +rules/ diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index 1ea32b15e..5f76213d7 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -223,3 +223,11 @@ Note that if you stop ElastAlert and then run it again later, it will look up `` at the end time of the last query. This is to prevent duplication or skipping of alerts if ElastAlert is restarted. By using the ``--debug`` flag instead of ``--verbose``, the body of email will instead be logged and the email will not be sent. In addition, the queries will not be saved to ``elastalert_status``. + +Disabling a Rule +---------------- + +To stop a rule from executing, add or adjust the `is_enabled` option inside the rule's YAML file to `false`. When ElastAlert reloads the rules it will detect that the rule has been disabled and prevent it from executing. The rule reload interval defaults to 5 minutes but can be adjusted via the `run_every` configuration option. + +Optionally, once a rule has been disabled it is safe to remove the rule file, if there is no intention of re-activating the rule. However, be aware that removing a rule file without first disabling it will _not_ disable the rule! + From 921dc49d2cb9caa791b5fab1e3ea156b2fbcfa9c Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Wed, 5 May 2021 08:08:34 -0400 Subject: [PATCH 0190/1065] Setup readthedocs configuration for v2 --- .readthedocs.yaml | 20 ++++++++++++++++++++ docs/requirements.txt | 1 + 2 files changed, 21 insertions(+) create mode 100644 .readthedocs.yaml create mode 100644 docs/requirements.txt diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 000000000..43a6e4388 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,20 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/conf.py + +# Optionally build your docs in additional formats such as PDF +formats: + - pdf + - htmlzip + +# Optionally set the version of Python and requirements required to build your docs +python: + install: + - requirements: docs/requirements.txt \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 000000000..ecd67a4ad --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1 @@ +m2r2 \ No newline at end of file From 2ebf22d28561f04758752f65b14ce53975422855 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Wed, 5 May 2021 08:17:08 -0400 Subject: [PATCH 0191/1065] Setup readthedocs configuration for v2 --- .readthedocs.yaml | 4 ++-- docs/requirements.txt | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) delete mode 100644 docs/requirements.txt diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 43a6e4388..346241720 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -7,7 +7,7 @@ version: 2 # Build documentation in the docs/ directory with Sphinx sphinx: - configuration: docs/conf.py + 
configuration: docs/source/conf.py # Optionally build your docs in additional formats such as PDF formats: @@ -17,4 +17,4 @@ formats: # Optionally set the version of Python and requirements required to build your docs python: install: - - requirements: docs/requirements.txt \ No newline at end of file + - requirements: docs/source/requirements.txt \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt deleted file mode 100644 index ecd67a4ad..000000000 --- a/docs/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -m2r2 \ No newline at end of file From 1ab2868fa8f1bf344112feb0bea4b90a38c95fe4 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Wed, 5 May 2021 08:19:32 -0400 Subject: [PATCH 0192/1065] Setup readthedocs configuration for v2 --- .readthedocs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 346241720..fe9090e0c 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -17,4 +17,4 @@ formats: # Optionally set the version of Python and requirements required to build your docs python: install: - - requirements: docs/source/requirements.txt \ No newline at end of file + - requirements: docs/requirements.txt \ No newline at end of file From 2221cbfb136677fac7699d6eab93f2f32a2794fc Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Wed, 5 May 2021 08:20:42 -0400 Subject: [PATCH 0193/1065] Setup readthedocs configuration for v2 --- .readthedocs.yaml | 2 +- docs/source/requirements.txt | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 docs/source/requirements.txt diff --git a/.readthedocs.yaml b/.readthedocs.yaml index fe9090e0c..346241720 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -17,4 +17,4 @@ formats: # Optionally set the version of Python and requirements required to build your docs python: install: - - requirements: docs/requirements.txt \ No newline at end of file + - requirements: docs/source/requirements.txt \ No newline at end of file diff --git a/docs/source/requirements.txt b/docs/source/requirements.txt new file mode 100644 index 000000000..ecd67a4ad --- /dev/null +++ b/docs/source/requirements.txt @@ -0,0 +1 @@ +m2r2 \ No newline at end of file From 6eb7c3d3ea7dcbd7debfbf4abcdcd772ba4abf85 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Wed, 5 May 2021 08:22:53 -0400 Subject: [PATCH 0194/1065] Update doc refs to ElastAlert 2 --- docs/source/conf.py | 6 +++--- docs/source/index.rst | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 4a7ac542b..a26d49d03 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -34,8 +34,8 @@ master_doc = 'index' # General information about the project. -project = u'ElastAlert' -copyright = u'2014, Yelp' +project = u'ElastAlert 2' +copyright = u'2014-2021, Yelp, et al' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -171,7 +171,7 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). 
latex_documents = [ - ('index', 'elastalert.tex', u'ElastAlert Documentation', + ('index', 'elastalert.tex', u'ElastAlert 2 Documentation', u'Quentin Long', 'manual'), ] diff --git a/docs/source/index.rst b/docs/source/index.rst index 17b15631d..e57df0b35 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -3,8 +3,8 @@ You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -ElastAlert - Easy & Flexible Alerting With Elasticsearch -======================================================== +ElastAlert 2 - Automated rule-based alerting for Elasticsearch +============================================================== Contents: From 74b5dfd647a55b5e35890cbe91e728e2c5520b57 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Wed, 5 May 2021 10:26:47 -0400 Subject: [PATCH 0195/1065] Add example for detecting when a rule has stopped matching --- docs/source/recipes/faq-md.md | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/docs/source/recipes/faq-md.md b/docs/source/recipes/faq-md.md index 15f87fbad..d9aab50d4 100644 --- a/docs/source/recipes/faq-md.md +++ b/docs/source/recipes/faq-md.md @@ -236,7 +236,30 @@ that it has stopped seeing them. How can I get a "resolve" event? ========== -ElastAlert does not currently support stateful alerts or resolve events. +ElastAlert does not currently support stateful alerts or resolve events. However, if you have a rule +alerting you that a condition has occurred, such as a service being down, then you can create a +second rule that will monitor the first rule, and alert you when the first rule ceases to trigger. + +For example, assuming you already have a rule named "Service is offline" that's working today, you +can add a second rule as follows: + +``` +name: Service is back online +type: flatline +index: elastalert* +query_key: "rule_name" +filter: +- query: + query_string: + query: "rule_name:\"Service is offline\" AND matches:>0" +forget_keys: true +timeframe: + minutes: 30 +threshold: 1 +``` + +This second rule will trigger after the timeframe of 30 minutes has elapsed with no further matches +against the first rule. Can I set a warning threshold? ========== From ad0566da54763070d1a137c26c09d1ddba3dbcc4 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Thu, 6 May 2021 09:59:59 -0400 Subject: [PATCH 0196/1065] Correct documentation installation instructions --- .pre-commit-config.yaml | 6 ------ docs/source/elastalert.rst | 4 ---- docs/source/running_elastalert.rst | 10 +++++----- 3 files changed, 5 insertions(+), 15 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 95437e1bf..cf58a0ac6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,9 +17,3 @@ repos: sha: v0.3.5 hooks: - id: reorder-python-imports -- repo: git://github.com/Yelp/detect-secrets - sha: 0.9.1 - hooks: - - id: detect-secrets - args: ['--baseline', '.secrets.baseline'] - exclude: .*tests/.*|.*yelp/testing/.*|\.pre-commit-config\.yaml diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 9e141d053..422663f58 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -3,10 +3,6 @@ ElastAlert 2 - Automated rule-based alerting for Elasticsearch ElastAlert is a simple framework for alerting on anomalies, spikes, or other patterns of interest from data in Elasticsearch. -At Yelp, we use Elasticsearch, Logstash and Kibana for managing our ever increasing amount of data and logs. 
-Kibana is great for visualizing and querying data, but we quickly realized that it needed a companion tool for alerting -on inconsistencies in our data. Out of this need, ElastAlert was created. - If you have data being written into Elasticsearch in near real time and want to be alerted when that data matches certain patterns, ElastAlert is the tool for you. Overview diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index 5f76213d7..443ac0516 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -8,7 +8,7 @@ Requirements - Elasticsearch - ISO8601 or Unix timestamped data -- Python 3.6 +- Python 3.9 - pip, see requirements.txt - Packages on Ubuntu 18.x: build-essential python3-pip python3.6 python3.6-dev libffi-dev libssl-dev - Packages on Ubuntu 20.x: build-essential python3-pip python3.6 python3.6-dev libffi-dev libssl-dev @@ -18,13 +18,13 @@ Requirements Downloading and Configuring --------------------------- -You can either install the latest released version of ElastAlert using pip:: +You can either install the latest released version of ElastAlert 2 using pip:: - $ pip install elastalert + $ pip install elastalert2 -or you can clone the ElastAlert repository for the most recent changes:: +or you can clone the ElastAlert2 repository for the most recent changes:: - $ git clone https://github.com/Yelp/elastalert.git + $ git clone https://github.com/jertel/elastalert2.git Install the module:: From 286bb2a876482c402d845975f560f7e05f76798b Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 7 May 2021 19:10:18 +0900 Subject: [PATCH 0197/1065] Document update, test code addition, etc. requirements-dev.txt - tox 3.23.0 to 3.23.1 elastalert.rst - Corrected the corresponding alert notification names in alphabetical order ruletypes.rst - Corrected the corresponding alert notification names in alphabetical order - Fixed a mistake in the description of slack_ca_certs alerts_test.py add - test_opsgenie_tags - test_opsgenie_message - test_opsgenie_alias - test_opsgenie_subject - test_opsgenie_subject_args - test_opsgenie_priority_p1 - test_opsgenie_priority_p2 - test_opsgenie_priority_p3 - test_opsgenie_priority_p4 - test_opsgenie_priority_p5 - test_opsgenie_priority_none - test_opsgenie_proxy - test_ms_teams_proxy - test_slack_proxy - test_slack_username_override - test_slack_title_link - test_slack_title - test_slack_icon_url_override - test_slack_msg_color - test_slack_parse_override - test_slack_text_string - test_slack_alert_fields - test_slack_ca_certs - test_http_alerter_proxy - test_http_alerter_timeout - test_http_alerter_headers - test_http_alerter_post_ca_certs_true - test_http_alerter_post_ca_certs_false - test_pagerduty_alerter_v2_payload_class_args - test_pagerduty_alerter_v2_payload_component_args - test_pagerduty_alerter_v2_payload_group_args - test_pagerduty_alerter_v2_payload_source_args - test_pagerduty_alerter_v2_payload_custom_details - test_pagerduty_alerter_v2_payload_include_all_info - test_pagerduty_alerter_proxy - test_alerta_use_qk_as_resource - test_alerta_timeout - test_alerta_type - test_alerta_resource - test_alerta_service - test_alerta_environment - test_alerta_tags - test_alert_error - test_alert_get_aggregation_summary_text__maximum_width - test_pagertree_proxy - test_gitter_proxy - test_chatwork_proxy - test_telegram_proxy - test_telegram_text_maxlength - test_service_now_proxy - test_victor_ops_proxy - test_discord_proxy - test_discord_description_maxlength - test_dingtalk_proxy - 
test_mattermost_proxy modify - test_command - test_resolving_rule_references - test_alerta_no_auth - test_alerta_auth - test_alerta_new_style - test_alert_subject_size_limit_no_args - test_mattermost_alert_text_only - test_mattermost_not_alert_text_only - test_mattermost_msg_fields - test_mattermost_icon_url_override - test_mattermost_channel_override - test_mattermost_ignore_ssl_errors util_test.py add - test_ts_to_dt_with_format - test_dt_to_ts_with_format - test_flatten_dict - test_pytzfy --- docs/source/elastalert.rst | 37 +- docs/source/ruletypes.rst | 1274 +++++----- requirements-dev.txt | 2 +- tests/alerts_test.py | 4537 ++++++++++++++++++++++++++++-------- tests/util_test.py | 23 + 5 files changed, 4263 insertions(+), 1610 deletions(-) diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 9e141d053..263e3faaf 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -31,33 +31,36 @@ Several rule types with common monitoring paradigms are included with ElastAlert Currently, we have support built in for these alert types: +- Alerta +- AWS SES +- AWS SNS +- Chatwork - Command +- Datadog +- Debug +- Dingtalk +- Discord - Email +- Exotel +- Gitter +- GoogleChat +- HTTP POST - JIRA -- OpsGenie -- AWS SNS -- MS Teams -- Slack +- Line Notify - Mattermost -- Telegram -- GoogleChat +- Microsoft Teams +- OpsGenie - PagerDuty - PagerTree -- Exotel -- Twilio -- Splunk On-Call (Formerly VictorOps) -- Gitter +- Squadcast - ServiceNow -- Debug +- Slack +- Splunk On-Call (Formerly VictorOps) - Stomp -- Alerta -- HTTP POST -- Line Notify +- Telegram - TheHive +- Twilio - Zabbix -- Discord -- Dingtalk -- Chatwork Additional rule types and alerts can be easily imported or written. (See :ref:`Writing rule types ` and :ref:`Writing alerts `) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index f100866d3..bb4462061 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1480,6 +1480,220 @@ come from an individual event, usually the one which triggers the alert. When using ``alert_text_args``, you can access nested fields and index into arrays. For example, if your match was ``{"data": {"ips": ["127.0.0.1", "12.34.56.78"]}}``, then by using ``"data.ips[1]"`` in ``alert_text_args``, it would replace value with ``"12.34.56.78"``. This can go arbitrarily deep into fields and will still work on keys that contain dots themselves. +Alerter +~~~~~~~ + +For all Alerter subclasses, you may reference values from a top-level rule property in your Alerter fields by referring to the property name surrounded by dollar signs. This can be useful when you have rule-level properties that you would like to reference many times in your alert. For example: + +Example usage:: + + jira_priority: $priority$ + jira_alert_owner: $owner$ + +Alerta +~~~~~~ + +Alerta alerter will post an alert in the Alerta server instance through the alert API endpoint. +See https://docs.alerta.io/en/latest/api/alert.html for more details on the Alerta JSON format. + +For Alerta 5.0 + +Required: + +``alerta_api_url``: API server URL. + +Optional: + +``alerta_api_key``: This is the api key for alerta server, sent in an ``Authorization`` HTTP header. If not defined, no Authorization header is sent. + +``alerta_use_qk_as_resource``: If true and query_key is present, this will override ``alerta_resource`` field with the ``query_key value`` (Can be useful if ``query_key`` is a hostname). 
+
+``alerta_use_match_timestamp``: If true, it will use the timestamp of the first match as the ``createTime`` of the alert. Otherwise, the current server time is used.
+
+``alerta_api_skip_ssl``: Defaults to False.
+
+``alert_missing_value``: Text to replace any match field not found when formatting strings. Defaults to ````.
+
+The following options dictate the values of the API JSON payload:
+
+``alerta_severity``: Defaults to "warning".
+
+``alerta_timeout``: Defaults to 84600 (1 day).
+
+``alerta_type``: Defaults to "elastalert".
+
+The following options use Python-like string syntax ``{}`` or ``%()s`` to access parts of the match, similar to the CommandAlerter, e.g. "Alert for {clientip}".
+If the referenced key is not found in the match, it is replaced by the text indicated by the option ``alert_missing_value``.
+
+``alerta_resource``: Defaults to "elastalert".
+
+``alerta_service``: Defaults to "elastalert".
+
+``alerta_origin``: Defaults to "elastalert".
+
+``alerta_environment``: Defaults to "Production".
+
+``alerta_group``: Defaults to "".
+
+``alerta_correlate``: Defaults to an empty list.
+
+``alerta_tags``: Defaults to an empty list.
+
+``alerta_event``: Defaults to the rule's name.
+
+``alerta_text``: Defaults to the rule's text according to its type.
+
+``alerta_value``: Defaults to "".
+
+The ``attributes`` dictionary is built by joining the lists from ``alerta_attributes_keys`` and ``alerta_attributes_values``, considered in order.
+
+Example usage using old-style format::
+
+    alert:
+    - alerta
+    alerta_api_url: "http://youralertahost/api/alert"
+    alerta_attributes_keys: ["hostname", "TimestampEvent", "senderIP" ]
+    alerta_attributes_values: ["%(key)s", "%(logdate)s", "%(sender_ip)s" ]
+    alerta_correlate: ["ProbeUP","ProbeDOWN"]
+    alerta_event: "ProbeUP"
+    alerta_text: "Probe %(hostname)s is UP at %(logdate)s GMT"
+    alerta_value: "UP"
+
+Example usage using new-style format::
+
+    alert:
+    - alerta
+    alerta_attributes_values: ["{key}", "{logdate}", "{sender_ip}" ]
+    alerta_text: "Probe {hostname} is UP at {logdate} GMT"
+
+AWS SES
+~~~~~~~
+
+The AWS SES alerter is similar to the Email alerter but uses AWS SES to send emails. The AWS SES alerter can use AWS credentials
+from the rule yaml, standard AWS config files or environment variables.
+
+AWS SES requires two options:
+
+``ses_email``: An address or list of addresses to send the alert to.
+
+``ses_from_addr``: This sets the From header in the email.
+
+Optional:
+
+``ses_aws_access_key``: An access key to connect to AWS SES with.
+
+``ses_aws_secret_key``: The secret key associated with the access key.
+
+``ses_aws_region``: The AWS region in which the AWS SES resource is located. Default is us-east-1.
+
+``ses_aws_profile``: The AWS profile to use. If none specified, the default will be used.
+
+``ses_email_reply_to``: This sets the Reply-To header in the email.
+
+``ses_cc``: This adds the CC emails to the list of recipients. By default, this is left empty.
+
+``ses_bcc``: This adds the BCC emails to the list of recipients but does not show up in the email message. By default, this is left empty.
+
+Example usage when not using aws_profile::
+
+    alert:
+    - "ses"
+    ses_aws_access_key_id: "XXXXXXXXXXXXXXXXXX"
+    ses_aws_secret_access_key: "YYYYYYYYYYYYYYYYYYYY"
+    ses_aws_region: "us-east-1"
+    ses_from_addr: "xxxx1@xxx.com"
+    ses_email: "xxxx1@xxx.com"
+
+Example usage when using aws_profile::
+
+    # Create ~/.aws/credentials
+
+    [default]
+    aws_access_key_id = xxxxxxxxxxxxxxxxxxxx
+    aws_secret_access_key = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
+
+    # Create ~/.aws/config
+
+    [default]
+    region = us-east-1
+
+    # alert rule setting
+
+    alert:
+    - "ses"
+    ses_aws_profile: "default"
+    ses_from_addr: "xxxx1@xxx.com"
+    ses_email: "xxxx1@xxx.com"
+
+AWS SNS
+~~~~~~~
+
+The AWS SNS alerter will send an AWS SNS notification. The body of the notification is formatted the same as with other alerters.
+The AWS SNS alerter uses boto3 and can use credentials in the rule yaml, in standard AWS credential and config files, or
+via environment variables. See http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html for details.
+
+AWS SNS requires one option:
+
+``sns_topic_arn``: The SNS topic's ARN. For example, ``arn:aws:sns:us-east-1:123456789:somesnstopic``
+
+Optional:
+
+``sns_aws_access_key_id``: An access key to connect to SNS with.
+
+``sns_aws_secret_access_key``: The secret key associated with the access key.
+
+``sns_aws_region``: The AWS region in which the SNS resource is located. Default is us-east-1.
+
+``sns_aws_profile``: The AWS profile to use. If none specified, the default will be used.
+
+Example usage when not using aws_profile::
+
+    alert:
+    - sns
+    sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic'
+    sns_aws_access_key_id: 'XXXXXXXXXXXXXXXXXX'
+    sns_aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY'
+    sns_aws_region: 'us-east-1' # You must nest aws_region within your alert configuration so it is not used to sign AWS requests.
+
+Example usage when using aws_profile::
+
+    # Create ~/.aws/credentials
+
+    [default]
+    aws_access_key_id = xxxxxxxxxxxxxxxxxxxx
+    aws_secret_access_key = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
+
+    # Create ~/.aws/config
+
+    [default]
+    region = us-east-1
+
+    # alert rule setting
+
+    alert:
+    - sns
+    sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic'
+    sns_aws_profile: 'default'
+
+Chatwork
+~~~~~~~~
+
+Chatwork will send a notification to a Chatwork application. The body of the notification is formatted the same as with other alerters.
+
+Required:
+
+``chatwork_apikey``: Chatwork API key.
+
+``chatwork_room_id``: The ID of the Chatwork room to notify. The room ID is the number that appears after "rid" at the end of the room's URL in the browser.
+
+Example usage::
+
+    alert:
+    - "chatwork"
+    chatwork_apikey: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    chatwork_room_id: "xxxxxxxxx"
+
 Command
 ~~~~~~~
 
@@ -1523,7 +1737,6 @@ Example usage using new-style format::
 
     - command
     command: ["/bin/send_alert", "--username", "{match[username]}"]
 
-
 Datadog
 ~~~~~~~
 
@@ -1532,9 +1745,116 @@ a message that is longer than 4000 characters, only his first 4000 characters wi
 
 This alert requires two additional options:
 
-``datadog_api_key``: [Datadog API key](https://docs.datadoghq.com/account_management/api-app-keys/#api-keys)
+``datadog_api_key``: [Datadog API key](https://docs.datadoghq.com/account_management/api-app-keys/#api-keys)
+
+``datadog_app_key``: [Datadog application key](https://docs.datadoghq.com/account_management/api-app-keys/#application-keys)
+
+Example usage::
+
+    alert:
+    - "datadog"
+    datadog_api_key: "Datadog API Key"
+    datadog_app_key: "Datadog APP Key"
+
+Debug
+~~~~~
+
+The debug alerter will log the alert information using the Python logger at the info level. It is logged into a Python Logger object with the name ``elastalert`` that can be easily accessed using the ``getLogger`` command.
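+
+Example usage (a minimal sketch; the debug alerter takes no additional options)::
+
+    alert:
+    - "debug"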
+
+Dingtalk
+~~~~~~~~
+
+Dingtalk will send a notification to a Dingtalk application. The body of the notification is formatted the same as with other alerters.
+
+Required:
+
+``dingtalk_access_token``: Dingtalk access token.
+
+``dingtalk_msgtype``: Dingtalk message type. One of ``text``, ``markdown``, ``single_action_card``, ``action_card``.
+
+Required when ``dingtalk_msgtype`` is ``single_action_card``:
+
+``dingtalk_single_title``: The title of the single button.
+
+``dingtalk_single_url``: Jump link for the single button.
+
+Required when ``dingtalk_msgtype`` is ``action_card``:
+
+``dingtalk_btns``: Buttons to display, as a list of objects with ``title`` and ``actionURL``.
+
+Optional when ``dingtalk_msgtype`` is ``action_card``:
+
+``dingtalk_btn_orientation``: "0" arranges buttons vertically; "1" arranges them horizontally.
+
+Example msgtype: text::
+
+    alert:
+    - "dingtalk"
+    dingtalk_access_token: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    dingtalk_msgtype: "text"
+
+
+Example msgtype: markdown::
+
+    alert:
+    - "dingtalk"
+    dingtalk_access_token: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    dingtalk_msgtype: "markdown"
+
+
+Example msgtype: single_action_card::
+
+    alert:
+    - "dingtalk"
+    dingtalk_access_token: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    dingtalk_msgtype: "single_action_card"
+    dingtalk_single_title: "test3"
+    dingtalk_single_url: "https://xxxx.xxx"
+
+
+Example msgtype: action_card::
+
+    alert:
+    - "dingtalk"
+    dingtalk_access_token: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    dingtalk_msgtype: "action_card"
+    dingtalk_btn_orientation: "0"
+    dingtalk_btns: [{"title": "a", "actionURL": "https://xxxx1.xxx"}, {"title": "b", "actionURL": "https://xxxx2.xxx"}]
+
+Discord
+~~~~~~~
+
+Discord will send a notification to a Discord application. The body of the notification is formatted the same as with other alerters.
+
+Required:
+
+``discord_webhook_url``: The webhook URL.
+
+Optional:
+
+``discord_emoji_title``: By default ElastAlert will use the ``:warning:`` emoji when posting to the channel. You can use a different emoji per ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If the ``discord_embed_icon_url`` parameter is provided, the emoji is ignored.
+
+``discord_proxy``: By default ElastAlert will not use a network proxy to send notifications to Discord. Set this option using ``hostname:port`` if you need to use a proxy.
+
+``discord_proxy_login``: The Discord proxy auth username.
+
+``discord_proxy_password``: The Discord proxy auth password.
+ +``discord_embed_color``: embed color. By default ``0xffffff``. + +``discord_embed_footer``: embed footer. + +``discord_embed_icon_url``: You can provide icon_url to use custom image. Provide absolute address of the pciture. + +Example usage:: -``datadog_app_key``: [Datadog application key](https://docs.datadoghq.com/account_management/api-app-keys/#application-keys) + alert: + - "discord" + discord_webhook_url: "Your discord webhook url" + discord_emoji_title: ":lock:" + discord_embed_color: 0xE24D42 + discord_embed_footer: "Message sent by ElastAlert from your computer" + discord_embed_icon_url: "https://humancoders-formations.s3.amazonaws.com/uploads/course/logo/38/thumb_bigger_formation-elasticsearch.png" Email ~~~~~ @@ -1588,341 +1908,239 @@ by the smtp server. ``email_format``: If set to ``html``, the email's MIME type will be set to HTML, and HTML content should correctly render. If you use this, you need to put your own HTML into ``alert_text`` and use ``alert_text_type: alert_text_jinja`` Or ``alert_text_type: alert_text_only``. -Jira -~~~~ - -The JIRA alerter will open a ticket on jira whenever an alert is triggered. You must have a service account for ElastAlert to connect with. -The credentials of the service account are loaded from a separate file. The ticket number will be written to the alert pipeline, and if it -is followed by an email alerter, a link will be included in the email. - -This alert requires four additional options: - -``jira_server``: The hostname of the JIRA server. - -``jira_project``: The project to open the ticket under. - -``jira_issuetype``: The type of issue that the ticket will be filed as. Note that this is case sensitive. - -``jira_account_file``: The path to the file which contains JIRA account credentials. - -For an example JIRA account file, see ``example_rules/jira_acct.yaml``. The account file is also yaml formatted and must contain two fields: - -``user``: The username. - -``password``: The password. - -Optional: - -``jira_assignee``: Assigns an issue to a user. - -``jira_component``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. This is provided for backwards compatibility and will eventually be deprecated. It is preferable to use the plural ``jira_components`` instead. - -``jira_components``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. - -``jira_description``: Similar to ``alert_text``, this text is prepended to the JIRA description. - -``jira_label``: The label or labels to add to the JIRA ticket. This can be a single string or a list of strings. This is provided for backwards compatibility and will eventually be deprecated. It is preferable to use the plural ``jira_labels`` instead. - -``jira_labels``: The label or labels to add to the JIRA ticket. This can be a single string or a list of strings. - -``jira_priority``: The index of the priority to set the issue to. In the JIRA dropdown for priorities, 0 would represent the first priority, -1 the 2nd, etc. - -``jira_watchers``: A list of user names to add as watchers on a JIRA ticket. This can be a single string or a list of strings. - -``jira_bump_tickets``: If true, ElastAlert search for existing tickets newer than ``jira_max_age`` and comment on the ticket with -information about the alert instead of opening another ticket. ElastAlert finds the existing ticket by searching by summary. 
If the -summary has changed or contains special characters, it may fail to find the ticket. If you are using a custom ``alert_subject``, -the two summaries must be exact matches, except by setting ``jira_ignore_in_title``, you can ignore the value of a field when searching. -For example, if the custom subject is "foo occured at bar", and "foo" is the value field X in the match, you can set ``jira_ignore_in_title`` -to "X" and it will only bump tickets with "bar" in the subject. Defaults to false. - -``jira_ignore_in_title``: ElastAlert will attempt to remove the value for this field from the JIRA subject when searching for tickets to bump. -See ``jira_bump_tickets`` description above for an example. - -``jira_max_age``: If ``jira_bump_tickets`` is true, the maximum age of a ticket, in days, such that ElastAlert will comment on the ticket -instead of opening a new one. Default is 30 days. - -``jira_bump_not_in_statuses``: If ``jira_bump_tickets`` is true, a list of statuses the ticket must **not** be in for ElastAlert to comment on -the ticket instead of opening a new one. For example, to prevent comments being added to resolved or closed tickets, set this to 'Resolved' -and 'Closed'. This option should not be set if the ``jira_bump_in_statuses`` option is set. +Exotel +~~~~~~ -Example usage:: +Developers in India can use Exotel alerter, it will trigger an incident to a mobile phone as sms from your exophone. Alert name along with the message body will be sent as an sms. - jira_bump_not_in_statuses: - - Resolved - - Closed +The alerter requires the following option: -``jira_bump_in_statuses``: If ``jira_bump_tickets`` is true, a list of statuses the ticket *must be in* for ElastAlert to comment on -the ticket instead of opening a new one. For example, to only comment on 'Open' tickets -- and thus not 'In Progress', 'Analyzing', -'Resolved', etc. tickets -- set this to 'Open'. This option should not be set if the ``jira_bump_not_in_statuses`` option is set. +``exotel_account_sid``: This is sid of your Exotel account. -Example usage:: +``exotel_auth_token``: Auth token assosiated with your Exotel account. - jira_bump_in_statuses: - - Open +If you don't know how to find your accound sid and auth token, refer - https://support.exotel.com/support/solutions/articles/3000023019-how-to-find-my-exotel-token-and-exotel-sid -``jira_bump_only``: Only update if a ticket is found to bump. This skips ticket creation for rules where you only want to affect existing tickets. +``exotel_to_number``: The phone number where you would like send the notification. -Example usage:: +``exotel_from_number``: Your exophone number from which message will be sent. - jira_bump_only: true +The alerter has one optional argument: -``jira_transition_to``: If ``jira_bump_tickets`` is true, Transition this ticket to the given Status when bumping. Must match the text of your JIRA implementation's Status field. +``exotel_message_body``: Message you want to send in the sms, is you don't specify this argument only the rule name is sent Example usage:: - jira_transition_to: 'Fixed' - + alert: + - "exotel" + exotel_account_sid: "Exotel Account sid" + exotel_auth_token: "Exotel Auth token" + exotel_to_number: "Exotel to Number" + exotel_from_number: "Exotel from Numbeer" +Gitter +~~~~~~ -``jira_bump_after_inactivity``: If this is set, ElastAlert will only comment on tickets that have been inactive for at least this many days. -It only applies if ``jira_bump_tickets`` is true. Default is 0 days. 
+Gitter alerter will send a notification to a predefined Gitter channel. The body of the notification is formatted the same as with other alerters. -Arbitrary Jira fields: +The alerter requires the following option: -ElastAlert supports setting any arbitrary JIRA field that your jira issue supports. For example, if you had a custom field, called "Affected User", you can set it by providing that field name in ``snake_case`` prefixed with ``jira_``. These fields can contain primitive strings or arrays of strings. Note that when you create a custom field in your JIRA server, internally, the field is represented as ``customfield_1111``. In elastalert, you may refer to either the public facing name OR the internal representation. +``gitter_webhook_url``: The webhook URL that includes your auth data and the ID of the channel (room) you want to post to. Go to the Integration Settings +of the channel https://gitter.im/ORGA/CHANNEL#integrations , click 'CUSTOM' and copy the resulting URL. -In addition, if you would like to use a field in the alert as the value for a custom JIRA field, use the field name plus a # symbol in front. For example, if you wanted to set a custom JIRA field called "user" to the value of the field "username" from the match, you would use the following. +Optional: -Example:: +``gitter_msg_level``: By default the alert will be posted with the 'error' level. You can use 'info' if you want the messages to be black instead of red. - jira_user: "#username" +``gitter_proxy``: By default ElastAlert will not use a network proxy to send notifications to Gitter. Set this option using ``hostname:port`` if you need to use a proxy. Example usage:: - jira_arbitrary_singular_field: My Name - jira_arbitrary_multivalue_field: - - Name 1 - - Name 2 - jira_customfield_12345: My Custom Value - jira_customfield_9999: - - My Custom Value 1 - - My Custom Value 2 - -OpsGenie -~~~~~~~~ - -OpsGenie alerter will create an alert which can be used to notify Operations people of issues or log information. An OpsGenie ``API`` -integration must be created in order to acquire the necessary ``opsgenie_key`` rule variable. Currently the OpsGenieAlerter only creates -an alert, however it could be extended to update or close existing alerts. + alert: + - "gitter" + gitter_webhook_url: "Your Gitter Webhook URL" + gitter_msg_level: "error" -It is necessary for the user to create an OpsGenie Rest HTTPS API `integration page `_ in order to create alerts. +GoogleChat +~~~~~~~~~~ +GoogleChat alerter will send a notification to a predefined GoogleChat channel. The body of the notification is formatted the same as with other alerters. -The OpsGenie alert requires one option: +The alerter requires the following options: -``opsgenie_key``: The randomly generated API Integration key created by OpsGenie. +``googlechat_webhook_url``: The webhook URL that includes the channel (room) you want to post to. Go to the Google Chat website https://chat.google.com and choose the channel in which you wish to receive the notifications. Select 'Configure Webhooks' to create a new webhook or to copy the URL from an existing one. You can use a list of URLs to send to multiple channels. Optional: -``opsgenie_account``: The OpsGenie account to integrate with. -``opsgenie_addr``: The OpsGenie URL to to connect against, default is ``https://api.opsgenie.com/v2/alerts`` -``opsgenie_recipients``: A list OpsGenie recipients who will be notified by the alert. -``opsgenie_recipients_args``: Map of arguments used to format opsgenie_recipients. 
-``opsgenie_default_receipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccesful. -``opsgenie_teams``: A list of OpsGenie teams to notify (useful for schedules with escalation). -``opsgenie_teams_args``: Map of arguments used to format opsgenie_teams (useful for assigning the alerts to teams based on some data) -``opsgenie_default_teams``: List of default teams to notify when the formatting of opsgenie_teams is unsuccesful. -``opsgenie_tags``: A list of tags for this alert. - -``opsgenie_message``: Set the OpsGenie message to something other than the rule name. The message can be formatted with fields from the first match e.g. "Error occurred for {app_name} at {timestamp}.". - -``opsgenie_alias``: Set the OpsGenie alias. The alias can be formatted with fields from the first match e.g "{app_name} error". - -``opsgenie_subject``: A string used to create the title of the OpsGenie alert. Can use Python string formatting. - -``opsgenie_subject_args``: A list of fields to use to format ``opsgenie_subject`` if it contains formaters. - -``opsgenie_priority``: Set the OpsGenie priority level. Possible values are P1, P2, P3, P4, P5. - -``opsgenie_details``: Map of custom key/value pairs to include in the alert's details. The value can sourced from either fields in the first match, environment variables, or a constant value. +``googlechat_format``: Formatting for the notification. Can be either 'card' or 'basic' (default). -``opsgenie_proxy``: By default ElastAlert will not use a network proxy to send notifications to OpsGenie. Set this option using ``hostname:port`` if you need to use a proxy. +``googlechat_header_title``: Sets the text for the card header title. (Only used if format=card) -Example usage:: +``googlechat_header_subtitle``: Sets the text for the card header subtitle. (Only used if format=card) - opsgenie_details: - Author: 'Bob Smith' # constant value - Environment: '$VAR' # environment variable - Message: { field: message } # field in the first match +``googlechat_header_image``: URL for the card header icon. (Only used if format=card) -AWS SES -~~~~~~~ +``googlechat_footer_kibanalink``: URL to Kibana to include in the card footer. (Only used if format=card) -The AWS SES alerter is similar to Email alerter but uses AWS SES to send emails. The AWS SES alerter can use AWS credentials -from the rule yaml, standard AWS config files or environment variables. +HTTP POST +~~~~~~~~~ -AWS SES requires one option: +This alert type will send results to a JSON endpoint using HTTP POST. The key names are configurable so this is compatible with almost any endpoint. By default, the JSON will contain all the items from the match, unless you specify http_post_payload, in which case it will only contain those items. -``ses_email``: An address or list of addresses to sent the alert to. +Required: -``ses_from_addr``: This sets the From header in the email. +``http_post_url``: The URL to POST. Optional: -``ses_aws_access_key``: An access key to connect to AWS SES with. - -``ses_aws_secret_key``: The secret key associated with the access key. - -``ses_aws_region``: The AWS region in which the AWS SES resource is located. Default is us-east-1 +``http_post_payload``: List of keys:values to use as the content of the POST. Example - ip:clientip will map the value from the clientip index of Elasticsearch to JSON key named ip. If not defined, all the Elasticsearch keys will be sent. -``ses_aws_profile``: The AWS profile to use. 
If none specified, the default will be used. +``http_post_static_payload``: Key:value pairs of static parameters to be sent, along with the Elasticsearch results. Put your authentication or other information here. -``ses_email_reply_to``: This sets the Reply-To header in the email. +``http_post_headers``: Key:value pairs of headers to be sent as part of the request. -``ses_cc``: This adds the CC emails to the list of recipients. By default, this is left empty. +``http_post_proxy``: URL of proxy, if required. -``ses_bcc``: This adds the BCC emails to the list of recipients but does not show up in the email message. By default, this is left empty. +``http_post_all_values``: Boolean of whether or not to include every key value pair from the match in addition to those in http_post_payload and http_post_static_payload. Defaults to True if http_post_payload is not specified, otherwise False. -Example When not using aws_profile usage:: +``http_post_timeout``: The timeout value, in seconds, for making the post. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. - alert: - - "ses" - ses_aws_access_key_id: "XXXXXXXXXXXXXXXXXX'" - ses_aws_secret_access_key: "YYYYYYYYYYYYYYYYYYYY" - ses_aws_region: "us-east-1" - ses_from_addr: "xxxx1@xxx.com" - ses_email: "xxxx1@xxx.com" +Example usage:: -Example When to use aws_profile usage:: + alert: post + http_post_url: "http://example.com/api" + http_post_payload: + ip: clientip + http_post_static_payload: + apikey: abc123 + http_post_headers: + authorization: Basic 123dr3234 - # Create ~/.aws/credentials +JIRA +~~~~ - [default] - aws_access_key_id = xxxxxxxxxxxxxxxxxxxx - aws_secret_access_key = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy +The JIRA alerter will open a ticket on jira whenever an alert is triggered. You must have a service account for ElastAlert to connect with. +The credentials of the service account are loaded from a separate file. The ticket number will be written to the alert pipeline, and if it +is followed by an email alerter, a link will be included in the email. - # Create ~/.aws/config +This alert requires four additional options: - [default] - region = us-east-1 +``jira_server``: The hostname of the JIRA server. - # alert rule setting +``jira_project``: The project to open the ticket under. - alert: - - "ses" - ses_aws_profile: "default" - ses_from_addr: "xxxx1@xxx.com" - ses_email: "xxxx1@xxx.com" +``jira_issuetype``: The type of issue that the ticket will be filed as. Note that this is case sensitive. -AWS SNS -~~~~~~~ +``jira_account_file``: The path to the file which contains JIRA account credentials. -The AWS SNS alerter will send an AWS SNS notification. The body of the notification is formatted the same as with other alerters. -The AWS SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or -via environment variables. See http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html for details. +For an example JIRA account file, see ``example_rules/jira_acct.yaml``. The account file is also yaml formatted and must contain two fields: -AWS SNS requires one option: +``user``: The username. -``sns_topic_arn``: The SNS topic's ARN. For example, ``arn:aws:sns:us-east-1:123456789:somesnstopic`` +``password``: The password. Optional: -``sns_aws_access_key_id``: An access key to connect to SNS with. - -``sns_aws_secret_access_key``: The secret key associated with the access key. 
- -``sns_aws_region``: The AWS region in which the SNS resource is located. Default is us-east-1 - -``sns_aws_profile``: The AWS profile to use. If none specified, the default will be used. - -Example When not using aws_profile usage:: +``jira_assignee``: Assigns an issue to a user. - alert: - - sns - sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic' - sns_aws_access_key_id: 'XXXXXXXXXXXXXXXXXX'' - sns_aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY' - sns_aws_region: 'us-east-1' # You must nest aws_region within your alert configuration so it is not used to sign AWS requests. - -Example When to use aws_profile usage:: +``jira_component``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. This is provided for backwards compatibility and will eventually be deprecated. It is preferable to use the plural ``jira_components`` instead. - # Create ~/.aws/credentials +``jira_components``: The name of the component or components to set the ticket to. This can be a single string or a list of strings. - [default] - aws_access_key_id = xxxxxxxxxxxxxxxxxxxx - aws_secret_access_key = yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy +``jira_description``: Similar to ``alert_text``, this text is prepended to the JIRA description. - # Create ~/.aws/config +``jira_label``: The label or labels to add to the JIRA ticket. This can be a single string or a list of strings. This is provided for backwards compatibility and will eventually be deprecated. It is preferable to use the plural ``jira_labels`` instead. - [default] - region = us-east-1 +``jira_labels``: The label or labels to add to the JIRA ticket. This can be a single string or a list of strings. - # alert rule setting +``jira_priority``: The index of the priority to set the issue to. In the JIRA dropdown for priorities, 0 would represent the first priority, +1 the 2nd, etc. - alert: - - sns - sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic' - sns_aws_profile: 'default' +``jira_watchers``: A list of user names to add as watchers on a JIRA ticket. This can be a single string or a list of strings. -MS Teams -~~~~~~~~ +``jira_bump_tickets``: If true, ElastAlert search for existing tickets newer than ``jira_max_age`` and comment on the ticket with +information about the alert instead of opening another ticket. ElastAlert finds the existing ticket by searching by summary. If the +summary has changed or contains special characters, it may fail to find the ticket. If you are using a custom ``alert_subject``, +the two summaries must be exact matches, except by setting ``jira_ignore_in_title``, you can ignore the value of a field when searching. +For example, if the custom subject is "foo occured at bar", and "foo" is the value field X in the match, you can set ``jira_ignore_in_title`` +to "X" and it will only bump tickets with "bar" in the subject. Defaults to false. -MS Teams alerter will send a notification to a predefined Microsoft Teams channel. +``jira_ignore_in_title``: ElastAlert will attempt to remove the value for this field from the JIRA subject when searching for tickets to bump. +See ``jira_bump_tickets`` description above for an example. -The alerter requires the following options: +``jira_max_age``: If ``jira_bump_tickets`` is true, the maximum age of a ticket, in days, such that ElastAlert will comment on the ticket +instead of opening a new one. Default is 30 days. 
-``ms_teams_webhook_url``: The webhook URL that includes your auth data and the ID of the channel you want to post to. Go to the Connectors -menu in your channel and configure an Incoming Webhook, then copy the resulting URL. You can use a list of URLs to send to multiple channels. +``jira_bump_not_in_statuses``: If ``jira_bump_tickets`` is true, a list of statuses the ticket must **not** be in for ElastAlert to comment on +the ticket instead of opening a new one. For example, to prevent comments being added to resolved or closed tickets, set this to 'Resolved' +and 'Closed'. This option should not be set if the ``jira_bump_in_statuses`` option is set. -``ms_teams_alert_summary``: Summary should be configured according to `MS documentation `_, although it seems not displayed by Teams currently. +Example usage:: -Optional: + jira_bump_not_in_statuses: + - Resolved + - Closed -``ms_teams_theme_color``: By default the alert will be posted without any color line. To add color, set this attribute to a HTML color value e.g. ``#ff0000`` for red. +``jira_bump_in_statuses``: If ``jira_bump_tickets`` is true, a list of statuses the ticket *must be in* for ElastAlert to comment on +the ticket instead of opening a new one. For example, to only comment on 'Open' tickets -- and thus not 'In Progress', 'Analyzing', +'Resolved', etc. tickets -- set this to 'Open'. This option should not be set if the ``jira_bump_not_in_statuses`` option is set. -``ms_teams_proxy``: By default ElastAlert will not use a network proxy to send notifications to MS Teams. Set this option using ``hostname:port`` if you need to use a proxy. +Example usage:: -``ms_teams_alert_fixed_width``: By default this is ``False`` and the notification will be sent to MS Teams as-is. Teams supports a partial Markdown implementation, which means asterisk, underscore and other characters may be interpreted as Markdown. Currenlty, Teams does not fully implement code blocks. Setting this attribute to ``True`` will enable line by line code blocks. It is recommended to enable this to get clearer notifications in Teams. + jira_bump_in_statuses: + - Open -Slack -~~~~~ +``jira_bump_only``: Only update if a ticket is found to bump. This skips ticket creation for rules where you only want to affect existing tickets. -Slack alerter will send a notification to a predefined Slack channel. The body of the notification is formatted the same as with other alerters. +Example usage:: -The alerter requires the following option: + jira_bump_only: true -``slack_webhook_url``: The webhook URL that includes your auth data and the ID of the channel (room) you want to post to. Go to the Incoming Webhooks -section in your Slack account https://XXXXX.slack.com/services/new/incoming-webhook , choose the channel, click 'Add Incoming Webhooks Integration' -and copy the resulting URL. You can use a list of URLs to send to multiple channels. +``jira_transition_to``: If ``jira_bump_tickets`` is true, Transition this ticket to the given Status when bumping. Must match the text of your JIRA implementation's Status field. -Optional: +Example usage:: -``slack_username_override``: By default Slack will use your username when posting to the channel. Use this option to change it (free text). + jira_transition_to: 'Fixed' -``slack_channel_override``: Incoming webhooks have a default channel, but it can be overridden. A public channel can be specified "#other-channel", and a Direct Message with "@username". 
-``slack_emoji_override``: By default ElastAlert will use the :ghost: emoji when posting to the channel. You can use a different emoji per -ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored. -``slack_icon_url_override``: By default ElastAlert will use the :ghost: emoji when posting to the channel. You can provide icon_url to use custom image. -Provide absolute address of the pciture. +``jira_bump_after_inactivity``: If this is set, ElastAlert will only comment on tickets that have been inactive for at least this many days. +It only applies if ``jira_bump_tickets`` is true. Default is 0 days. -``slack_msg_color``: By default the alert will be posted with the 'danger' color. You can also use 'good' or 'warning' colors. +Arbitrary Jira fields: -``slack_parse_override``: By default the notification message is escaped 'none'. You can also use 'full'. +ElastAlert supports setting any arbitrary JIRA field that your jira issue supports. For example, if you had a custom field, called "Affected User", you can set it by providing that field name in ``snake_case`` prefixed with ``jira_``. These fields can contain primitive strings or arrays of strings. Note that when you create a custom field in your JIRA server, internally, the field is represented as ``customfield_1111``. In elastalert, you may refer to either the public facing name OR the internal representation. -``slack_text_string``: Notification message you want to add. +In addition, if you would like to use a field in the alert as the value for a custom JIRA field, use the field name plus a # symbol in front. For example, if you wanted to set a custom JIRA field called "user" to the value of the field "username" from the match, you would use the following. -``slack_proxy``: By default ElastAlert will not use a network proxy to send notifications to Slack. Set this option using ``hostname:port`` if you need to use a proxy. +Example:: -``slack_alert_fields``: You can add additional fields to your slack alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`. + jira_user: "#username" -``slack_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to False if you want to ignore SSL errors. +Example usage:: -``slack_title``: Sets a title for the message, this shows up as a blue text at the start of the message + jira_arbitrary_singular_field: My Name + jira_arbitrary_multivalue_field: + - Name 1 + - Name 2 + jira_customfield_12345: My Custom Value + jira_customfield_9999: + - My Custom Value 1 + - My Custom Value 2 -``slack_title_link``: You can add a link in your Slack notification by setting this to a valid URL. Requires slack_title to be set. +Line Notify +~~~~~~~~~~~ -``slack_timeout``: You can specify a timeout value, in seconds, for making communicating with Slack. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. +Line Notify will send notification to a Line application. The body of the notification is formatted the same as with other alerters. -``slack_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the slack notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``. 
+Required: -``slack_kibana_discover_color``: The color of the Kibana Discover url attachment. Defaults to ``#ec4b98``. +``linenotify_access_token``: The access token that you got from https://notify-bot.line.me/my/ -``slack_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. +Example usage:: -``slack_ca_certs``: path to a CA cert bundle to use to verify SSL connections. + alert: + - "linenotify" + linenotify_access_token: "Your linenotify access token" Mattermost ~~~~~~~~~~ @@ -1953,47 +2171,90 @@ Provide absolute address of the picture or Base64 data url. ``mattermost_msg_fields``: You can add fields to your Mattermost alerts using this option. You can specify the title using `title` and the text value using `value`. Additionally you can specify whether this field should be a `short` field using `short: true`. If you set `args` and `value` is a formattable string, ElastAlert will format the incident key based on the provided array of fields from the rule or match. See https://docs.mattermost.com/developer/message-attachments.html#fields for more information. +Example mattermost_msg_fields:: -Telegram -~~~~~~~~ -Telegram alerter will send a notification to a predefined Telegram username or channel. The body of the notification is formatted the same as with other alerters. + mattermost_msg_fields: + - title: Stack + value: "{0} {1}" # interpolate fields mentioned in args + short: false + args: ["type", "msg.status_code"] # fields from doc + - title: Name + value: static field + short: false -The alerter requires the following two options: +Microsoft Teams +~~~~~~~~~~~~~~~ -``telegram_bot_token``: The token is a string along the lines of ``110201543:AAHdqTcvCH1vGWJxfSeofSAs0K5PALDsaw`` that will be required to authorize the bot and send requests to the Bot API. You can learn about obtaining tokens and generating new ones in this document https://core.telegram.org/bots#6-botfather +Microsoft Teams alerter will send a notification to a predefined Microsoft Teams channel. -``telegram_room_id``: Unique identifier for the target chat or username of the target channel using telegram chat_id (in the format "-xxxxxxxx") +The alerter requires the following options: + +``ms_teams_webhook_url``: The webhook URL that includes your auth data and the ID of the channel you want to post to. Go to the Connectors +menu in your channel and configure an Incoming Webhook, then copy the resulting URL. You can use a list of URLs to send to multiple channels. + +``ms_teams_alert_summary``: Summary should be configured according to `MS documentation `_, although it seems not displayed by Teams currently. Optional: -``telegram_api_url``: Custom domain to call Telegram Bot API. Default to api.telegram.org +``ms_teams_theme_color``: By default the alert will be posted without any color line. To add color, set this attribute to a HTML color value e.g. ``#ff0000`` for red. -``telegram_proxy``: By default ElastAlert will not use a network proxy to send notifications to Telegram. Set this option using ``hostname:port`` if you need to use a proxy. +``ms_teams_proxy``: By default ElastAlert will not use a network proxy to send notifications to MS Teams. Set this option using ``hostname:port`` if you need to use a proxy. -``telegram_proxy_login``: The Telegram proxy auth username. +``ms_teams_alert_fixed_width``: By default this is ``False`` and the notification will be sent to MS Teams as-is. 
Teams supports a partial Markdown implementation, which means asterisk, underscore and other characters may be interpreted as Markdown. Currently, Teams does not fully implement code blocks. Setting this attribute to ``True`` will enable line by line code blocks. It is recommended to enable this to get clearer notifications in Teams.
+
+Example usage::
+
+    alert:
+    - "ms_teams"
+    ms_teams_alert_summary: "Alert"
+    ms_teams_theme_color: "#6600ff"
+    ms_teams_webhook_url: "MS Teams Webhook URL"
+
+OpsGenie
+~~~~~~~~
+
+The OpsGenie alerter will create an alert which can be used to notify Operations people of issues or log information. An OpsGenie ``API``
+integration must be created in order to acquire the necessary ``opsgenie_key`` rule variable. Currently the OpsGenieAlerter only creates
+an alert, however it could be extended to update or close existing alerts.
+
+It is necessary for the user to create an OpsGenie Rest HTTPS API `integration page `_ in order to create alerts.
+
+The OpsGenie alert requires one option:
+
+``opsgenie_key``: The randomly generated API Integration key created by OpsGenie.
+
+Optional:
+
+``opsgenie_account``: The OpsGenie account to integrate with.
+``opsgenie_addr``: The OpsGenie URL to connect against, default is ``https://api.opsgenie.com/v2/alerts``
+``opsgenie_recipients``: A list of OpsGenie recipients who will be notified by the alert.
+``opsgenie_recipients_args``: Map of arguments used to format opsgenie_recipients.
+``opsgenie_default_receipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccessful.
+``opsgenie_teams``: A list of OpsGenie teams to notify (useful for schedules with escalation).
+``opsgenie_teams_args``: Map of arguments used to format opsgenie_teams (useful for assigning the alerts to teams based on some data).
+``opsgenie_default_teams``: List of default teams to notify when the formatting of opsgenie_teams is unsuccessful.
+``opsgenie_tags``: A list of tags for this alert.
+
+``opsgenie_message``: Set the OpsGenie message to something other than the rule name. The message can be formatted with fields from the first match, e.g. "Error occurred for {app_name} at {timestamp}.".
+
+``opsgenie_alias``: Set the OpsGenie alias. The alias can be formatted with fields from the first match, e.g. "{app_name} error".
+
+``opsgenie_subject``: A string used to create the title of the OpsGenie alert. Can use Python string formatting.
-``googlechat_footer_kibanalink``: URL to Kibana to include in the card footer. (Only used if format=card) +``opsgenie_subject_args``: A list of fields to use to format ``opsgenie_subject`` if it contains formaters. + +``opsgenie_priority``: Set the OpsGenie priority level. Possible values are P1, P2, P3, P4, P5. + +``opsgenie_details``: Map of custom key/value pairs to include in the alert's details. The value can sourced from either fields in the first match, environment variables, or a constant value. + +``opsgenie_proxy``: By default ElastAlert will not use a network proxy to send notifications to OpsGenie. Set this option using ``hostname:port`` if you need to use a proxy. + +Example usage:: + opsgenie_details: + Author: 'Bob Smith' # constant value + Environment: '$VAR' # environment variable + Message: { field: message } # field in the first match PagerDuty ~~~~~~~~~ @@ -2051,7 +2312,6 @@ See https://developer.pagerduty.com/docs/events-api-v2/trigger-events/ ``pagerduty_v2_payload_include_all_info``: If True, this will include the entire Elasticsearch document as a custom detail field called "information" in the PagerDuty alert. - PagerTree ~~~~~~~~~ @@ -2061,107 +2321,28 @@ The alerter requires the following options: ``pagertree_integration_url``: URL generated by PagerTree for the integration. -``pagertree_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerTree. Set this option using hostname:port if you need to use a proxy. - -Exotel -~~~~~~ - -Developers in India can use Exotel alerter, it will trigger an incident to a mobile phone as sms from your exophone. Alert name along with the message body will be sent as an sms. - -The alerter requires the following option: - -``exotel_account_sid``: This is sid of your Exotel account. - -``exotel_auth_token``: Auth token assosiated with your Exotel account. - -If you don't know how to find your accound sid and auth token, refer - https://support.exotel.com/support/solutions/articles/3000023019-how-to-find-my-exotel-token-and-exotel-sid - -``exotel_to_number``: The phone number where you would like send the notification. - -``exotel_from_number``: Your exophone number from which message will be sent. - -The alerter has one optional argument: - -``exotel_message_body``: Message you want to send in the sms, is you don't specify this argument only the rule name is sent - - -Twilio -~~~~~~ - -Twilio alerter will trigger an incident to a mobile phone as an sms from your twilio phone number. The sms will contain the alert name. You may use either twilio SMS or twilio copilot -to send the message, controlled by the ``twilio_use_copilot`` option. - -Note that when twilio copilot *is* used the ``twilio_message_service_sid`` option is required. Likewise, when *not* using twilio copilot, the ``twilio_from_number`` option is required. - -The alerter requires the following options: - -``twilio_account_sid``: This is sid of your twilio account. - -``twilio_auth_token``: Auth token assosiated with your twilio account. - -``twilio_to_number``: The phone number where you would like send the notification. - -Either one of - * ``twilio_from_number``: Your twilio phone number from which message will be sent. - * ``twilio_message_service_sid``: The SID of your twilio message service. - -Optional: - -``twilio_use_copilot``: Whether or not to use twilio copilot, False by default. +``pagertree_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerTree. 
 Example usage::
 alert:
- - twilio # With Copilot
- twilio_use_copilot: True
- twilio_to_number: "0123456789"
- twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345"
- twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567"
- twilio_message_service_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567"
-
- - twilio # With Legacy SMS
- twilio_use_copilot: False
- twilio_to_number: "0123456789"
- twilio_from_number: "9876543210"
- twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345"
- twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567"
-
-Splunk On-Call (Formerly VictorOps)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Splunk On-Call (Formerly VictorOps) alerter will trigger an incident to a predefined Splunk On-Call (Formerly VictorOps) routing key. The body of the notification is formatted the same as with other alerters.
-
-The alerter requires the following options:
-
-``victorops_api_key``: API key generated under the 'REST Endpoint' in the Integrations settings.
-
-``victorops_routing_key``: Splunk On-Call (Formerly VictorOps) routing key to route the alert to.
-
-``victorops_message_type``: Splunk On-Call (Formerly VictorOps) field to specify severity level. Must be one of the following: INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY
-
-Optional:
-
-``victorops_entity_id``: The identity of the incident used by Splunk On-Call (Formerly VictorOps) to correlate incidents throughout the alert lifecycle. If not defined, Splunk On-Call (Formerly VictorOps) will assign a random string to each alert.
-
-``victorops_entity_display_name``: Human-readable name of alerting entity to summarize incidents without affecting the life-cycle workflow.
-
-``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to Splunk On-Call (Formerly VictorOps). Set this option using ``hostname:port`` if you need to use a proxy.
-
-Gitter
-~~~~~~
-
-Gitter alerter will send a notification to a predefined Gitter channel. The body of the notification is formatted the same as with other alerters.
+ - "pagertree"
+ pagertree_integration_url: "PagerTree Integration URL"
-The alerter requires the following option:
+Squadcast
+~~~~~~~~~
-``gitter_webhook_url``: The webhook URL that includes your auth data and the ID of the channel (room) you want to post to. Go to the Integration Settings
-of the channel https://gitter.im/ORGA/CHANNEL#integrations , click 'CUSTOM' and copy the resulting URL.
+Alerts can be sent to Squadcast using the `http post` method described above, and Squadcast will process it, send Phone, SMS, Email and Push notifications to the relevant person(s), and let them take action.
-Optional:
+Configuration variables in rules YAML file::
-``gitter_msg_level``: By default the alert will be posted with the 'error' level. You can use 'info' if you want the messages to be black instead of red.
+    alert: post
+    http_post_url:
+    http_post_static_payload:
+      Title:
+    http_post_all_values: true
-``gitter_proxy``: By default ElastAlert will not use a network proxy to send notifications to Gitter. Set this option using ``hostname:port`` if you need to use a proxy.
+For more details, you can refer to the `Squadcast documentation `_.
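+For illustration only, the same snippet with placeholder values filled in, in the
+style of the other examples in this document (the URL and title below are
+placeholders, not real Squadcast values)::
+
+    alert: post
+    http_post_url: "Squadcast Incident Webhook URL"
+    http_post_static_payload:
+      Title: "Name of the Incident"
+    http_post_all_values: true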
 ServiceNow
 ~~~~~~~~~~
@@ -2195,186 +2376,174 @@ Optional:
 ``servicenow_proxy``: By default ElastAlert will not use a network proxy to send notifications to ServiceNow. Set this option using ``hostname:port`` if you need to use a proxy.
+Example usage::
-Debug
-~~~~~
-
-The debug alerter will log the alert information using the Python logger at the info level. It is logged into a Python Logger object with the name ``elastalert`` that can be easily accessed using the ``getLogger`` command.
+    alert:
+      - "servicenow"
+    servicenow_rest_url: "servicenow rest url"
+    username: "user"
+    password: "password"
+    short_description: "xxxxxx"
+    comments: "xxxxxx"
+    assignment_group: "xxxxxx"
+    category: "xxxxxx"
+    subcategory: "xxxxxx"
+    cmdb_ci: "xxxxxx"
+    caller_id: "xxxxxx"
-Stomp
+Slack
 ~~~~~
-This alert type will use the STOMP protocol in order to push a message to a broker like ActiveMQ or RabbitMQ. The message body is a JSON string containing the alert details.
-The default values will work with a pristine ActiveMQ installation.
-
-The alerter requires the following options:
-
-``stomp_hostname``: The STOMP host to use, defaults to localhost.
-
-``stomp_hostport``: The STOMP port to use, defaults to 61613.
+Slack alerter will send a notification to a predefined Slack channel. The body of the notification is formatted the same as with other alerters.
-``stomp_login``: The STOMP login to use, defaults to admin.
+The alerter requires the following option:
-``stomp_password``: The STOMP password to use, defaults to admin.
+``slack_webhook_url``: The webhook URL that includes your auth data and the ID of the channel (room) you want to post to. Go to the Incoming Webhooks
+section in your Slack account https://XXXXX.slack.com/services/new/incoming-webhook , choose the channel, click 'Add Incoming Webhooks Integration'
+and copy the resulting URL. You can use a list of URLs to send to multiple channels.
 Optional:
-``stomp_ssl``: Connect the STOMP host using TLS, defaults to False.
-
-``stomp_destination``: The STOMP destination to use, defaults to /queue/ALERT
-
-The stomp_destination field depends on the broker, the /queue/ALERT example is the nomenclature used by ActiveMQ. Each broker has its own logic.
+``slack_username_override``: By default Slack will use your username when posting to the channel. Use this option to change it (free text).
-Alerta
-~~~~~~
+``slack_channel_override``: Incoming webhooks have a default channel, but it can be overridden. A public channel can be specified "#other-channel", and a Direct Message with "@username".
-Alerta alerter will post an alert in the Alerta server instance through the alert API endpoint.
-See https://docs.alerta.io/en/latest/api/alert.html for more details on the Alerta JSON format.
+``slack_emoji_override``: By default ElastAlert will use the ``:ghost:`` emoji when posting to the channel. You can use a different emoji per
+ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored.
-For Alerta 5.0
+``slack_icon_url_override``: By default ElastAlert will use the ``:ghost:`` emoji when posting to the channel. You can provide icon_url to use a custom image.
+Provide the absolute address of the picture.
-Required:
+``slack_msg_color``: By default the alert will be posted with the 'danger' color. You can also use 'good' or 'warning' colors.
-``alerta_api_url``: API server URL.
+``slack_parse_override``: By default the notification message is escaped using 'none'. You can also use 'full'.
-Optional:
+``slack_text_string``: Notification message you want to add.
-``alerta_api_key``: This is the api key for alerta server, sent in an ``Authorization`` HTTP header. If not defined, no Authorization header is sent.
+``slack_proxy``: By default ElastAlert will not use a network proxy to send notifications to Slack. Set this option using ``hostname:port`` if you need to use a proxy.
-``alerta_use_qk_as_resource``: If true and query_key is present, this will override ``alerta_resource`` field with the ``query_key value`` (Can be useful if ``query_key`` is a hostname).
+``slack_alert_fields``: You can add additional fields to your slack alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`.
-``alerta_use_match_timestamp``: If true, it will use the timestamp of the first match as the ``createTime`` of the alert. otherwise, the current server time is used.
+Example slack_alert_fields::
-``alerta_api_skip_ssl``: Defaults to False.
+    slack_alert_fields:
+    - title: Host
+      value: monitor.host
+      short: true
+    - title: Status
+      value: monitor.status
+      short: true
+    - title: Zone
+      value: beat.name
+      short: true
-``alert_missing_value``: Text to replace any match field not found when formating strings. Defaults to ````.
+``slack_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors.
-The following options dictate the values of the API JSON payload:
+``slack_title``: Sets a title for the message; this shows up as blue text at the start of the message.
-``alerta_severity``: Defaults to "warning".
+``slack_title_link``: You can add a link in your Slack notification by setting this to a valid URL. Requires slack_title to be set.
-``alerta_timeout``: Defaults 84600 (1 Day).
+``slack_timeout``: You can specify a timeout value, in seconds, for communicating with Slack. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles.
-``alerta_type``: Defaults to "elastalert".
+``slack_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the slack notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``.
-The following options use Python-like string syntax ``{}`` or ``%()s`` to access parts of the match, similar to the CommandAlerter. Ie: "Alert for {clientip}".
-If the referenced key is not found in the match, it is replaced by the text indicated by the option ``alert_missing_value``.
+``slack_kibana_discover_color``: The color of the Kibana Discover url attachment. Defaults to ``#ec4b98``.
-``alerta_resource``: Defaults to "elastalert".
+``slack_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``.
-``alerta_service``: Defaults to "elastalert".
+``slack_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate.
-``alerta_origin``: Defaults to "elastalert".
-``alerta_environment``: Defaults to "Production".
-``alerta_group``: Defaults to "".
-``alerta_correlate``: Defaults to an empty list.
-``alerta_tags``: Defaults to an empty list.
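+Example usage (the webhook URL, username and color below are placeholder values
+chosen for illustration)::
+
+    alert:
+      - "slack"
+    slack_webhook_url: "Slack Webhook URL"
+    slack_username_override: "elastalert"
+    slack_msg_color: "warning"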
+Splunk On-Call (Formerly VictorOps)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Splunk On-Call (Formerly VictorOps) alerter will trigger an incident to a predefined Splunk On-Call (Formerly VictorOps) routing key. The body of the notification is formatted the same as with other alerters.
+
+The alerter requires the following options:
+
+``victorops_api_key``: API key generated under the 'REST Endpoint' in the Integrations settings.
+``victorops_routing_key``: Splunk On-Call (Formerly VictorOps) routing key to route the alert to.
-``alerta_event``: Defaults to the rule's name.
+``victorops_message_type``: Splunk On-Call (Formerly VictorOps) field to specify severity level. Must be one of the following: INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY.
-``alerta_text``: Defaults to the rule's text according to its type.
+Optional:
-``alerta_value``: Defaults to "".
+``victorops_entity_id``: The identity of the incident used by Splunk On-Call (Formerly VictorOps) to correlate incidents throughout the alert lifecycle. If not defined, Splunk On-Call (Formerly VictorOps) will assign a random string to each alert.
-The ``attributes`` dictionary is built by joining the lists from ``alerta_attributes_keys`` and ``alerta_attributes_values``, considered in order.
+``victorops_entity_display_name``: Human-readable name of alerting entity to summarize incidents without affecting the life-cycle workflow.
+``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to Splunk On-Call (Formerly VictorOps). Set this option using ``hostname:port`` if you need to use a proxy.
-Example usage using old-style format::
+Example usage::
 alert:
- - alerta
- alerta_api_url: "http://youralertahost/api/alert"
- alerta_attributes_keys: ["hostname", "TimestampEvent", "senderIP" ]
- alerta_attributes_values: ["%(key)s", "%(logdate)s", "%(sender_ip)s" ]
- alerta_correlate: ["ProbeUP","ProbeDOWN"]
- alerta_event: "ProbeUP"
- alerta_text: "Probe %(hostname)s is UP at %(logdate)s GMT"
- alerta_value: "UP"
+ - "victorops"
+ victorops_api_key: "VictorOps API Key"
+ victorops_routing_key: "VictorOps routing Key"
+ victorops_message_type: "INFO"
-Example usage using new-style format::
-
- alert:
- - alerta
- alerta_attributes_values: ["{key}", "{logdate}", "{sender_ip}" ]
- alerta_text: "Probe {hostname} is UP at {logdate} GMT"
+Stomp
+~~~~~
+This alert type will use the STOMP protocol in order to push a message to a broker like ActiveMQ or RabbitMQ. The message body is a JSON string containing the alert details.
+The default values will work with a pristine ActiveMQ installation.
+The alerter requires the following options:
-HTTP POST
-~~~~~~~~~
-This alert type will send results to a JSON endpoint using HTTP POST. The key names are configurable so this is compatible with almost any endpoint. By default, the JSON will contain all the items from the match, unless you specify http_post_payload, in which case it will only contain those items.
-Required:
-``http_post_url``: The URL to POST.
+``stomp_hostname``: The STOMP host to use, defaults to ``localhost``.
+``stomp_hostport``: The STOMP port to use, defaults to ``61613``.
+``stomp_login``: The STOMP login to use, defaults to ``admin``.
+``stomp_password``: The STOMP password to use, defaults to ``admin``.
 Optional:
-``http_post_payload``: List of keys:values to use as the content of the POST. Example - ip:clientip will map the value from the clientip index of Elasticsearch to JSON key named ip. If not defined, all the Elasticsearch keys will be sent.
-``http_post_static_payload``: Key:value pairs of static parameters to be sent, along with the Elasticsearch results. Put your authentication or other information here.
-``http_post_headers``: Key:value pairs of headers to be sent as part of the request.
-``http_post_proxy``: URL of proxy, if required.
+``stomp_ssl``: Connect the STOMP host using TLS, defaults to ``False``.
-``http_post_all_values``: Boolean of whether or not to include every key value pair from the match in addition to those in http_post_payload and http_post_static_payload. Defaults to True if http_post_payload is not specified, otherwise False.
+``stomp_destination``: The STOMP destination to use, defaults to ``/queue/ALERT``.
-``http_post_timeout``: The timeout value, in seconds, for making the post. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles.
+The stomp_destination field depends on the broker; the /queue/ALERT example is the nomenclature used by ActiveMQ. Each broker has its own logic.
+Example usage::
 alert:
- http_post_url: "http://example.com/api"
- http_post_payload:
- ip: clientip
- http_post_static_payload:
- apikey: abc123
- http_post_headers:
- authorization: Basic 123dr3234
-
-Squadcast
-~~~~~~~~~
-
-Alerts can be sent to Squadcast using the `http post` method described above and Squadcast will process it and send Phone, SMS, Email and Push notifications to the relevant person(s) and let them take actions.
-
-Configuration variables in rules YAML file::
-
- alert: post
- http_post_url:
- http_post_static_payload:
- Title:
- http_post_all_values: true
+ - "stomp"
+ stomp_hostname: "localhost"
+ stomp_hostport: "61613"
+ stomp_login: "admin"
+ stomp_password: "admin"
+ stomp_destination: "/queue/ALERT"
-For more details, you can refer the `Squadcast documentation `_.
+Telegram
+~~~~~~~~
+Telegram alerter will send a notification to a predefined Telegram username or channel. The body of the notification is formatted the same as with other alerters.
-Alerter
-~~~~~~~
+The alerter requires the following two options:
-For all Alerter subclasses, you may reference values from a top-level rule property in your Alerter fields by referring to the property name surrounded by dollar signs. This can be useful when you have rule-level properties that you would like to reference many times in your alert. For example:
+``telegram_bot_token``: The token is a string along the lines of ``110201543:AAHdqTcvCH1vGWJxfSeofSAs0K5PALDsaw`` that will be required to authorize the bot and send requests to the Bot API. You can learn about obtaining tokens and generating new ones in this document https://core.telegram.org/bots#6-botfather
-Example usage::
+``telegram_room_id``: Unique identifier for the target chat or username of the target channel using telegram chat_id (in the format "-xxxxxxxx")
- jira_priority: $priority$
- jira_alert_owner: $owner$
+Optional:
+``telegram_api_url``: Custom domain to call Telegram Bot API. Defaults to api.telegram.org
+``telegram_proxy``: By default ElastAlert will not use a network proxy to send notifications to Telegram. Set this option using ``hostname:port`` if you need to use a proxy.
-Line Notify
-~~~~~~~~~~~
+``telegram_proxy_login``: The Telegram proxy auth username.
-Line Notify will send notification to a Line application. The body of the notification is formatted the same as with other alerters.
+``telegram_proxy_pass``: The Telegram proxy auth password.
-Required:
+Example usage::
-``linenotify_access_token``: The access token that you got from https://notify-bot.line.me/my/
+    alert:
+      - "telegram"
+    telegram_bot_token: "bot_token"
+    telegram_room_id: "chat_id"
-theHive
+TheHive
 ~~~~~~~
 theHive alert type will send a JSON request to theHive (Security Incident Response Platform) with the TheHive4py API. The sent request will be stored as a Hive Alert with a description and observables.
@@ -2397,143 +2566,90 @@ Example usage::
     alert: hivealerter
-    hive_connection:
-    hive_host: http://localhost
-    hive_port:
-    hive_apikey:
-    hive_proxies:
-    http: ''
-    https: ''
-
-    hive_alert_config:
-    title: 'Title'  ## This will default to {rule[index]_rule[name]} if not provided
-    type: 'external'
-    source: 'elastalert'
-    description: '{match[field1]} {rule[name]} Sample description'
-    severity: 2
-    tags: ['tag1', 'tag2 {rule[name]}']
-    tlp: 3
-    status: 'New'
-    follow: True
+    hive_connection:
+      hive_host: http://localhost
+      hive_port:
+      hive_apikey:
+      hive_proxies:
+        http: ''
+        https: ''
+
+    hive_alert_config:
+      title: 'Title'  ## This will default to {rule[index]_rule[name]} if not provided
+      type: 'external'
+      source: 'elastalert'
+      description: '{match[field1]} {rule[name]} Sample description'
+      severity: 2
+      tags: ['tag1', 'tag2 {rule[name]}']
+      tlp: 3
+      status: 'New'
+      follow: True
     hive_observable_data_mapping:
-    - domain: "{match[field1]}_{rule[name]}"
-    - domain: "{match[field]}"
-    - ip: "{match[ip_field]}"
+      - domain: "{match[field1]}_{rule[name]}"
+      - domain: "{match[field]}"
+      - ip: "{match[ip_field]}"
-Zabbix
-~~~~~~~~~~~
+Twilio
+~~~~~~
-Zabbix will send notification to a Zabbix server. The item in the host specified receive a 1 value for each hit. For example, if the elastic query produce 3 hits in the last execution of elastalert, three '1' (integer) values will be send from elastalert to Zabbix Server. If the query have 0 hits, any value will be sent.
+Twilio alerter will trigger an incident to a mobile phone as an SMS from your Twilio phone number. The SMS will contain the alert name. You may use either Twilio SMS or Twilio Copilot
+to send the message, controlled by the ``twilio_use_copilot`` option.
-Required:
+Note that when Twilio Copilot *is* used the ``twilio_message_service_sid`` option is required. Likewise, when *not* using Twilio Copilot, the ``twilio_from_number`` option is required.
-``zbx_sender_host``: The address where zabbix server is running.
-``zbx_sender_port``: The port where zabbix server is listenning.
-``zbx_host``: This field setup the host in zabbix that receives the value sent by ElastAlert 2.
-``zbx_key``: This field setup the key in the host that receives the value sent by ElastAlert 2.
+The alerter requires the following options:
-Discord
-~~~~~~~
+``twilio_account_sid``: The SID of your Twilio account.
-Discord will send notification to a Discord application. The body of the notification is formatted the same as with other alerters.
+``twilio_auth_token``: Auth token associated with your Twilio account.
-Required:
+``twilio_to_number``: The phone number where you would like to send the notification.
-``discord_webhook_url``: The webhook URL.
+Either one of
+ * ``twilio_from_number``: Your Twilio phone number from which the message will be sent.
+ * ``twilio_message_service_sid``: The SID of your Twilio message service.
 Optional:
-``discord_emoji_title``: By default ElastAlert will use the ``:warning:`` emoji when posting to the channel. You can use a different emoji per ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored.
-``discord_proxy``: By default ElastAlert will not use a network proxy to send notifications to Discord. Set this option using hostname:port if you need to use a proxy.
-``discord_proxy_login``: The Discord proxy auth username.
-``discord_proxy_password``: The Discord proxy auth username.
-
-``discord_embed_color``: embed color. By default ``0xffffff``.
-
-``discord_embed_footer``: embed footer.
-
-``discord_embed_icon_url``: You can provide icon_url to use custom image. Provide absolute address of the pciture.
-
-Dingtalk
-~~~~~~~~
-
-Dingtalk will send notification to a Dingtalk application. The body of the notification is formatted the same as with other alerters.
-
-Required:
-
-``dingtalk_access_token``: Dingtalk access token.
-
-``dingtalk_msgtype``: Dingtalk msgtype. ``text``, ``markdown``, ``single_action_card``, ``action_card``.
-
-dingtalk_msgtype single_action_card Required:
-
-``dingtalk_single_title``: The title of a single button..
-
-``dingtalk_single_url``: Jump link for a single button.
-
-dingtalk_msgtype action_card Required:
-
-``dingtalk_btns``: Button.
-
-dingtalk_msgtype action_card Optional:
-
-``dingtalk_btn_orientation``: "0": Buttons are arranged vertically "1": Buttons are arranged horizontally.
-
-Example msgtype : text::
-
-    alert:
-    - dingtalk
-    dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
-    dingtalk_msgtype: 'text'
-
-
-Example msgtype : markdown::
-
-    alert:
-    - dingtalk
-    dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
-    dingtalk_msgtype: 'markdown'
-
+``twilio_use_copilot``: Whether or not to use Twilio Copilot, False by default.
-Example msgtype : single_action_card::
+Example With Copilot usage::
 alert:
-    - dingtalk
-    dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
-    dingtalk_msgtype: 'single_action_card'
-    dingtalk_single_title: 'test3'
-    dingtalk_single_url: 'https://xxxx.xxx'
-
+    - "twilio"
+    twilio_use_copilot: True
+    twilio_to_number: "0123456789"
+    twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345"
+    twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567"
+    twilio_message_service_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567"
-Example msgtype : action_card::
+Example With SMS usage::
 alert:
-    - dingtalk
-    dingtalk_access_token: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
-    dingtalk_msgtype: 'action_card'
-    dingtalk_btn_orientation: '0'
-    dingtalk_btns: [{'title': 'a', 'actionURL': 'https://xxxx1.xxx'}, {'title': 'b', 'actionURL': 'https://xxxx2.xxx'}]
+    - "twilio"
+    twilio_to_number: "0123456789"
+    twilio_from_number: "9876543210"
+    twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345"
+    twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567"
-Chatwork
-~~~~~~~~
+Zabbix
+~~~~~~
-Chatwork will send notification to a Chatwork application. The body of the notification is formatted the same as with other alerters.
+Zabbix will send a notification to a Zabbix server. The specified item in the host receives a value of 1 for each hit. For example, if the Elasticsearch query produces 3 hits in the last execution of ElastAlert, three '1' (integer) values will be sent from ElastAlert to the Zabbix server. If the query has 0 hits, no value will be sent.
 Required:
-``chatwork_apikey``: ChatWork API KEY.
+``zbx_sender_host``: The address where the Zabbix server is running.
-``chatwork_room_id``: The ID of the room you are talking to in Chatwork. How to find the room ID is the part of the number after "rid" at the end of the URL of the browser.
+``zbx_sender_port``: The port where the Zabbix server is listening.
+``zbx_host``: This field sets up the host in Zabbix that receives the value sent by ElastAlert 2.
+``zbx_key``: This field sets up the key in the host that receives the value sent by ElastAlert 2.
Example usage:: alert: - - chatwork - chatwork_apikey: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' - chatwork_room_id: 'xxxxxxxxx' + - "zabbix" + zbx_sender_host: "zabbix-server" + zbx_sender_port: 10051 + zbx_host: "test001" + zbx_key: "sender_load1" \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index 66bcc3861..a64fe430a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -8,4 +8,4 @@ pylint<2.9 pytest<3.7.0 setuptools sphinx_rtd_theme -tox==3.23.0 +tox==3.23.1 diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 28aa5074b..3af230a1a 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -9,6 +9,7 @@ import mock import pytest from jira.exceptions import JIRAError +from requests.auth import HTTPProxyAuth from elastalert.alerts import AlertaAlerter from elastalert.alerts import Alerter @@ -723,315 +724,844 @@ def test_opsgenie_details_with_environment_variable_replacement(environ): assert expected_json == actual_json -def test_jira(): - description_txt = "Description stuff goes here like a runbook link." +def test_opsgenie_tags(): rule = { - 'name': 'test alert', - 'jira_account_file': 'jirafile', + 'name': 'Opsgenie Details', 'type': mock_rule(), - 'jira_project': 'testproject', - 'jira_priority': 0, - 'jira_issuetype': 'testtype', - 'jira_server': 'jiraserver', - 'jira_label': 'testlabel', - 'jira_component': 'testcomponent', - 'jira_description': description_txt, - 'jira_watchers': ['testwatcher1', 'testwatcher2'], - 'timestamp_field': '@timestamp', - 'alert_subject': 'Issue {0} occurred at {1}', - 'alert_subject_args': ['test_term', '@timestamp'], - 'rule_file': '/tmp/foo.yaml' + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_tags': ['test1', 'test2'] + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' } + alert = OpsGenieAlerter(rule) - mock_priority = mock.Mock(id='5') + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - expected = [ - mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), - mock.call().priorities(), - mock.call().fields(), - mock.call().create_issue( - issuetype={'name': 'testtype'}, - priority={'id': '5'}, - project={'key': 'testproject'}, - labels=['testlabel'], - components=[{'name': 'testcomponent'}], - description=mock.ANY, - summary='Issue test_value occurred at 2014-10-31T00:00:00', - ), - mock.call().add_watcher(mock.ANY, 'testwatcher1'), - mock.call().add_watcher(mock.ANY, 'testwatcher2'), - ] + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['test1', 'test2', 'ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + 
actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json - # We don't care about additional calls to mock_jira, such as __str__ - assert mock_jira.mock_calls[:6] == expected - assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) - # Search called if jira_bump_tickets - rule['jira_bump_tickets'] = True - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [] - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] +def test_opsgenie_message(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_message': 'test1' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - expected.insert(3, mock.call().search_issues(mock.ANY)) - assert mock_jira.mock_calls == expected + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - # Remove a field if jira_ignore_in_title set - rule['jira_ignore_in_title'] = 'test_term' - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [] - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'test1', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - assert 'test_value' not in mock_jira.mock_calls[3][1][0] +def test_opsgenie_alias(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_alias': 'test1' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) - # Issue is still created if search_issues throws an exception - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.side_effect = JIRAError - mock_jira.return_value.priorities.return_value = [mock_priority] - 
mock_jira.return_value.fields.return_value = [] + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - assert mock_jira.mock_calls == expected + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies', + 'alias': 'test1' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json - # Only bump after 3d of inactivity - rule['jira_bump_after_inactivity'] = 3 - mock_issue = mock.Mock() - # Check ticket is bumped if it is updated 4 days ago - mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [mock_issue] - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] +def test_opsgenie_subject(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_subject': 'test1' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - # Check add_comment is called - assert len(mock_jira.mock_calls) == 5 - assert '().add_comment' == mock_jira.mock_calls[4][0] + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - # Check ticket is bumped is not bumped if ticket is updated right now - mock_issue.fields.updated = str(ts_now()) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [mock_issue] - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - # Only 4 calls for mock_jira since add_comment is not called - assert len(mock_jira.mock_calls) == 4 + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'test1', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == 
actual_json - # Test match resolved values - rule = { - 'name': 'test alert', - 'jira_account_file': 'jirafile', - 'type': mock_rule(), - 'owner': 'the_owner', - 'jira_project': 'testproject', - 'jira_issuetype': 'testtype', - 'jira_server': 'jiraserver', - 'jira_label': 'testlabel', - 'jira_component': 'testcomponent', - 'jira_description': "DESC", - 'jira_watchers': ['testwatcher1', 'testwatcher2'], - 'timestamp_field': '@timestamp', - 'jira_affected_user': "#gmail.the_user", - 'rule_file': '/tmp/foo.yaml' - } - mock_issue = mock.Mock() - mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) - mock_fields = [ - {'name': 'affected user', 'id': 'affected_user_id', 'schema': {'type': 'string'}} - ] - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [mock_issue] - mock_jira.return_value.fields.return_value = mock_fields - mock_jira.return_value.priorities.return_value = [mock_priority] - alert = JiraAlerter(rule) - alert.alert([{'gmail.the_user': 'jdoe', '@timestamp': '2014-10-31T00:00:00'}]) - assert mock_jira.mock_calls[4][2]['affected_user_id'] == "jdoe" - -def test_jira_arbitrary_field_support(): - description_txt = "Description stuff goes here like a runbook link." +def test_opsgenie_subject_args(): rule = { - 'name': 'test alert', - 'jira_account_file': 'jirafile', + 'name': 'Opsgenie Details', 'type': mock_rule(), - 'owner': 'the_owner', - 'jira_project': 'testproject', - 'jira_issuetype': 'testtype', - 'jira_server': 'jiraserver', - 'jira_label': 'testlabel', - 'jira_component': 'testcomponent', - 'jira_description': description_txt, - 'jira_watchers': ['testwatcher1', 'testwatcher2'], - 'jira_arbitrary_reference_string_field': '$owner$', - 'jira_arbitrary_string_field': 'arbitrary_string_value', - 'jira_arbitrary_string_array_field': ['arbitrary_string_value1', 'arbitrary_string_value2'], - 'jira_arbitrary_string_array_field_provided_as_single_value': 'arbitrary_string_value_in_array_field', - 'jira_arbitrary_number_field': 1, - 'jira_arbitrary_number_array_field': [2, 3], - 'jira_arbitrary_number_array_field_provided_as_single_value': 1, - 'jira_arbitrary_complex_field': 'arbitrary_complex_value', - 'jira_arbitrary_complex_array_field': ['arbitrary_complex_value1', 'arbitrary_complex_value2'], - 'jira_arbitrary_complex_array_field_provided_as_single_value': 'arbitrary_complex_value_in_array_field', - 'timestamp_field': '@timestamp', - 'alert_subject': 'Issue {0} occurred at {1}', - 'alert_subject_args': ['test_term', '@timestamp'], - 'rule_file': '/tmp/foo.yaml' + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_subject': 'test', + 'opsgenie_subject_args': ['Testing', 'message'] + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' } + alert = OpsGenieAlerter(rule) - mock_priority = mock.MagicMock(id='5') + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - mock_fields = [ - {'name': 'arbitrary reference string field', 'id': 'arbitrary_reference_string_field', 'schema': {'type': 'string'}}, - {'name': 'arbitrary string field', 'id': 'arbitrary_string_field', 'schema': {'type': 'string'}}, - {'name': 'arbitrary string array field', 'id': 
'arbitrary_string_array_field', 'schema': {'type': 'array', 'items': 'string'}}, - { - 'name': 'arbitrary string array field provided as single value', - 'id': 'arbitrary_string_array_field_provided_as_single_value', - 'schema': {'type': 'array', 'items': 'string'} - }, - {'name': 'arbitrary number field', 'id': 'arbitrary_number_field', 'schema': {'type': 'number'}}, - {'name': 'arbitrary number array field', 'id': 'arbitrary_number_array_field', 'schema': {'type': 'array', 'items': 'number'}}, - { - 'name': 'arbitrary number array field provided as single value', - 'id': 'arbitrary_number_array_field_provided_as_single_value', - 'schema': {'type': 'array', 'items': 'number'} - }, - {'name': 'arbitrary complex field', 'id': 'arbitrary_complex_field', 'schema': {'type': 'ArbitraryType'}}, - { - 'name': 'arbitrary complex array field', - 'id': 'arbitrary_complex_array_field', - 'schema': {'type': 'array', 'items': 'ArbitraryType'} - }, - { - 'name': 'arbitrary complex array field provided as single value', - 'id': 'arbitrary_complex_array_field_provided_as_single_value', - 'schema': {'type': 'array', 'items': 'ArbitraryType'} + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' }, - ] + json=mock.ANY, + proxies=None + ) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = mock_fields - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'test', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json - expected = [ - mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), - mock.call().priorities(), - mock.call().fields(), - mock.call().create_issue( - issuetype={'name': 'testtype'}, - project={'key': 'testproject'}, - labels=['testlabel'], - components=[{'name': 'testcomponent'}], - description=mock.ANY, - summary='Issue test_value occurred at 2014-10-31T00:00:00', - arbitrary_reference_string_field='the_owner', - arbitrary_string_field='arbitrary_string_value', - arbitrary_string_array_field=['arbitrary_string_value1', 'arbitrary_string_value2'], - arbitrary_string_array_field_provided_as_single_value=['arbitrary_string_value_in_array_field'], - arbitrary_number_field=1, - arbitrary_number_array_field=[2, 3], - arbitrary_number_array_field_provided_as_single_value=[1], - arbitrary_complex_field={'name': 'arbitrary_complex_value'}, - arbitrary_complex_array_field=[{'name': 'arbitrary_complex_value1'}, {'name': 'arbitrary_complex_value2'}], - arbitrary_complex_array_field_provided_as_single_value=[{'name': 'arbitrary_complex_value_in_array_field'}], - ), - mock.call().add_watcher(mock.ANY, 'testwatcher1'), - mock.call().add_watcher(mock.ANY, 'testwatcher2'), - ] - # We don't care about additional calls to mock_jira, such as __str__ - assert mock_jira.mock_calls[:6] == expected - assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) +def 
test_opsgenie_priority_p1(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P1' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) - # Reference an arbitrary string field that is not defined on the JIRA server - rule['jira_nonexistent_field'] = 'nonexistent field value' + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = mock_fields + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - with pytest.raises(Exception) as exception: - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - assert "Could not find a definition for the jira field 'nonexistent field'" in str(exception) + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P1', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json - del rule['jira_nonexistent_field'] - # Reference a watcher that does not exist - rule['jira_watchers'] = 'invalid_watcher' +def test_opsgenie_priority_p2(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P2' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = mock_fields + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - # Cause add_watcher to raise, which most likely means that the user did not exist - mock_jira.return_value.add_watcher.side_effect = Exception() + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) - with pytest.raises(Exception) as exception: - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - assert "Exception encountered when trying to add 'invalid_watcher' as a watcher. Does the user exist?" 
in str(exception) + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P2', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json -def test_kibana(ea): - rule = {'filter': [{'query': {'query_string': {'query': 'xy:z'}}}], - 'name': 'Test rule!', - 'es_host': 'test.testing', - 'es_port': 12345, - 'timeframe': datetime.timedelta(hours=1), - 'index': 'logstash-test', - 'include': ['@timestamp'], - 'timestamp_field': '@timestamp'} - match = {'@timestamp': '2014-10-10T00:00:00'} - with mock.patch("elastalert.elastalert.elasticsearch_client") as mock_es: +def test_opsgenie_priority_p3(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P3' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P3', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_p4(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P4' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P4', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_p5(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P5' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 
'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P5', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_none(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'abc' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_proxy(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_proxy': 'https://proxy.url' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies={'https': 'https://proxy.url'} + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_jira(): + description_txt = "Description stuff goes here like a runbook link." 
+ rule = { + 'name': 'test alert', + 'jira_account_file': 'jirafile', + 'type': mock_rule(), + 'jira_project': 'testproject', + 'jira_priority': 0, + 'jira_issuetype': 'testtype', + 'jira_server': 'jiraserver', + 'jira_label': 'testlabel', + 'jira_component': 'testcomponent', + 'jira_description': description_txt, + 'jira_watchers': ['testwatcher1', 'testwatcher2'], + 'timestamp_field': '@timestamp', + 'alert_subject': 'Issue {0} occurred at {1}', + 'alert_subject_args': ['test_term', '@timestamp'], + 'rule_file': '/tmp/foo.yaml' + } + + mock_priority = mock.Mock(id='5') + + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + expected = [ + mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), + mock.call().priorities(), + mock.call().fields(), + mock.call().create_issue( + issuetype={'name': 'testtype'}, + priority={'id': '5'}, + project={'key': 'testproject'}, + labels=['testlabel'], + components=[{'name': 'testcomponent'}], + description=mock.ANY, + summary='Issue test_value occurred at 2014-10-31T00:00:00', + ), + mock.call().add_watcher(mock.ANY, 'testwatcher1'), + mock.call().add_watcher(mock.ANY, 'testwatcher2'), + ] + + # We don't care about additional calls to mock_jira, such as __str__ + assert mock_jira.mock_calls[:6] == expected + assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) + + # Search called if jira_bump_tickets + rule['jira_bump_tickets'] = True + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [] + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + expected.insert(3, mock.call().search_issues(mock.ANY)) + assert mock_jira.mock_calls == expected + + # Remove a field if jira_ignore_in_title set + rule['jira_ignore_in_title'] = 'test_term' + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [] + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + assert 'test_value' not in mock_jira.mock_calls[3][1][0] + + # Issue is still created if search_issues throws an exception + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.side_effect = JIRAError + mock_jira.return_value.priorities.return_value = [mock_priority] + 
mock_jira.return_value.fields.return_value = []
+
+        alert = JiraAlerter(rule)
+        alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}])
+
+    assert mock_jira.mock_calls == expected
+
+    # Only bump after 3d of inactivity
+    rule['jira_bump_after_inactivity'] = 3
+    mock_issue = mock.Mock()
+
+    # Check ticket is bumped if it was updated 4 days ago
+    mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4))
+    with mock.patch('elastalert.alerts.JIRA') as mock_jira, \
+            mock.patch('elastalert.alerts.yaml_loader') as mock_open:
+        mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'}
+        mock_jira.return_value = mock.Mock()
+        mock_jira.return_value.search_issues.return_value = [mock_issue]
+        mock_jira.return_value.priorities.return_value = [mock_priority]
+        mock_jira.return_value.fields.return_value = []
+
+        alert = JiraAlerter(rule)
+        alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}])
+        # Check add_comment is called
+        assert len(mock_jira.mock_calls) == 5
+        assert '().add_comment' == mock_jira.mock_calls[4][0]
+
+    # Check ticket is not bumped if it was updated right now
+    mock_issue.fields.updated = str(ts_now())
+    with mock.patch('elastalert.alerts.JIRA') as mock_jira, \
+            mock.patch('elastalert.alerts.yaml_loader') as mock_open:
+        mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'}
+        mock_jira.return_value = mock.Mock()
+        mock_jira.return_value.search_issues.return_value = [mock_issue]
+        mock_jira.return_value.priorities.return_value = [mock_priority]
+        mock_jira.return_value.fields.return_value = []
+
+        alert = JiraAlerter(rule)
+        alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}])
+        # Only 4 calls for mock_jira since add_comment is not called
+        assert len(mock_jira.mock_calls) == 4
+
+    # Test match resolved values
+    rule = {
+        'name': 'test alert',
+        'jira_account_file': 'jirafile',
+        'type': mock_rule(),
+        'owner': 'the_owner',
+        'jira_project': 'testproject',
+        'jira_issuetype': 'testtype',
+        'jira_server': 'jiraserver',
+        'jira_label': 'testlabel',
+        'jira_component': 'testcomponent',
+        'jira_description': "DESC",
+        'jira_watchers': ['testwatcher1', 'testwatcher2'],
+        'timestamp_field': '@timestamp',
+        'jira_affected_user': "#gmail.the_user",
+        'rule_file': '/tmp/foo.yaml'
+    }
+    mock_issue = mock.Mock()
+    mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4))
+    mock_fields = [
+        {'name': 'affected user', 'id': 'affected_user_id', 'schema': {'type': 'string'}}
+    ]
+    with mock.patch('elastalert.alerts.JIRA') as mock_jira, \
+            mock.patch('elastalert.alerts.yaml_loader') as mock_open:
+        mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'}
+        mock_jira.return_value = mock.Mock()
+        mock_jira.return_value.search_issues.return_value = [mock_issue]
+        mock_jira.return_value.fields.return_value = mock_fields
+        mock_jira.return_value.priorities.return_value = [mock_priority]
+        alert = JiraAlerter(rule)
+        alert.alert([{'gmail.the_user': 'jdoe', '@timestamp': '2014-10-31T00:00:00'}])
+    assert mock_jira.mock_calls[4][2]['affected_user_id'] == "jdoe"
+
+
+def test_jira_arbitrary_field_support():
+    description_txt = "Description stuff goes here like a runbook link."
+ rule = { + 'name': 'test alert', + 'jira_account_file': 'jirafile', + 'type': mock_rule(), + 'owner': 'the_owner', + 'jira_project': 'testproject', + 'jira_issuetype': 'testtype', + 'jira_server': 'jiraserver', + 'jira_label': 'testlabel', + 'jira_component': 'testcomponent', + 'jira_description': description_txt, + 'jira_watchers': ['testwatcher1', 'testwatcher2'], + 'jira_arbitrary_reference_string_field': '$owner$', + 'jira_arbitrary_string_field': 'arbitrary_string_value', + 'jira_arbitrary_string_array_field': ['arbitrary_string_value1', 'arbitrary_string_value2'], + 'jira_arbitrary_string_array_field_provided_as_single_value': 'arbitrary_string_value_in_array_field', + 'jira_arbitrary_number_field': 1, + 'jira_arbitrary_number_array_field': [2, 3], + 'jira_arbitrary_number_array_field_provided_as_single_value': 1, + 'jira_arbitrary_complex_field': 'arbitrary_complex_value', + 'jira_arbitrary_complex_array_field': ['arbitrary_complex_value1', 'arbitrary_complex_value2'], + 'jira_arbitrary_complex_array_field_provided_as_single_value': 'arbitrary_complex_value_in_array_field', + 'timestamp_field': '@timestamp', + 'alert_subject': 'Issue {0} occurred at {1}', + 'alert_subject_args': ['test_term', '@timestamp'], + 'rule_file': '/tmp/foo.yaml' + } + + mock_priority = mock.MagicMock(id='5') + + mock_fields = [ + {'name': 'arbitrary reference string field', 'id': 'arbitrary_reference_string_field', 'schema': {'type': 'string'}}, + {'name': 'arbitrary string field', 'id': 'arbitrary_string_field', 'schema': {'type': 'string'}}, + {'name': 'arbitrary string array field', 'id': 'arbitrary_string_array_field', 'schema': {'type': 'array', 'items': 'string'}}, + { + 'name': 'arbitrary string array field provided as single value', + 'id': 'arbitrary_string_array_field_provided_as_single_value', + 'schema': {'type': 'array', 'items': 'string'} + }, + {'name': 'arbitrary number field', 'id': 'arbitrary_number_field', 'schema': {'type': 'number'}}, + {'name': 'arbitrary number array field', 'id': 'arbitrary_number_array_field', 'schema': {'type': 'array', 'items': 'number'}}, + { + 'name': 'arbitrary number array field provided as single value', + 'id': 'arbitrary_number_array_field_provided_as_single_value', + 'schema': {'type': 'array', 'items': 'number'} + }, + {'name': 'arbitrary complex field', 'id': 'arbitrary_complex_field', 'schema': {'type': 'ArbitraryType'}}, + { + 'name': 'arbitrary complex array field', + 'id': 'arbitrary_complex_array_field', + 'schema': {'type': 'array', 'items': 'ArbitraryType'} + }, + { + 'name': 'arbitrary complex array field provided as single value', + 'id': 'arbitrary_complex_array_field_provided_as_single_value', + 'schema': {'type': 'array', 'items': 'ArbitraryType'} + }, + ] + + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = mock_fields + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + expected = [ + mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), + mock.call().priorities(), + mock.call().fields(), + mock.call().create_issue( + issuetype={'name': 'testtype'}, + project={'key': 'testproject'}, + labels=['testlabel'], + components=[{'name': 'testcomponent'}], + description=mock.ANY, + summary='Issue test_value occurred at 
2014-10-31T00:00:00', + arbitrary_reference_string_field='the_owner', + arbitrary_string_field='arbitrary_string_value', + arbitrary_string_array_field=['arbitrary_string_value1', 'arbitrary_string_value2'], + arbitrary_string_array_field_provided_as_single_value=['arbitrary_string_value_in_array_field'], + arbitrary_number_field=1, + arbitrary_number_array_field=[2, 3], + arbitrary_number_array_field_provided_as_single_value=[1], + arbitrary_complex_field={'name': 'arbitrary_complex_value'}, + arbitrary_complex_array_field=[{'name': 'arbitrary_complex_value1'}, {'name': 'arbitrary_complex_value2'}], + arbitrary_complex_array_field_provided_as_single_value=[{'name': 'arbitrary_complex_value_in_array_field'}], + ), + mock.call().add_watcher(mock.ANY, 'testwatcher1'), + mock.call().add_watcher(mock.ANY, 'testwatcher2'), + ] + + # We don't care about additional calls to mock_jira, such as __str__ + assert mock_jira.mock_calls[:6] == expected + assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) + + # Reference an arbitrary string field that is not defined on the JIRA server + rule['jira_nonexistent_field'] = 'nonexistent field value' + + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = mock_fields + + with pytest.raises(Exception) as exception: + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + assert "Could not find a definition for the jira field 'nonexistent field'" in str(exception) + + del rule['jira_nonexistent_field'] + + # Reference a watcher that does not exist + rule['jira_watchers'] = 'invalid_watcher' + + with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = mock_fields + + # Cause add_watcher to raise, which most likely means that the user did not exist + mock_jira.return_value.add_watcher.side_effect = Exception() + + with pytest.raises(Exception) as exception: + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + assert "Exception encountered when trying to add 'invalid_watcher' as a watcher. Does the user exist?" 
in str(exception) + + +def test_kibana(ea): + rule = {'filter': [{'query': {'query_string': {'query': 'xy:z'}}}], + 'name': 'Test rule!', + 'es_host': 'test.testing', + 'es_port': 12345, + 'timeframe': datetime.timedelta(hours=1), + 'index': 'logstash-test', + 'include': ['@timestamp'], + 'timestamp_field': '@timestamp'} + match = {'@timestamp': '2014-10-10T00:00:00'} + with mock.patch("elastalert.elastalert.elasticsearch_client") as mock_es: mock_create = mock.Mock(return_value={'_id': 'ABCDEFGH'}) mock_es_inst = mock.Mock() mock_es_inst.index = mock_create @@ -1040,654 +1570,1862 @@ def test_kibana(ea): mock_es.return_value = mock_es_inst link = ea.generate_kibana_db(rule, match) - assert 'http://test.testing:12345/_plugin/kibana/#/dashboard/temp/ABCDEFGH' == link + assert 'http://test.testing:12345/_plugin/kibana/#/dashboard/temp/ABCDEFGH' == link + + # Name and index + dashboard = json.loads(mock_create.call_args_list[0][1]['body']['dashboard']) + assert dashboard['index']['default'] == 'logstash-test' + assert 'Test rule!' in dashboard['title'] + + # Filters and time range + filters = dashboard['services']['filter']['list'] + assert 'xy:z' in filters['1']['query'] + assert filters['1']['type'] == 'querystring' + time_range = filters['0'] + assert time_range['from'] == ts_add(match['@timestamp'], -rule['timeframe']) + assert time_range['to'] == ts_add(match['@timestamp'], datetime.timedelta(minutes=10)) + + # Included fields active in table + assert dashboard['rows'][1]['panels'][0]['fields'] == ['@timestamp'] + + +def test_command(): + # Test command as list with a formatted arg + rule = {'command': ['/bin/test/', '--arg', '%(somefield)s']} + alert = CommandAlerter(rule) + match = {'@timestamp': '2014-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'nested': {'field': 1}} + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + alert.alert([match]) + assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) + + # Test command as string with formatted arg (old-style string format) + rule = {'command': '/bin/test/ --arg %(somefield)s'} + alert = CommandAlerter(rule) + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + alert.alert([match]) + assert mock_popen.called_with('/bin/test --arg foobarbaz', stdin=subprocess.PIPE, shell=False) + + # Test command as string without formatted arg (old-style string format) + rule = {'command': '/bin/test/foo.sh'} + alert = CommandAlerter(rule) + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + alert.alert([match]) + assert mock_popen.called_with('/bin/test/foo.sh', stdin=subprocess.PIPE, shell=True) + + # Test command with pipe_match_json + rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], + 'pipe_match_json': True} + alert = CommandAlerter(rule) + match = {'@timestamp': '2014-01-01T00:00:00', + 'somefield': 'foobarbaz'} + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + mock_subprocess = mock.Mock() + mock_popen.return_value = mock_subprocess + mock_subprocess.communicate.return_value = (None, None) + alert.alert([match]) + assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) + assert mock_subprocess.communicate.called_with(input=json.dumps(match)) + + # Test command with pipe_alert_text + rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], + 'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'} + alert = CommandAlerter(rule) + match = {'@timestamp': 
'2014-01-01T00:00:00', + 'somefield': 'foobarbaz'} + alert_text = str(BasicMatchString(rule, match)) + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + mock_subprocess = mock.Mock() + mock_popen.return_value = mock_subprocess + mock_subprocess.communicate.return_value = (None, None) + alert.alert([match]) + assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) + assert mock_subprocess.communicate.called_with(input=alert_text.encode()) + + # Test command with fail_on_non_zero_exit + rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], + 'fail_on_non_zero_exit': True} + alert = CommandAlerter(rule) + match = {'@timestamp': '2014-01-01T00:00:00', + 'somefield': 'foobarbaz'} + with pytest.raises(Exception) as exception: + with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + mock_subprocess = mock.Mock() + mock_popen.return_value = mock_subprocess + mock_subprocess.wait.return_value = 1 + alert.alert([match]) + assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) + assert "Non-zero exit code while running command" in str(exception) + + +def test_ms_teams(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 'ms_teams_alert_summary': 'Alert from ElastAlert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + '@type': 'MessageCard', + '@context': 'http://schema.org/extensions', + 'summary': rule['ms_teams_alert_summary'], + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__() + } + mock_post_request.assert_called_once_with( + rule['ms_teams_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_ms_teams_uses_color_and_fixed_width_text(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 'ms_teams_alert_summary': 'Alert from ElastAlert', + 'ms_teams_alert_fixed_width': True, + 'ms_teams_theme_color': '#124578', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + body = BasicMatchString(rule, match).__str__() + body = body.replace('`', "'") + body = "```{0}```".format('```\n\n```'.join(x for x in body.split('\n'))).replace('\n``````', '') + expected_data = { + '@type': 'MessageCard', + '@context': 'http://schema.org/extensions', + 'summary': rule['ms_teams_alert_summary'], + 'title': rule['alert_subject'], + 'themeColor': '#124578', + 'text': body + } + mock_post_request.assert_called_once_with( + rule['ms_teams_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_ms_teams_proxy(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 
'ms_teams_alert_summary': 'Alert from ElastAlert', + 'ms_teams_proxy': 'https://test.proxy.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + '@type': 'MessageCard', + '@context': 'http://schema.org/extensions', + 'summary': rule['ms_teams_alert_summary'], + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__() + } + mock_post_request.assert_called_once_with( + rule['ms_teams_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': rule['ms_teams_proxy']} + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_custom_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_custom_timeout(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert_subject': 'Cool subject', + 'alert': [], + 'slack_timeout': 20 + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=20 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_rule_name_when_custom_title_is_not_provided(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': ['http://please.dontgohere.slack'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 
'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none', + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_custom_slack_channel(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': ['http://please.dontgohere.slack'], + 'slack_channel_override': '#test-alert', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '#test-alert', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none', + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_list_of_custom_slack_channel(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': ['http://please.dontgohere.slack'], + 'slack_channel_override': ['#test-alert', '#test-alert2'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data1 = { + 'username': 'elastalert', + 'channel': '#test-alert', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + expected_data2 = { + 'username': 'elastalert', + 'channel': '#test-alert2', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_with( + rule['slack_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data2 == json.loads(mock_post_request.call_args_list[1][1]['data']) + + +def test_slack_attach_kibana_discover_url_when_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'kibana_discover_url': 
'http://kibana#discover' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + }, + { + 'color': '#ec4b98', + 'title': 'Discover in Kibana', + 'title_link': 'http://kibana#discover' + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_attach_kibana_discover_url_when_not_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_kibana_discover_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_kibana_discover_title': 'Click to discover in Kibana', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'kibana_discover_url': 'http://kibana#discover' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + }, + { + 'color': '#ec4b98', + 'title': 'Click to discover in Kibana', + 'title_link': 'http://kibana#discover' + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_kibana_discover_color(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_kibana_discover_color': 'blue', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + 
match = { + '@timestamp': '2016-01-01T00:00:00', + 'kibana_discover_url': 'http://kibana#discover' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + }, + { + 'color': 'blue', + 'title': 'Discover in Kibana', + 'title_link': 'http://kibana#discover' + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_ignore_ssl_errors(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_ignore_ssl_errors': True, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=False, + timeout=10 + ) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_proxy(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_proxy': 'http://proxy.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': rule['slack_proxy']}, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_username_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'test elastalert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { 
+ 'username': 'test elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_title_link(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_title_link': 'http://slack.title.link', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'title_link': 'http://slack.title.link' + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_title': 'slack title', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'slack title', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_icon_url_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_icon_url_override': 'http://slack.icon.url.override', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_url': 'http://slack.icon.url.override', + 
'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_msg_color(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_msg_color': 'good', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'good', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_parse_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_parse_override': 'full', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'full' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + - # Name and index - dashboard = json.loads(mock_create.call_args_list[0][1]['body']['dashboard']) - assert dashboard['index']['default'] == 'logstash-test' - assert 'Test rule!' 
in dashboard['title'] +def test_slack_text_string(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_text_string': 'text str', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - # Filters and time range - filters = dashboard['services']['filter']['list'] - assert 'xy:z' in filters['1']['query'] - assert filters['1']['type'] == 'querystring' - time_range = filters['0'] - assert time_range['from'] == ts_add(match['@timestamp'], -rule['timeframe']) - assert time_range['to'] == ts_add(match['@timestamp'], datetime.timedelta(minutes=10)) + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': 'text str', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - # Included fields active in table - assert dashboard['rows'][1]['panels'][0]['fields'] == ['@timestamp'] +def test_slack_alert_fields(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_alert_fields': [ + { + 'title': 'Host', + 'value': 'somefield', + 'short': 'true' + }, + { + 'title': 'Sensors', + 'value': '@timestamp', + 'short': 'true' + } + ], + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) -def test_command(): - # Test command as list with a formatted arg - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s']} - alert = CommandAlerter(rule) - match = {'@timestamp': '2014-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'nested': {'field': 1}} - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': + [ + { + 'short': 'true', + 'title': 'Host', + 'value': 'foobarbaz' + }, + { + 'short': 'true', + 'title': 'Sensors', + 'value': '2016-01-01T00:00:00' + } + ], + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_ca_certs(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 
'elastalert', + 'slack_ca_certs': True, + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) - # Test command as string with formatted arg (old-style string format) - rule = {'command': '/bin/test/ --arg %(somefield)s'} - alert = CommandAlerter(rule) - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_with_payload(): + rule = { + 'name': 'Test HTTP Post Alerter With Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_payload': {'posted_name': 'somefield'}, + 'http_post_static_payload': {'name': 'somestaticname'}, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - assert mock_popen.called_with('/bin/test --arg foobarbaz', stdin=subprocess.PIPE, shell=False) + expected_data = { + 'posted_name': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - # Test command as string without formatted arg (old-style string format) - rule = {'command': '/bin/test/foo.sh'} - alert = CommandAlerter(rule) - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + +def test_http_alerter_with_payload_all_values(): + rule = { + 'name': 'Test HTTP Post Alerter With Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_payload': {'posted_name': 'somefield'}, + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_all_values': True, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'posted_name': 'foobarbaz', + 'name': 'somestaticname', + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == 
json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_without_payload(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - assert mock_popen.called_with('/bin/test/foo.sh', stdin=subprocess.PIPE, shell=True) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - # Test command with pipe_match_json - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], - 'pipe_match_json': True} - alert = CommandAlerter(rule) - match = {'@timestamp': '2014-01-01T00:00:00', - 'somefield': 'foobarbaz'} - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: - mock_subprocess = mock.Mock() - mock_popen.return_value = mock_subprocess - mock_subprocess.communicate.return_value = (None, None) - alert.alert([match]) - assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) - assert mock_subprocess.communicate.called_with(input=json.dumps(match)) - # Test command with fail_on_non_zero_exit - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], - 'fail_on_non_zero_exit': True} - alert = CommandAlerter(rule) - match = {'@timestamp': '2014-01-01T00:00:00', - 'somefield': 'foobarbaz'} - with pytest.raises(Exception) as exception: - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: - mock_subprocess = mock.Mock() - mock_popen.return_value = mock_subprocess - mock_subprocess.wait.return_value = 1 - alert.alert([match]) - assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) - assert "Non-zero exit code while running command" in str(exception) +def test_http_alerter_proxy(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies={'https': 'http://proxy.url'}, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_ms_teams(): +def test_http_alerter_timeout(): rule = { - 'name': 'Test Rule', + 'name': 'Test HTTP Post Alerter Without Payload', 'type': 
'any', - 'ms_teams_webhook_url': 'http://test.webhook.url', - 'ms_teams_alert_summary': 'Alert from ElastAlert', - 'alert_subject': 'Cool subject', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_timeout': 20, 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = MsTeamsAlerter(rule) + alert = HTTPPostAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - '@type': 'MessageCard', - '@context': 'http://schema.org/extensions', - 'summary': rule['ms_teams_alert_summary'], - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__() + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' } mock_post_request.assert_called_once_with( - rule['ms_teams_webhook_url'], + rule['http_post_url'], data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=20, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_ms_teams_uses_color_and_fixed_width_text(): +def test_http_alerter_headers(): rule = { - 'name': 'Test Rule', + 'name': 'Test HTTP Post Alerter Without Payload', 'type': 'any', - 'ms_teams_webhook_url': 'http://test.webhook.url', - 'ms_teams_alert_summary': 'Alert from ElastAlert', - 'ms_teams_alert_fixed_width': True, - 'ms_teams_theme_color': '#124578', - 'alert_subject': 'Cool subject', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_headers': {'authorization': 'Basic 123dr3234'}, 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = MsTeamsAlerter(rule) + alert = HTTPPostAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - body = BasicMatchString(rule, match).__str__() - body = body.replace('`', "'") - body = "```{0}```".format('```\n\n```'.join(x for x in body.split('\n'))).replace('\n``````', '') expected_data = { - '@type': 'MessageCard', - '@context': 'http://schema.org/extensions', - 'summary': rule['ms_teams_alert_summary'], - 'title': rule['alert_subject'], - 'themeColor': '#124578', - 'text': body + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' } mock_post_request.assert_called_once_with( - rule['ms_teams_webhook_url'], + rule['http_post_url'], data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8', 'authorization': 'Basic 123dr3234'}, + proxies=None, + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_uses_custom_title(): +def test_http_alerter_post_ca_certs_true(): rule = { - 'name': 'Test Rule', + 'name': 'Test HTTP Post Alerter Without Payload', 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'alert_subject': 'Cool subject', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 
'http_post_ca_certs': True, 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = HTTPPostAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' } mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], + rule['http_post_url'], data=mock.ANY, - headers={'content-type': 'application/json'}, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - verify=True, - timeout=10 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_uses_custom_timeout(): +def test_http_alerter_post_ca_certs_false(): rule = { - 'name': 'Test Rule', + 'name': 'Test HTTP Post Alerter Without Payload', 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'alert_subject': 'Cool subject', - 'alert': [], - 'slack_timeout': 20 + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_ca_certs': False, + 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = HTTPPostAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' } mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], + rule['http_post_url'], data=mock.ANY, - headers={'content-type': 'application/json'}, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - verify=True, - timeout=20 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_uses_rule_name_when_custom_title_is_not_provided(): +def test_pagerduty_alerter(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_webhook_url': ['http://please.dontgohere.slack'], + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'description': 'Test PD Rule', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 
2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': '', + 'service_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with('https://events.pagerduty.com/generic/2010-04-15/create_event.json', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_v2(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none', + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'][0], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_uses_custom_slack_channel(): +def test_pagerduty_alerter_v2_payload_class_args(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_webhook_url': ['http://please.dontgohere.slack'], - 'slack_channel_override': '#test-alert', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'somefield', + 'pagerduty_v2_payload_class_args': ['@timestamp', 'somefield'], + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'channel': '#test-alert', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], 
- 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none', + 'client': 'ponies inc.', + 'payload': { + 'class': 'somefield', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'][0], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_uses_list_of_custom_slack_channel(): +def test_pagerduty_alerter_v2_payload_component_args(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_webhook_url': ['http://please.dontgohere.slack'], - 'slack_channel_override': ['#test-alert', '#test-alert2'], + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'somefield', + 'pagerduty_v2_payload_component_args': ['@timestamp', 'somefield'], + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - - expected_data1 = { - 'username': 'elastalert', - 'channel': '#test-alert', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - expected_data2 = { - 'username': 'elastalert', - 'channel': '#test-alert2', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' + expected_data = { + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'somefield', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_with( - rule['slack_webhook_url'][0], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data2 == json.loads(mock_post_request.call_args_list[1][1]['data']) + 
mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_attach_kibana_discover_url_when_generated(): +def test_pagerduty_alerter_v2_payload_group_args(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_webhook_url': 'http://please.dontgohere.slack', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'somefield', + 'pagerduty_v2_payload_group_args': ['@timestamp', 'somefield'], + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', - 'kibana_discover_url': 'http://kibana#discover' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'somefield', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' }, - { - 'color': '#ec4b98', - 'title': 'Discover in Kibana', - 'title_link': 'http://kibana#discover' - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_attach_kibana_discover_url_when_not_generated(): +def test_pagerduty_alerter_v2_payload_source_args(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_webhook_url': 'http://please.dontgohere.slack', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'somefield', + 'pagerduty_v2_payload_source_args': ['@timestamp', 'somefield'], 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) 
+ alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'somefield', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_kibana_discover_title(): +def test_pagerduty_alerter_v2_payload_custom_details(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_kibana_discover_title': 'Click to discover in Kibana', - 'slack_webhook_url': 'http://please.dontgohere.slack', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'pagerduty_v2_payload_custom_details': {'a': 'somefield', 'c': 'f'}, 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', - 'kibana_discover_url': 'http://kibana#discover' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'a': 'foobarbaz', + 'c': None, + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' }, - { - 'color': '#ec4b98', - 'title': 'Click to discover in Kibana', - 'title_link': 'http://kibana#discover' - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - 
mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_kibana_discover_color(): +def test_pagerduty_alerter_v2_payload_include_all_info(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_kibana_discover_color': 'blue', - 'slack_webhook_url': 'http://please.dontgohere.slack', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'pagerduty_v2_payload_include_all_info': False, 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', - 'kibana_discover_url': 'http://kibana#discover' + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - }, - { - 'color': 'blue', - 'title': 'Discover in Kibana', - 'title_link': 'http://kibana#discover' - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': {}, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_ignore_ssl_errors(): +def test_pagerduty_alerter_custom_incident_key(): rule = { - 'name': 'Test Rule', + 'name': 'Test PD Rule', 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_ignore_ssl_errors': True, + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom key', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = SlackAlerter(rule) + alert = PagerDutyAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00' + '@timestamp': 
'2017-01-01T00:00:00', + 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=False, - timeout=10 - ) - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' + 'client': 'ponies inc.', + 'description': 'Test PD Rule', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom key', + 'service_key': 'magicalbadgers', } + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_with_payload(): +def test_pagerduty_alerter_custom_incident_key_with_args(): rule = { - 'name': 'Test HTTP Post Alerter With Payload', + 'name': 'Test PD Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_payload': {'posted_name': 'somefield'}, - 'http_post_static_payload': {'name': 'somestaticname'}, + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['somefield'], 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = PagerDutyAlerter(rule) match = { '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' @@ -1695,33 +3433,33 @@ def test_http_alerter_with_payload(): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - 'posted_name': 'foobarbaz', - 'name': 'somestaticname' + 'client': 'ponies inc.', + 'description': 'Test PD Rule', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_with_payload_all_values(): +def test_pagerduty_alerter_custom_alert_subject(): rule = { - 'name': 'Test HTTP Post Alerter With Payload', + 'name': 'Test PD Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_payload': {'posted_name': 'somefield'}, - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_all_values': True, + 'alert_subject': 'Hungry kittens', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['somefield'], 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = PagerDutyAlerter(rule) match = { '@timestamp': 
'2017-01-01T00:00:00', 'somefield': 'foobarbaz' @@ -1729,61 +3467,66 @@ def test_http_alerter_with_payload_all_values(): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - 'posted_name': 'foobarbaz', - 'name': 'somestaticname', - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + 'client': 'ponies inc.', + 'description': 'Hungry kittens', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_without_payload(): +def test_pagerduty_alerter_custom_alert_subject_with_args(): rule = { - 'name': 'Test HTTP Post Alerter Without Payload', + 'name': 'Test PD Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, + 'alert_subject': '{0} kittens', + 'alert_subject_args': ['somefield'], + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['someotherfield'], 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = PagerDutyAlerter(rule) match = { '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + 'somefield': 'Stinky', + 'someotherfield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' + 'client': 'ponies inc.', + 'description': 'Stinky kittens', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinky\nsomeotherfield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter(): +def test_pagerduty_alerter_custom_alert_subject_with_args_specifying_trigger(): rule = { 'name': 'Test PD Rule', 'type': 'any', + 'alert_subject': '{0} kittens', + 'alert_subject_args': ['somefield'], 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_event_type': 'trigger', 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['someotherfield'], 'alert': [] } rules_loader = FileRulesLoader({}) @@ -1791,37 +3534,37 @@ def test_pagerduty_alerter(): alert = PagerDutyAlerter(rule) match = { '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + 'somefield': 'Stinkiest', + 'someotherfield': 'foobarbaz' } with 
mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { 'client': 'ponies inc.', - 'description': 'Test PD Rule', + 'description': 'Stinkiest kittens', 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinkiest\nsomeotherfield: foobarbaz\n' }, 'event_type': 'trigger', - 'incident_key': '', + 'incident_key': 'custom foobarbaz', 'service_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with('https://events.pagerduty.com/generic/2010-04-15/create_event.json', - data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter_v2(): +def test_pagerduty_alerter_proxy(): rule = { 'name': 'Test PD Rule', 'type': 'any', + 'alert_subject': '{0} kittens', + 'alert_subject_args': ['somefield'], 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_event_type': 'trigger', 'pagerduty_client_name': 'ponies inc.', - 'pagerduty_api_version': 'v2', - 'pagerduty_v2_payload_class': 'ping failure', - 'pagerduty_v2_payload_component': 'mysql', - 'pagerduty_v2_payload_group': 'app-stack', - 'pagerduty_v2_payload_severity': 'error', - 'pagerduty_v2_payload_source': 'mysql.host.name', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['someotherfield'], + 'pagerduty_proxy': 'http://proxy.url', 'alert': [] } rules_loader = FileRulesLoader({}) @@ -1829,322 +3572,557 @@ def test_pagerduty_alerter_v2(): alert = PagerDutyAlerter(rule) match = { '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + 'somefield': 'Stinkiest', + 'someotherfield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { 'client': 'ponies inc.', - 'payload': { - 'class': 'ping failure', - 'component': 'mysql', - 'group': 'app-stack', - 'severity': 'error', - 'source': 'mysql.host.name', - 'summary': 'Test PD Rule', - 'custom_details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' - }, - 'timestamp': '2017-01-01T00:00:00' + 'description': 'Stinkiest kittens', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinkiest\nsomeotherfield: foobarbaz\n' }, - 'event_action': 'trigger', - 'dedup_key': '', - 'routing_key': 'magicalbadgers', + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', } - mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', - data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'}) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter_custom_incident_key(): +def test_alert_text_kw(ea): + rule = ea.rules[0].copy() + rule['alert_text'] = '{field} at {time}' + rule['alert_text_kw'] = { + '@timestamp': 'time', + 'field': 'field', + } + match = {'@timestamp': '1918-01-17', 'field': 'value'} + alert_text = str(BasicMatchString(rule, match)) + body = '{field} at {@timestamp}'.format(**match) + assert body in alert_text + + +def 
test_alert_text_global_substitution(ea):
+    rule = ea.rules[0].copy()
+    rule['owner'] = 'the owner from rule'
+    rule['priority'] = 'priority from rule'
+    rule['abc'] = 'abc from rule'
+    rule['alert_text'] = 'Priority: {0}; Owner: {1}; Abc: {2}'
+    rule['alert_text_args'] = ['priority', 'owner', 'abc']
+
+    match = {
+        '@timestamp': '2016-01-01',
+        'field': 'field_value',
+        'abc': 'abc from match',
+    }
+
+    alert_text = str(BasicMatchString(rule, match))
+    assert 'Priority: priority from rule' in alert_text
+    assert 'Owner: the owner from rule' in alert_text
+
+    # When the key exists in both places, it will come from the match
+    assert 'Abc: abc from match' in alert_text
+
+
+def test_alert_text_kw_global_substitution(ea):
+    rule = ea.rules[0].copy()
+    rule['foo_rule'] = 'foo from rule'
+    rule['owner'] = 'the owner from rule'
+    rule['abc'] = 'abc from rule'
+    rule['alert_text'] = 'Owner: {owner}; Foo: {foo}; Abc: {abc}'
+    rule['alert_text_kw'] = {
+        'owner': 'owner',
+        'foo_rule': 'foo',
+        'abc': 'abc',
+    }
+
+    match = {
+        '@timestamp': '2016-01-01',
+        'field': 'field_value',
+        'abc': 'abc from match',
+    }
+
+    alert_text = str(BasicMatchString(rule, match))
+    assert 'Owner: the owner from rule' in alert_text
+    assert 'Foo: foo from rule' in alert_text
+
+    # When the key exists in both places, it will come from the match
+    assert 'Abc: abc from match' in alert_text
+
+
+def test_resolving_rule_references():
     rule = {
-        'name': 'Test PD Rule',
+        'name': 'test_rule',
+        'type': mock_rule(),
+        'owner': 'the_owner',
+        'priority': 2,
+        'list_of_things': [
+            '1',
+            '$owner$',
+            [
+                '11',
+                '$owner$',
+            ],
+        ],
+        'nested_dict': {
+            'nested_one': '1',
+            'nested_owner': '$owner$',
+        },
+        'resolved_string_reference': '$owner$',
+        'resolved_int_reference': '$priority$',
+        'unresolved_reference': '$foo$',
+    }
+    alert = Alerter(rule)
+    assert 'the_owner' == alert.rule['resolved_string_reference']
+    assert 2 == alert.rule['resolved_int_reference']
+    assert '$foo$' == alert.rule['unresolved_reference']
+    assert 'the_owner' == alert.rule['list_of_things'][1]
+    assert 'the_owner' == alert.rule['list_of_things'][2][1]
+    assert 'the_owner' == alert.rule['nested_dict']['nested_owner']
+
+
+def test_alerta_no_auth():
+    rule = {
+        'name': 'Test Alerta rule!',
+        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
+        'timeframe': datetime.timedelta(hours=1),
+        'timestamp_field': '@timestamp',
+        'alerta_api_skip_ssl': True,
+        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
+        'alerta_attributes_values': ["%(key)s", "%(logdate)s", "%(sender_ip)s"],
+        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
+        'alerta_event': "ProbeUP",
+        'alerta_group': "Health",
+        'alerta_origin': "ElastAlert 2",
+        'alerta_severity': "debug",
+        'alerta_text': "Probe %(hostname)s is UP at %(logdate)s GMT",
+        'alerta_value': "UP",
         'type': 'any',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_incident_key': 'custom key',
-        'alert': []
+        'alerta_use_match_timestamp': True,
+        'alert': 'alerta'
    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
+
     match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
+        '@timestamp': '2014-10-10T00:00:00',
+        # 'key': ---- field missing on purpose, to verify that the text is simply left empty
+        # 'logdate': ---- field missing on purpose, to verify that the text is simply left empty
+        'sender_ip': '1.1.1.1',
+        'hostname': 'aProbe'
    }
+
+    rules_loader = FileRulesLoader({})
+    
rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'client': 'ponies inc.', - 'description': 'Test PD Rule', - 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' - }, - 'event_type': 'trigger', - 'incident_key': 'custom key', - 'service_key': 'magicalbadgers', + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" } - mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + headers={ + 'content-type': 'application/json'}, + verify=False + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter_custom_incident_key_with_args(): +def test_alerta_auth(): rule = { - 'name': 'Test PD Rule', + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'alerta_api_key': '123456789ABCDEF', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_severity': "debug", 'type': 'any', - 'pagerduty_service_key': 'magicalbadgers', - 'pagerduty_client_name': 'ponies inc.', - 'pagerduty_incident_key': 'custom {0}', - 'pagerduty_incident_key_args': ['somefield'], - 'alert': [] + 'alerta_use_match_timestamp': True, + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = PagerDutyAlerter(rule) + match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = { - 'client': 'ponies inc.', - 'description': 'Test PD Rule', - 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' - }, - 'event_type': 'trigger', - 'incident_key': 'custom foobarbaz', - 'service_key': 'magicalbadgers', - } - mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json', + 'Authorization': 'Key {}'.format(rule['alerta_api_key'])}) -def test_pagerduty_alerter_custom_alert_subject(): +def test_alerta_new_style(): rule = { - 'name': 'Test PD Rule', + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': 
["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", 'type': 'any', - 'alert_subject': 'Hungry kittens', - 'pagerduty_service_key': 'magicalbadgers', - 'pagerduty_client_name': 'ponies inc.', - 'pagerduty_incident_key': 'custom {0}', - 'pagerduty_incident_key_args': ['somefield'], - 'alert': [] + 'alerta_use_match_timestamp': True, + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = PagerDutyAlerter(rule) + match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + # 'key': ---- missing field on purpose, to verify that simply the text is left empty + # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'client': 'ponies inc.', - 'description': 'Hungry kittens', - 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' - }, - 'event_type': 'trigger', - 'incident_key': 'custom foobarbaz', - 'service_key': 'magicalbadgers', + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" } - mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter_custom_alert_subject_with_args(): +def test_alerta_use_qk_as_resource(): rule = { - 'name': 'Test PD Rule', + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", 'type': 'any', - 'alert_subject': '{0} kittens', - 'alert_subject_args': ['somefield'], - 'pagerduty_service_key': 'magicalbadgers', - 'pagerduty_client_name': 'ponies inc.', - 'pagerduty_incident_key': 'custom {0}', - 
'pagerduty_incident_key_args': ['someotherfield'], - 'alert': [] + 'alerta_use_match_timestamp': True, + 'alerta_use_qk_as_resource': True, + 'query_key': 'hostname', + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = PagerDutyAlerter(rule) + match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'Stinky', - 'someotherfield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'client': 'ponies inc.', - 'description': 'Stinky kittens', - 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinky\nsomeotherfield: foobarbaz\n' - }, - 'event_type': 'trigger', - 'incident_key': 'custom foobarbaz', - 'service_key': 'magicalbadgers', + "origin": "ElastAlert 2", + "resource": "aProbe", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" } - mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_pagerduty_alerter_custom_alert_subject_with_args_specifying_trigger(): +def test_alerta_timeout(): rule = { - 'name': 'Test PD Rule', - 'type': 'any', - 'alert_subject': '{0} kittens', - 'alert_subject_args': ['somefield'], - 'pagerduty_service_key': 'magicalbadgers', - 'pagerduty_event_type': 'trigger', - 'pagerduty_client_name': 'ponies inc.', - 'pagerduty_incident_key': 'custom {0}', - 'pagerduty_incident_key_args': ['someotherfield'], - 'alert': [] + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_timeout': 86450, + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = PagerDutyAlerter(rule) + match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'Stinkiest', - 'someotherfield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = 
AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'client': 'ponies inc.', - 'description': 'Stinkiest kittens', - 'details': { - 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinkiest\nsomeotherfield: foobarbaz\n' - }, - 'event_type': 'trigger', - 'incident_key': 'custom foobarbaz', - 'service_key': 'magicalbadgers', + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86450, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" } - mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - -def test_alert_text_kw(ea): - rule = ea.rules[0].copy() - rule['alert_text'] = '{field} at {time}' - rule['alert_text_kw'] = { - '@timestamp': 'time', - 'field': 'field', - } - match = {'@timestamp': '1918-01-17', 'field': 'value'} - alert_text = str(BasicMatchString(rule, match)) - body = '{field} at {@timestamp}'.format(**match) - assert body in alert_text + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_alert_text_global_substitution(ea): - rule = ea.rules[0].copy() - rule['owner'] = 'the owner from rule' - rule['priority'] = 'priority from rule' - rule['abc'] = 'abc from rule' - rule['alert_text'] = 'Priority: {0}; Owner: {1}; Abc: {2}' - rule['alert_text_args'] = ['priority', 'owner', 'abc'] +def test_alerta_type(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_type': 'elastalert2', + 'alert': 'alerta' + } match = { - '@timestamp': '2016-01-01', - 'field': 'field_value', - 'abc': 'abc from match', + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } - alert_text = str(BasicMatchString(rule, match)) - assert 'Priority: priority from rule' in alert_text - assert 'Owner: the owner from rule' in alert_text + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) - # When the key exists in both places, it will come from the match - assert 'Abc: abc from match' in alert_text + expected_data = { + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": 
"debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert2", + "event": "ProbeUP" + } + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_alert_text_kw_global_substitution(ea): - rule = ea.rules[0].copy() - rule['foo_rule'] = 'foo from rule' - rule['owner'] = 'the owner from rule' - rule['abc'] = 'abc from rule' - rule['alert_text'] = 'Owner: {owner}; Foo: {foo}; Abc: {abc}' - rule['alert_text_kw'] = { - 'owner': 'owner', - 'foo_rule': 'foo', - 'abc': 'abc', +def test_alerta_resource(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_resource': 'elastalert2', + 'alert': 'alerta' } match = { - '@timestamp': '2016-01-01', - 'field': 'field_value', - 'abc': 'abc from match', + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } - alert_text = str(BasicMatchString(rule, match)) - assert 'Owner: the owner from rule' in alert_text - assert 'Foo: foo from rule' in alert_text - - # When the key exists in both places, it will come from the match - assert 'Abc: abc from match' in alert_text - + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) -def test_resolving_rule_references(ea): - rule = { - 'name': 'test_rule', - 'type': mock_rule(), - 'owner': 'the_owner', - 'priority': 2, - 'list_of_things': [ - '1', - '$owner$', - [ - '11', - '$owner$', - ], - ], - 'nested_dict': { - 'nested_one': '1', - 'nested_owner': '$owner$', - }, - 'resolved_string_reference': '$owner$', - 'resolved_int_reference': '$priority$', - 'unresolved_reference': '$foo$', + expected_data = { + "origin": "ElastAlert 2", + "resource": "elastalert2", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" } - alert = Alerter(rule) - assert 'the_owner' == alert.rule['resolved_string_reference'] - assert 2 == 
alert.rule['resolved_int_reference'] - assert '$foo$' == alert.rule['unresolved_reference'] - assert 'the_owner' == alert.rule['list_of_things'][1] - assert 'the_owner' == alert.rule['list_of_things'][2][1] - assert 'the_owner' == alert.rule['nested_dict']['nested_owner'] + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_alerta_no_auth(ea): +def test_alerta_service(): rule = { 'name': 'Test Alerta rule!', 'alerta_api_url': 'http://elastalerthost:8080/api/alert', 'timeframe': datetime.timedelta(hours=1), 'timestamp_field': '@timestamp', - 'alerta_api_skip_ssl': True, 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], - 'alerta_attributes_values': ["%(key)s", "%(logdate)s", "%(sender_ip)s"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], 'alerta_event': "ProbeUP", 'alerta_group': "Health", 'alerta_origin': "ElastAlert 2", 'alerta_severity': "debug", - 'alerta_text': "Probe %(hostname)s is UP at %(logdate)s GMT", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", 'alerta_value': "UP", 'type': 'any', 'alerta_use_match_timestamp': True, + 'alerta_service': ['elastalert2'], 'alert': 'alerta' } match = { '@timestamp': '2014-10-10T00:00:00', - # 'key': ---- missing field on purpose, to verify that simply the text is left empty - # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty 'sender_ip': '1.1.1.1', 'hostname': 'aProbe' } @@ -2159,7 +4137,7 @@ def test_alerta_no_auth(ea): "origin": "ElastAlert 2", "resource": "elastalert", "severity": "debug", - "service": ["elastalert"], + "service": ["elastalert2"], "tags": [], "text": "Probe aProbe is UP at GMT", "value": "UP", @@ -2169,7 +4147,7 @@ def test_alerta_no_auth(ea): "timeout": 86400, "correlate": ["ProbeUP", "ProbeDOWN"], "group": "Health", - "attributes": {"senderIP": "1.1.1.1", "hostname": "", "TimestampEvent": ""}, + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, "type": "elastalert", "event": "ProbeUP" } @@ -2177,24 +4155,32 @@ def test_alerta_no_auth(ea): mock_post_request.assert_called_once_with( alert.url, data=mock.ANY, + verify=True, headers={ - 'content-type': 'application/json'}, - verify=False + 'content-type': 'application/json'} ) assert expected_data == json.loads( mock_post_request.call_args_list[0][1]['data']) -def test_alerta_auth(ea): +def test_alerta_environment(): rule = { 'name': 'Test Alerta rule!', 'alerta_api_url': 'http://elastalerthost:8080/api/alert', - 'alerta_api_key': '123456789ABCDEF', 'timeframe': datetime.timedelta(hours=1), 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", 'type': 'any', 'alerta_use_match_timestamp': True, + 'alerta_environment': 'Production2', 'alert': 'alerta' } @@ -2210,16 +4196,37 @@ def test_alerta_auth(ea): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { + "origin": "ElastAlert 2", + "resource": 
"elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production2", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" + } + mock_post_request.assert_called_once_with( alert.url, data=mock.ANY, verify=True, headers={ - 'content-type': 'application/json', - 'Authorization': 'Key {}'.format(rule['alerta_api_key'])}) + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) -def test_alerta_new_style(ea): +def test_alerta_tags(): rule = { 'name': 'Test Alerta rule!', 'alerta_api_url': 'http://elastalerthost:8080/api/alert', @@ -2236,13 +4243,12 @@ def test_alerta_new_style(ea): 'alerta_value': "UP", 'type': 'any', 'alerta_use_match_timestamp': True, + 'alerta_tags': ['elastalert2'], 'alert': 'alerta' } match = { '@timestamp': '2014-10-10T00:00:00', - # 'key': ---- missing field on purpose, to verify that simply the text is left empty - # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty 'sender_ip': '1.1.1.1', 'hostname': 'aProbe' } @@ -2258,7 +4264,7 @@ def test_alerta_new_style(ea): "resource": "elastalert", "severity": "debug", "service": ["elastalert"], - "tags": [], + "tags": ['elastalert2'], "text": "Probe aProbe is UP at GMT", "value": "UP", "createTime": "2014-10-10T00:00:00.000000Z", @@ -2283,7 +4289,7 @@ def test_alerta_new_style(ea): mock_post_request.call_args_list[0][1]['data']) -def test_alert_subject_size_limit_no_args(ea): +def test_alert_subject_size_limit_no_args(): rule = { 'name': 'test_rule', 'type': mock_rule(), @@ -2297,6 +4303,39 @@ def test_alert_subject_size_limit_no_args(ea): assert 5 == len(alertSubject) +def test_alert_error(): + rule = { + 'name': 'test_rule', + 'type': mock_rule(), + 'owner': 'the_owner', + 'priority': 2, + 'alert_subject': 'A very long subject', + 'alert_subject_max_len': 5 + } + match = { + '@timestamp': '2021-01-01T00:00:00', + 'name': 'datadog-test-name' + } + alert = Alerter(rule) + try: + alert.alert([match]) + except NotImplementedError: + assert True + + +def test_alert_get_aggregation_summary_text__maximum_width(): + rule = { + 'name': 'test_rule', + 'type': mock_rule(), + 'owner': 'the_owner', + 'priority': 2, + 'alert_subject': 'A very long subject', + 'alert_subject_max_len': 5 + } + alert = Alerter(rule) + assert 80 == alert.get_aggregation_summary_text__maximum_width() + + def test_alert_subject_size_limit_with_args(ea): rule = { 'name': 'test_rule', @@ -2388,6 +4427,47 @@ def test_pagertree(): assert expected_data["Description"] == actual_data['Description'] +def test_pagertree_proxy(): + rule = { + 'name': 'Test PagerTree Rule', + 'type': 'any', + 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx', + 'pagertree_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerTreeAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'event_type': 'create', + 'Id': str(uuid.uuid4()), 
+ 'Title': 'Test PagerTree Rule', + 'Description': 'Test PagerTree Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + + mock_post_request.assert_called_once_with( + rule['pagertree_integration_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + uuid4hex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) + match = uuid4hex.match(actual_data['Id']) + assert bool(match) is True + assert expected_data["event_type"] == actual_data['event_type'] + assert expected_data["Title"] == actual_data['Title'] + assert expected_data["Description"] == actual_data['Description'] + + def test_line_notify(): rule = { 'name': 'Test LineNotify Rule', @@ -2525,6 +4605,41 @@ def test_gitter_msg_level_error(): assert 'error' in actual_data['level'] +def test_gitter_proxy(): + rule = { + 'name': 'Test Gitter Rule', + 'type': 'any', + 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', + 'gitter_msg_level': 'error', + 'gitter_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GitterAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'level': 'error' + } + + mock_post_request.assert_called_once_with( + rule['gitter_webhook_url'], + mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][0][1]) + assert expected_data == actual_data + assert 'error' in actual_data['level'] + + def test_chatwork(): rule = { 'name': 'Test Chatwork Rule', @@ -2554,47 +4669,211 @@ def test_chatwork(): auth=None ) - actual_data = mock_post_request.call_args_list[0][1]['params'] + actual_data = mock_post_request.call_args_list[0][1]['params'] + assert expected_data == actual_data + + +def test_chatwork_proxy(): + rule = { + 'name': 'Test Chatwork Rule', + 'type': 'any', + 'chatwork_apikey': 'xxxx1', + 'chatwork_room_id': 'xxxx2', + 'chatwork_proxy': 'http://proxy.url', + 'chatwork_proxy_login': 'admin', + 'chatwork_proxy_pass': 'password', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ChatworkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + } + + mock_post_request.assert_called_once_with( + 'https://api.chatwork.com/v2/rooms/xxxx2/messages', + params=mock.ANY, + headers={'X-ChatWorkToken': 'xxxx1'}, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = mock_post_request.call_args_list[0][1]['params'] + assert expected_data == actual_data + + +def test_telegram(): + rule = { + 'name': 'Test Telegram Rule', + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 
'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'chat_id': rule['telegram_room_id'], + 'text': '⚠ *Test Telegram Rule* ⚠ ```\nTest Telegram Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n ```', + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + mock_post_request.assert_called_once_with( + 'https://api.telegram.org/botxxxxx1/sendMessage', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_telegram_proxy(): + rule = { + 'name': 'Test Telegram Rule', + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'telegram_proxy': 'http://proxy.url', + 'telegram_proxy_login': 'admin', + 'telegram_proxy_pass': 'password', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'chat_id': rule['telegram_room_id'], + 'text': '⚠ *Test Telegram Rule* ⚠ ```\nTest Telegram Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n ```', + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + mock_post_request.assert_called_once_with( + 'https://api.telegram.org/botxxxxx1/sendMessage', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_telegram_text_maxlength(): + rule = { + 'name': 'Test Telegram Rule' + ('a' * 3985), + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'chat_id': rule['telegram_room_id'], + 'text': '⚠ *Test Telegram Rule' + ('a' * 3979) + + '\n⚠ *message was cropped according to telegram limits!* ⚠ ```', + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + mock_post_request.assert_called_once_with( + 'https://api.telegram.org/botxxxxx1/sendMessage', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data -def test_telegram(): +def test_service_now(): rule = { - 'name': 'Test Telegram Rule', + 'name': 'Test ServiceNow Rule', 'type': 'any', - 'telegram_bot_token': 'xxxxx1', - 'telegram_room_id': 'xxxxx2', + 'username': 'ServiceNow username', + 'password': 'ServiceNow password', + 'servicenow_rest_url': 'https://xxxxxxxxxx', + 'short_description': 'ServiceNow short_description', + 'comments': 'ServiceNow comments', + 'assignment_group': 'ServiceNow assignment_group', + 'category': 'ServiceNow category', + 'subcategory': 'ServiceNow subcategory', + 'cmdb_ci': 'ServiceNow cmdb_ci', + 'caller_id': 'ServiceNow caller_id', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - 
alert = TelegramAlerter(rule) + alert = ServiceNowAlerter(rule) match = { '@timestamp': '2021-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'chat_id': rule['telegram_room_id'], - 'text': '⚠ *Test Telegram Rule* ⚠ ```\nTest Telegram Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n ```', - 'parse_mode': 'markdown', - 'disable_web_page_preview': True + 'description': 'Test ServiceNow Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'short_description': rule['short_description'], + 'comments': rule['comments'], + 'assignment_group': rule['assignment_group'], + 'category': rule['category'], + 'subcategory': rule['subcategory'], + 'cmdb_ci': rule['cmdb_ci'], + 'caller_id': rule['caller_id'] } mock_post_request.assert_called_once_with( - 'https://api.telegram.org/botxxxxx1/sendMessage', + rule['servicenow_rest_url'], + auth=(rule['username'], rule['password']), + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - auth=None + proxies=None ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data -def test_service_now(): +def test_service_now_proxy(): rule = { 'name': 'Test ServiceNow Rule', 'type': 'any', @@ -2608,6 +4887,7 @@ def test_service_now(): 'subcategory': 'ServiceNow subcategory', 'cmdb_ci': 'ServiceNow cmdb_ci', 'caller_id': 'ServiceNow caller_id', + 'servicenow_proxy': 'http://proxy.url', 'alert': [] } rules_loader = FileRulesLoader({}) @@ -2639,7 +4919,7 @@ def test_service_now(): 'Accept': 'application/json;charset=utf-8' }, data=mock.ANY, - proxies=None + proxies={'https': 'http://proxy.url'} ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -2684,6 +4964,45 @@ def test_victor_ops(): assert expected_data == actual_data +def test_victor_ops_proxy(): + rule = { + 'name': 'Test VictorOps Rule', + 'type': 'any', + 'victorops_api_key': 'xxxx1', + 'victorops_routing_key': 'xxxx2', + 'victorops_message_type': 'INFO', + 'victorops_entity_display_name': 'no entity display name', + 'victorops_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = VictorOpsAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'message_type': rule['victorops_message_type'], + 'entity_display_name': rule['victorops_entity_display_name'], + 'monitoring_tool': 'ElastAlert', + 'state_message': 'Test VictorOps Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + + mock_post_request.assert_called_once_with( + 'https://alert.victorops.com/integrations/generic/20131114/alert/xxxx1/xxxx2', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + def test_google_chat_basic(): rule = { 'name': 'Test GoogleChat Rule', @@ -2869,6 +5188,92 @@ def test_discord_not_footer(): assert expected_data == actual_data +def test_discord_proxy(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 
'discord_embed_color': 0xffffff, + 'discord_proxy': 'http://proxy.url', + 'discord_proxy_login': 'admin', + 'discord_proxy_password': 'password', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_discord_description_maxlength(): + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule' + ('a' * 1933) + + '\n *message was cropped according to discord embed description limits!* ```', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + def test_dingtalk_text(): rule = { 'name': 'Test DingTalk Rule', @@ -3053,7 +5458,114 @@ def test_dingtalk_action_card(): assert expected_data == actual_data -def test_Mattermost_alert_text_only(): +def test_dingtalk_proxy(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'dingtalk_btn_orientation': '1', + 'dingtalk_btns': [ + { + 'title': 'test1', + 'actionURL': 'https://xxxxx0/' + }, + { + 'title': 'test2', + 'actionURL': 'https://xxxxx1/' + } + ], + 'dingtalk_proxy': 'http://proxy.url', + 'dingtalk_proxy_login': 'admin', + 'dingtalk_proxy_pass': 'password', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'btnOrientation': rule['dingtalk_btn_orientation'], + 'btns': rule['dingtalk_btns'] + } + } + + mock_post_request.assert_called_once_with( + 
'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_proxy(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_proxy': 'https://proxy.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies={'https': 'https://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_alert_text_only(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -3099,7 +5611,7 @@ def test_Mattermost_alert_text_only(): assert expected_data == actual_data -def test_Mattermost_not_alert_text_only(): +def test_mattermost_not_alert_text_only(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -3143,11 +5655,10 @@ def test_Mattermost_not_alert_text_only(): ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - print(actual_data) assert expected_data == actual_data -def test_Mattermost_msg_fields(): +def test_mattermost_msg_fields(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -3209,7 +5720,7 @@ def test_Mattermost_msg_fields(): assert expected_data == actual_data -def test_Mattermost_icon_url_override(): +def test_mattermost_icon_url_override(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -3258,7 +5769,7 @@ def test_Mattermost_icon_url_override(): assert expected_data == actual_data -def test_Mattermost_channel_override(): +def test_mattermost_channel_override(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -3307,7 +5818,7 @@ def test_Mattermost_channel_override(): assert expected_data == actual_data -def test_Mattermost_ignore_ssl_errors(): +def test_mattermost_ignore_ssl_errors(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', diff --git a/tests/util_test.py b/tests/util_test.py index 55a2f9c8f..2a24446d3 100644 --- a/tests/util_test.py +++ b/tests/util_test.py @@ -7,14 +7,18 @@ from dateutil.parser import parse as dt from elastalert.util import add_raw_postfix +from elastalert.util import dt_to_ts_with_format +from elastalert.util import flatten_dict from elastalert.util import format_index from elastalert.util import lookup_es_key from elastalert.util import parse_deadline from elastalert.util import parse_duration +from elastalert.util import pytzfy from elastalert.util import replace_dots_in_field_names from elastalert.util 
import resolve_string
 from elastalert.util import set_es_key
 from elastalert.util import should_scrolling_continue
+from elastalert.util import ts_to_dt_with_format
 
 
 @pytest.mark.parametrize('spec, expected_delta', [
@@ -228,3 +232,22 @@ def test_should_scrolling_continue():
     assert should_scrolling_continue(rule_before_first_run) is True
     assert should_scrolling_continue(rule_before_max_scrolling) is True
     assert should_scrolling_continue(rule_over_max_scrolling) is False
+
+
+def test_ts_to_dt_with_format():
+    assert ts_to_dt_with_format('2021/02/01 12:30:00', '%Y/%m/%d %H:%M:%S') == dt('2021-02-01 12:30:00+00:00')
+    assert ts_to_dt_with_format('01/02/2021 12:30:00', '%d/%m/%Y %H:%M:%S') == dt('2021-02-01 12:30:00+00:00')
+
+
+def test_dt_to_ts_with_format():
+    assert dt_to_ts_with_format(dt('2021-02-01 12:30:00+00:00'), '%Y/%m/%d %H:%M:%S') == '2021/02/01 12:30:00'
+    assert dt_to_ts_with_format(dt('2021-02-01 12:30:00+00:00'), '%d/%m/%Y %H:%M:%S') == '01/02/2021 12:30:00'
+
+
+def test_flatten_dict():
+    assert flatten_dict({'test': 'value1', 'test2': 'value2'}) == {'test': 'value1', 'test2': 'value2'}
+
+
+def test_pytzfy():
+    assert pytzfy(dt('2021-02-01 12:30:00+00:00')) == dt('2021-02-01 12:30:00+00:00')
+    assert pytzfy(datetime(2018, 12, 31, 5, 0, 30, 1000)) == dt('2018-12-31 05:00:30.001000')

From a1230b6a4327844b411a1687b122c5d1179b0711 Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Fri, 7 May 2021 19:37:01 +0900
Subject: [PATCH 0198/1065] remove configparser

---
 requirements.txt | 1 -
 setup.py         | 1 -
 2 files changed, 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 478c75561..878d0355a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,7 +3,6 @@ aws-requests-auth>=0.3.0
 sortedcontainers>=2.2.2
 boto3>=1.4.4
 cffi>=1.11.5
-configparser>=3.5.0
 croniter>=0.3.16
 elasticsearch==7.0.0
 envparse>=0.2.0

diff --git a/setup.py b/setup.py
index c1dcc7bcb..5423daba6 100644
--- a/setup.py
+++ b/setup.py
@@ -29,7 +29,6 @@
         'aws-requests-auth>=0.3.0',
         'sortedcontainers>=2.2.2',
         'boto3>=1.4.4',
-        'configparser>=3.5.0',
         'croniter>=0.3.16',
         'elasticsearch==7.0.0',
         'envparse>=0.2.0',

From 77d2da1a337816fafeade905dbe87900c1f8e243 Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Fri, 7 May 2021 21:25:07 +0900
Subject: [PATCH 0199/1065] Add Slack Alerts Footer

---
 docs/source/ruletypes.rst |  4 ++++
 elastalert/alerts.py      |  6 +++++
 elastalert/schema.yaml    |  2 ++
 tests/alerts_test.py      | 50 +++++++++++++++++++++++++++++++++++++++
 4 files changed, 62 insertions(+)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index bb4462061..afd9203ac 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2453,6 +2453,10 @@ Example slack_alert_fields::
 
 ``slack_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate.
 
+``slack_footer``: Add static footer text to the alert. Defaults to "".
+
+``slack_footer_icon``: A public URL for a footer icon. Defaults to "".
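+
+Example slack_footer (a minimal illustrative snippet; the footer text and icon URL are placeholder values taken from the test fixtures)::
+
+    slack_footer: "Elastic Alerts"
+    slack_footer_icon: "http://footer.icon.url"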
+ Splunk On-Call (Formerly VictorOps) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 646323d87..8cbf99155 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1060,6 +1060,8 @@ def __init__(self, rule): self.slack_attach_kibana_discover_url = self.rule.get('slack_attach_kibana_discover_url', False) self.slack_kibana_discover_color = self.rule.get('slack_kibana_discover_color', '#ec4b98') self.slack_kibana_discover_title = self.rule.get('slack_kibana_discover_title', 'Discover in Kibana') + self.footer = self.rule.get('slack_footer', '') + self.footer_icon = self.rule.get('slack_footer_icon', '') def format_body(self, body): # https://api.slack.com/docs/formatting @@ -1122,6 +1124,10 @@ def alert(self, matches): if self.slack_title_link != '': payload['attachments'][0]['title_link'] = self.slack_title_link + if self.footer != '' and self.footer_icon != '': + payload['attachments'][0]['footer'] = self.footer + payload['attachments'][0]['footer_icon'] = self.footer_icon + if self.slack_attach_kibana_discover_url: kibana_discover_url = lookup_es_key(matches[0], 'kibana_discover_url') if kibana_discover_url: diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index ca402d19e..b4a513653 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -282,6 +282,8 @@ properties: slack_attach_kibana_discover_url: {type: boolean} slack_kibana_discover_color: {type: string} slack_kibana_discover_title: {type: string} + slack_footer: {type: string} + slack_footer_icon: {type: string} ### Mattermost mattermost_webhook_url: *arrayOfString diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 3af230a1a..3e1febf7c 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -2749,6 +2749,56 @@ def test_slack_ca_certs(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) +def test_slack_footer(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_footer': 'Elastic Alerts', + 'slack_footer_icon': 'http://footer.icon.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'footer': 'Elastic Alerts', + 'footer_icon': 'http://footer.icon.url' + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + def test_http_alerter_with_payload(): rule = { 'name': 'Test HTTP Post Alerter With Payload', From 75223b255e2ff8d02dc7b56a3e596207e645e973 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 9 May 2021 02:41:33 +0900 Subject: [PATCH 0200/1065] Added settings for slack and mattermost mattermost mattermost_title_link mattermost_footer mattermost_footer_icon mattermost_image_url 
mattermost_thumb_url
mattermost_author_name
mattermost_author_link
mattermost_author_icon

slack
slack_image_url
slack_thumb_url
slack_author_name
slack_author_link
slack_author_icon
slack_msg_pretext
---
 docs/source/ruletypes.rst |   28 +
 elastalert/alerts.py      |   69 ++-
 elastalert/schema.yaml    |   14 +
 tests/alerts_test.py      | 1172 +++++++++++++++++++++++++++++--------
 4 files changed, 1032 insertions(+), 251 deletions(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index afd9203ac..536c38066 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2182,6 +2182,22 @@ Example mattermost_msg_fields::
        value: static field
        short: false
 
+``mattermost_title_link``: You can add a link in your Mattermost notification by setting this to a valid URL. Requires mattermost_title to be set. Defaults to "".
+
+``mattermost_footer``: Add static footer text to the alert. Defaults to "".
+
+``mattermost_footer_icon``: A public URL for a footer icon. Defaults to "".
+
+``mattermost_image_url``: An optional URL to an image file (GIF, JPEG, PNG, BMP, or SVG). Defaults to "".
+
+``mattermost_thumb_url``: An optional URL to an image file (GIF, JPEG, PNG, BMP, or SVG) that is displayed as a thumbnail. Defaults to "".
+
+``mattermost_author_name``: An optional name used to identify the author. Defaults to "".
+
+``mattermost_author_link``: An optional URL used to hyperlink the author_name. Defaults to "".
+
+``mattermost_author_icon``: An optional URL used to display a 16x16 pixel icon beside the author_name. Defaults to "".
+
 Microsoft Teams
 ~~~~~~~~~~~~~~~
 
@@ -2457,6 +2473,18 @@ Example slack_alert_fields::
 
 ``slack_footer_icon``: A public URL for a footer icon. Defaults to "".
 
+``slack_image_url``: An optional URL to an image file (GIF, JPEG, PNG, BMP, or SVG). Defaults to "".
+
+``slack_thumb_url``: An optional URL to an image file (GIF, JPEG, PNG, BMP, or SVG) that is displayed as a thumbnail. Defaults to "".
+
+``slack_author_name``: An optional name used to identify the author. Defaults to "".
+
+``slack_author_link``: An optional URL used to hyperlink the author_name. Defaults to "".
+
+``slack_author_icon``: An optional URL used to display a 16x16 pixel icon beside the author_name. Defaults to "".
+
+``slack_msg_pretext``: You can set the message attachment pretext using this option. Defaults to "".
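+
+Example (a minimal illustrative snippet; all URLs and text values below are placeholders mirroring the test fixtures)::
+
+    slack_msg_pretext: "pretext value"
+    slack_author_name: "author name"
+    slack_author_link: "http://author.url"
+    slack_author_icon: "http://author.icon"
+    slack_image_url: "http://image.url"
+    slack_thumb_url: "http://thumb.url"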
+ Splunk On-Call (Formerly VictorOps) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 8cbf99155..0237c0318 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1060,8 +1060,14 @@ def __init__(self, rule): self.slack_attach_kibana_discover_url = self.rule.get('slack_attach_kibana_discover_url', False) self.slack_kibana_discover_color = self.rule.get('slack_kibana_discover_color', '#ec4b98') self.slack_kibana_discover_title = self.rule.get('slack_kibana_discover_title', 'Discover in Kibana') - self.footer = self.rule.get('slack_footer', '') - self.footer_icon = self.rule.get('slack_footer_icon', '') + self.slack_footer = self.rule.get('slack_footer', '') + self.slack_footer_icon = self.rule.get('slack_footer_icon', '') + self.slack_image_url = self.rule.get('slack_image_url', '') + self.slack_thumb_url = self.rule.get('slack_thumb_url', '') + self.slack_author_name = self.rule.get('slack_author_name', '') + self.slack_author_link = self.rule.get('slack_author_link', '') + self.slack_author_icon = self.rule.get('slack_author_icon', '') + self.slack_msg_pretext = self.rule.get('slack_msg_pretext', '') def format_body(self, body): # https://api.slack.com/docs/formatting @@ -1124,9 +1130,29 @@ def alert(self, matches): if self.slack_title_link != '': payload['attachments'][0]['title_link'] = self.slack_title_link - if self.footer != '' and self.footer_icon != '': - payload['attachments'][0]['footer'] = self.footer - payload['attachments'][0]['footer_icon'] = self.footer_icon + if self.slack_footer != '': + payload['attachments'][0]['footer'] = self.slack_footer + + if self.slack_footer_icon != '': + payload['attachments'][0]['footer_icon'] = self.slack_footer_icon + + if self.slack_image_url != '': + payload['attachments'][0]['image_url'] = self.slack_image_url + + if self.slack_thumb_url != '': + payload['attachments'][0]['thumb_url'] = self.slack_thumb_url + + if self.slack_author_name != '': + payload['attachments'][0]['author_name'] = self.slack_author_name + + if self.slack_author_link != '': + payload['attachments'][0]['author_link'] = self.slack_author_link + + if self.slack_author_icon != '': + payload['attachments'][0]['author_icon'] = self.slack_author_icon + + if self.slack_msg_pretext != '': + payload['attachments'][0]['pretext'] = self.slack_msg_pretext if self.slack_attach_kibana_discover_url: kibana_discover_url = lookup_es_key(matches[0], 'kibana_discover_url') @@ -1186,6 +1212,15 @@ def __init__(self, rule): self.mattermost_msg_pretext = self.rule.get('mattermost_msg_pretext', '') self.mattermost_msg_color = self.rule.get('mattermost_msg_color', 'danger') self.mattermost_msg_fields = self.rule.get('mattermost_msg_fields', '') + self.mattermost_image_url = self.rule.get('mattermost_image_url', '') + self.mattermost_title_link = self.rule.get('mattermost_title_link', '') + self.mattermost_footer = self.rule.get('mattermost_footer', '') + self.mattermost_footer_icon = self.rule.get('mattermost_footer_icon', '') + self.mattermost_image_url = self.rule.get('mattermost_image_url', '') + self.mattermost_thumb_url = self.rule.get('mattermost_thumb_url', '') + self.mattermost_author_name = self.rule.get('mattermost_author_name', '') + self.mattermost_author_link = self.rule.get('mattermost_author_link', '') + self.mattermost_author_icon = self.rule.get('mattermost_author_icon', '') def get_aggregation_summary_text__maximum_width(self): width = super(MattermostAlerter, self).get_aggregation_summary_text__maximum_width() @@ 
-1250,6 +1285,30 @@ def alert(self, matches): if self.mattermost_channel_override != '': payload['channel'] = self.mattermost_channel_override + if self.mattermost_title_link != '': + payload['attachments'][0]['title_link'] = self.mattermost_title_link + + if self.mattermost_footer != '': + payload['attachments'][0]['footer'] = self.mattermost_footer + + if self.mattermost_footer_icon != '': + payload['attachments'][0]['footer_icon'] = self.mattermost_footer_icon + + if self.mattermost_image_url != '': + payload['attachments'][0]['image_url'] = self.mattermost_image_url + + if self.mattermost_thumb_url != '': + payload['attachments'][0]['thumb_url'] = self.mattermost_thumb_url + + if self.mattermost_author_name != '': + payload['attachments'][0]['author_name'] = self.mattermost_author_name + + if self.mattermost_author_link != '': + payload['attachments'][0]['author_link'] = self.mattermost_author_link + + if self.mattermost_author_icon != '': + payload['attachments'][0]['author_icon'] = self.mattermost_author_icon + for url in self.mattermost_webhook_url: try: if self.mattermost_ignore_ssl_errors: diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index b4a513653..e34cff896 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -284,6 +284,12 @@ properties: slack_kibana_discover_title: {type: string} slack_footer: {type: string} slack_footer_icon: {type: string} + slack_image_url: {type: string} + slack_thumb_url: {type: string} + slack_author_name: {type: string} + slack_author_link: {type: string} + slack_author_icon: {type: string} + slack_msg_pretext: {type: string} ### Mattermost mattermost_webhook_url: *arrayOfString @@ -295,6 +301,14 @@ properties: mattermost_msg_color: {enum: [good, warning, danger]} mattermost_msg_pretext: {type: string} mattermost_msg_fields: *mattermostField + mattermost_title_link: {type: string} + mattermost_footer: {type: string} + mattermost_footer_icon: {type: string} + mattermost_image_url: {type: string} + mattermost_thumb_url: {type: string} + mattermost_author_name: {type: string} + mattermost_author_link: {type: string} + mattermost_author_icon: {type: string} ## Opsgenie opsgenie_details: diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 3e1febf7c..dcce1293e 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -2799,217 +2799,301 @@ def test_slack_footer(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_with_payload(): +def test_slack_image_url(): rule = { - 'name': 'Test HTTP Post Alerter With Payload', + 'name': 'Test Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_payload': {'posted_name': 'somefield'}, - 'http_post_static_payload': {'name': 'somestaticname'}, + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_image_url': 'http://image.url', + 'alert_subject': 'Cool subject', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = SlackAlerter(rule) match = { - '@timestamp': '2017-01-01T00:00:00', + '@timestamp': '2016-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'posted_name': 'foobarbaz', - 'name': 'somestaticname' + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': 
BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'image_url': 'http://image.url', + } + ], + 'text': '', + 'parse': 'none' } mock_post_request.assert_called_once_with( - rule['http_post_url'], + rule['slack_webhook_url'], data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + headers={'content-type': 'application/json'}, proxies=None, - timeout=10, - verify=True + verify=True, + timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_with_payload_all_values(): +def test_slack_thumb_url(): rule = { - 'name': 'Test HTTP Post Alerter With Payload', + 'name': 'Test Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_payload': {'posted_name': 'somefield'}, - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_all_values': True, + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_thumb_url': 'http://thumb.url', + 'alert_subject': 'Cool subject', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = SlackAlerter(rule) match = { - '@timestamp': '2017-01-01T00:00:00', + '@timestamp': '2016-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - 'posted_name': 'foobarbaz', - 'name': 'somestaticname', - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'thumb_url': 'http://thumb.url', + } + ], + 'text': '', + 'parse': 'none' } mock_post_request.assert_called_once_with( - rule['http_post_url'], + rule['slack_webhook_url'], data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + headers={'content-type': 'application/json'}, proxies=None, - timeout=10, - verify=True + verify=True, + timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_without_payload(): +def test_slack_author_name(): rule = { - 'name': 'Test HTTP Post Alerter Without Payload', + 'name': 'Test Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_author_name': 'author name', + 'alert_subject': 'Cool subject', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = SlackAlerter(rule) match = { - '@timestamp': '2017-01-01T00:00:00', + '@timestamp': '2016-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'author_name': 'author name', + } + ], + 'text': '', + 'parse': 'none' 
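+            # 'posted_name' is resolved from match['somefield'] via the http_post_payload mapping defined above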
} mock_post_request.assert_called_once_with( - rule['http_post_url'], + rule['slack_webhook_url'], data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + headers={'content-type': 'application/json'}, proxies=None, - timeout=10, - verify=True + verify=True, + timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_proxy(): +def test_slack_author_link(): rule = { - 'name': 'Test HTTP Post Alerter Without Payload', + 'name': 'Test Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_proxy': 'http://proxy.url', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_author_link': 'http://author.url', + 'alert_subject': 'Cool subject', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = SlackAlerter(rule) match = { - '@timestamp': '2017-01-01T00:00:00', + '@timestamp': '2016-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'author_link': 'http://author.url', + } + ], + 'text': '', + 'parse': 'none' } mock_post_request.assert_called_once_with( - rule['http_post_url'], + rule['slack_webhook_url'], data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies={'https': 'http://proxy.url'}, - timeout=10, - verify=True + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_timeout(): +def test_slack_author_icon(): rule = { - 'name': 'Test HTTP Post Alerter Without Payload', + 'name': 'Test Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_timeout': 20, + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_author_icon': 'http://author.icon', + 'alert_subject': 'Cool subject', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = SlackAlerter(rule) match = { - '@timestamp': '2017-01-01T00:00:00', + '@timestamp': '2016-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'author_icon': 'http://author.icon', + } + ], + 'text': '', + 'parse': 'none' } mock_post_request.assert_called_once_with( - rule['http_post_url'], + rule['slack_webhook_url'], data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 
'application/json;charset=utf-8'}, + headers={'content-type': 'application/json'}, proxies=None, - timeout=20, - verify=True + verify=True, + timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_headers(): +def test_slack_msg_pretext(): rule = { - 'name': 'Test HTTP Post Alerter Without Payload', + 'name': 'Test Rule', 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_headers': {'authorization': 'Basic 123dr3234'}, + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_msg_pretext': 'pretext value', + 'alert_subject': 'Cool subject', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) + alert = SlackAlerter(rule) match = { - '@timestamp': '2017-01-01T00:00:00', + '@timestamp': '2016-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'pretext': 'pretext value' + } + ], + 'text': '', + 'parse': 'none' } mock_post_request.assert_called_once_with( - rule['http_post_url'], + rule['slack_webhook_url'], data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8', 'authorization': 'Basic 123dr3234'}, + headers={'content-type': 'application/json'}, proxies=None, - timeout=10, - verify=True + verify=True, + timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_post_ca_certs_true(): +def test_http_alerter_with_payload(): rule = { - 'name': 'Test HTTP Post Alerter Without Payload', + 'name': 'Test HTTP Post Alerter With Payload', 'type': 'any', 'http_post_url': 'http://test.webhook.url', + 'http_post_payload': {'posted_name': 'somefield'}, 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_ca_certs': True, 'alert': [] } rules_loader = FileRulesLoader({}) @@ -3022,8 +3106,7 @@ def test_http_alerter_post_ca_certs_true(): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', + 'posted_name': 'foobarbaz', 'name': 'somestaticname' } mock_post_request.assert_called_once_with( @@ -3037,13 +3120,14 @@ def test_http_alerter_post_ca_certs_true(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_post_ca_certs_false(): +def test_http_alerter_with_payload_all_values(): rule = { - 'name': 'Test HTTP Post Alerter Without Payload', + 'name': 'Test HTTP Post Alerter With Payload', 'type': 'any', 'http_post_url': 'http://test.webhook.url', + 'http_post_payload': {'posted_name': 'somefield'}, 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_ca_certs': False, + 'http_post_all_values': True, 'alert': [] } rules_loader = FileRulesLoader({}) @@ -3056,9 +3140,213 @@ def test_http_alerter_post_ca_certs_false(): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { + 'posted_name': 'foobarbaz', 
+ 'name': 'somestaticname', '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' + 'somefield': 'foobarbaz' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_without_payload(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_proxy(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies={'https': 'http://proxy.url'}, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_timeout(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_timeout': 20, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=20, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_headers(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 
'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_headers': {'authorization': 'Basic 123dr3234'}, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8', 'authorization': 'Basic 123dr3234'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_post_ca_certs_true(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_ca_certs': True, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_post_ca_certs_false(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_ca_certs': False, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' } mock_post_request.assert_called_once_with( rule['http_post_url'], @@ -5370,11 +5658,421 @@ def test_dingtalk_markdown(): 'dingtalk_access_token': 'xxxxxxx', 'dingtalk_msgtype': 'markdown', 'alert': [], - 'alert_subject': 'Test DingTalk' + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'markdown', + 'markdown': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = 
json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_dingtalk_single_action_card(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'single_action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'singleTitle': rule['dingtalk_single_title'], + 'singleURL': rule['dingtalk_single_url'] + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_dingtalk_action_card(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'dingtalk_btn_orientation': '1', + 'dingtalk_btns': [ + { + 'title': 'test1', + 'actionURL': 'https://xxxxx0/' + }, + { + 'title': 'test2', + 'actionURL': 'https://xxxxx1/' + } + ], + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'btnOrientation': rule['dingtalk_btn_orientation'], + 'btns': rule['dingtalk_btns'] + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_dingtalk_proxy(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'dingtalk_btn_orientation': '1', + 'dingtalk_btns': [ + { + 'title': 'test1', + 'actionURL': 'https://xxxxx0/' + }, + { + 'title': 'test2', + 'actionURL': 'https://xxxxx1/' + } + ], + 'dingtalk_proxy': 'http://proxy.url', + 'dingtalk_proxy_login': 'admin', + 'dingtalk_proxy_pass': 'password', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with 
mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'btnOrientation': rule['dingtalk_btn_orientation'], + 'btns': rule['dingtalk_btns'] + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_proxy(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_proxy': 'https://proxy.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies={'https': 'https://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_alert_text_only(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_not_alert_text_only(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'exclude_fields', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + 
'@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [] + } + ], + 'text': 'Test Mattermost Rule\n\n', + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_msg_fields(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_msg_fields': [ + { + 'title': 'Stack', + 'value': "{0} {1}", + 'short': False, + 'args': ["type", "msg.status_code"] + }, + { + 'title': 'Name', + 'value': 'static field', + 'short': False + } + ], + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [ + {'title': 'Stack', 'value': ' ', 'short': False}, + {'title': 'Name', 'value': 'static field', 'short': False} + ], + 'text': 'Test Mattermost Rule\n\n' + } + ], 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_icon_url_override(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_icon_url_override': 'http://xxxx/icon.png', + 'alert': [], + 'alert_subject': 'Test Mattermost' } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = DingTalkAlerter(rule) + alert = MattermostAlerter(rule) match = { '@timestamp': '2021-01-01T00:00:00', 'somefield': 'foobarbaz' @@ -5383,42 +6081,47 @@ def test_dingtalk_markdown(): alert.alert([match]) expected_data = { - 'msgtype': 'markdown', - 'markdown': { - 'title': 'Test DingTalk', - 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' - } + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], + 'username': 'elastalert', + 'icon_url': 'http://xxxx/icon.png' } mock_post_request.assert_called_once_with( - 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + rule['mattermost_webhook_url'], data=mock.ANY, - headers={ - 'Content-Type': 'application/json', - 'Accept': 'application/json;charset=utf-8' - }, - proxies=None, - auth=None + 
headers={'content-type': 'application/json'}, + verify=True, + proxies=None ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data -def test_dingtalk_single_action_card(): +def test_mattermost_channel_override(): rule = { - 'name': 'Test DingTalk Rule', + 'name': 'Test Mattermost Rule', 'type': 'any', - 'dingtalk_access_token': 'xxxxxxx', - 'dingtalk_msgtype': 'single_action_card', - 'dingtalk_single_title': 'elastalert', - 'dingtalk_single_url': 'http://xxxxx2', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_channel_override': 'test channel', 'alert': [], - 'alert_subject': 'Test DingTalk' + 'alert_subject': 'Test Mattermost' } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = DingTalkAlerter(rule) + alert = MattermostAlerter(rule) match = { '@timestamp': '2021-01-01T00:00:00', 'somefield': 'foobarbaz' @@ -5427,55 +6130,47 @@ def test_dingtalk_single_action_card(): alert.alert([match]) expected_data = { - 'msgtype': 'actionCard', - 'actionCard': { - 'title': 'Test DingTalk', - 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', - 'singleTitle': rule['dingtalk_single_title'], - 'singleURL': rule['dingtalk_single_url'] - } + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], + 'username': 'elastalert', + 'channel': 'test channel' } mock_post_request.assert_called_once_with( - 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + rule['mattermost_webhook_url'], data=mock.ANY, - headers={ - 'Content-Type': 'application/json', - 'Accept': 'application/json;charset=utf-8' - }, - proxies=None, - auth=None + headers={'content-type': 'application/json'}, + verify=True, + proxies=None ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data -def test_dingtalk_action_card(): +def test_mattermost_ignore_ssl_errors(): rule = { - 'name': 'Test DingTalk Rule', + 'name': 'Test Mattermost Rule', 'type': 'any', - 'dingtalk_access_token': 'xxxxxxx', - 'dingtalk_msgtype': 'action_card', - 'dingtalk_single_title': 'elastalert', - 'dingtalk_single_url': 'http://xxxxx2', - 'dingtalk_btn_orientation': '1', - 'dingtalk_btns': [ - { - 'title': 'test1', - 'actionURL': 'https://xxxxx0/' - }, - { - 'title': 'test2', - 'actionURL': 'https://xxxxx1/' - } - ], + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_ignore_ssl_errors': True, 'alert': [], - 'alert_subject': 'Test DingTalk' + 'alert_subject': 'Test Mattermost' } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = DingTalkAlerter(rule) + alert = MattermostAlerter(rule) match = { '@timestamp': '2021-01-01T00:00:00', 'somefield': 'foobarbaz' @@ -5484,58 +6179,46 @@ def test_dingtalk_action_card(): alert.alert([match]) expected_data = { - 'msgtype': 'actionCard', - 'actionCard': { - 'title': 'Test DingTalk', - 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', - 'btnOrientation': rule['dingtalk_btn_orientation'], - 'btns': rule['dingtalk_btns'] - } + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 
'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], + 'username': 'elastalert' } mock_post_request.assert_called_once_with( - 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + rule['mattermost_webhook_url'], data=mock.ANY, - headers={ - 'Content-Type': 'application/json', - 'Accept': 'application/json;charset=utf-8' - }, - proxies=None, - auth=None + headers={'content-type': 'application/json'}, + verify=False, + proxies=None ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data -def test_dingtalk_proxy(): +def test_mattermost_title_link(): rule = { - 'name': 'Test DingTalk Rule', + 'name': 'Test Mattermost Rule', 'type': 'any', - 'dingtalk_access_token': 'xxxxxxx', - 'dingtalk_msgtype': 'action_card', - 'dingtalk_single_title': 'elastalert', - 'dingtalk_single_url': 'http://xxxxx2', - 'dingtalk_btn_orientation': '1', - 'dingtalk_btns': [ - { - 'title': 'test1', - 'actionURL': 'https://xxxxx0/' - }, - { - 'title': 'test2', - 'actionURL': 'https://xxxxx1/' - } - ], - 'dingtalk_proxy': 'http://proxy.url', - 'dingtalk_proxy_login': 'admin', - 'dingtalk_proxy_pass': 'password', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_title_link': 'http://title.url', 'alert': [], - 'alert_subject': 'Test DingTalk' + 'alert_subject': 'Test Mattermost' } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = DingTalkAlerter(rule) + alert = MattermostAlerter(rule) match = { '@timestamp': '2021-01-01T00:00:00', 'somefield': 'foobarbaz' @@ -5544,31 +6227,33 @@ def test_dingtalk_proxy(): alert.alert([match]) expected_data = { - 'msgtype': 'actionCard', - 'actionCard': { - 'title': 'Test DingTalk', - 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', - 'btnOrientation': rule['dingtalk_btn_orientation'], - 'btns': rule['dingtalk_btns'] - } + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'title_link': 'http://title.url' + } + ], + 'username': 'elastalert' } mock_post_request.assert_called_once_with( - 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + rule['mattermost_webhook_url'], data=mock.ANY, - headers={ - 'Content-Type': 'application/json', - 'Accept': 'application/json;charset=utf-8' - }, - proxies={'https': 'http://proxy.url'}, - auth=HTTPProxyAuth('admin', 'password') + headers={'content-type': 'application/json'}, + verify=True, + proxies=None ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data -def test_mattermost_proxy(): +def test_mattermost_footer(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -5576,7 +6261,7 @@ def test_mattermost_proxy(): 'mattermost_webhook_url': 'http://xxxxx', 'mattermost_msg_pretext': 'aaaaa', 'mattermost_msg_color': 'danger', - 'mattermost_proxy': 'https://proxy.url', + 'mattermost_footer': 'Mattermost footer', 'alert': [], 'alert_subject': 'Test Mattermost' } @@ -5598,9 +6283,11 @@ def test_mattermost_proxy(): 'title': 'Test Mattermost', 'pretext': 'aaaaa', 'fields': [], - 'text': 'Test Mattermost Rule\n\n' + 'text': 'Test Mattermost Rule\n\n', + 'footer': 'Mattermost footer' } - ], 'username': 'elastalert' + ], + 'username': 'elastalert' } 
mock_post_request.assert_called_once_with( @@ -5608,14 +6295,14 @@ def test_mattermost_proxy(): data=mock.ANY, headers={'content-type': 'application/json'}, verify=True, - proxies={'https': 'https://proxy.url'} + proxies=None ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data -def test_mattermost_alert_text_only(): +def test_mattermost_footer_icon(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -5623,6 +6310,7 @@ def test_mattermost_alert_text_only(): 'mattermost_webhook_url': 'http://xxxxx', 'mattermost_msg_pretext': 'aaaaa', 'mattermost_msg_color': 'danger', + 'mattermost_footer_icon': 'http://icon.url', 'alert': [], 'alert_subject': 'Test Mattermost' } @@ -5644,9 +6332,11 @@ def test_mattermost_alert_text_only(): 'title': 'Test Mattermost', 'pretext': 'aaaaa', 'fields': [], - 'text': 'Test Mattermost Rule\n\n' + 'text': 'Test Mattermost Rule\n\n', + 'footer_icon': 'http://icon.url' } - ], 'username': 'elastalert' + ], + 'username': 'elastalert' } mock_post_request.assert_called_once_with( @@ -5661,14 +6351,15 @@ def test_mattermost_alert_text_only(): assert expected_data == actual_data -def test_mattermost_not_alert_text_only(): +def test_mattermost_image_url(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', - 'alert_text_type': 'exclude_fields', + 'alert_text_type': 'alert_text_only', 'mattermost_webhook_url': 'http://xxxxx', 'mattermost_msg_pretext': 'aaaaa', 'mattermost_msg_color': 'danger', + 'mattermost_image_url': 'http://image.url', 'alert': [], 'alert_subject': 'Test Mattermost' } @@ -5689,10 +6380,11 @@ def test_mattermost_not_alert_text_only(): 'color': 'danger', 'title': 'Test Mattermost', 'pretext': 'aaaaa', - 'fields': [] + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'image_url': 'http://image.url' } ], - 'text': 'Test Mattermost Rule\n\n', 'username': 'elastalert' } @@ -5708,7 +6400,7 @@ def test_mattermost_not_alert_text_only(): assert expected_data == actual_data -def test_mattermost_msg_fields(): +def test_mattermost_thumb_url(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -5716,19 +6408,7 @@ def test_mattermost_msg_fields(): 'mattermost_webhook_url': 'http://xxxxx', 'mattermost_msg_pretext': 'aaaaa', 'mattermost_msg_color': 'danger', - 'mattermost_msg_fields': [ - { - 'title': 'Stack', - 'value': "{0} {1}", - 'short': False, - 'args': ["type", "msg.status_code"] - }, - { - 'title': 'Name', - 'value': 'static field', - 'short': False - } - ], + 'mattermost_thumb_url': 'http://thumb.url', 'alert': [], 'alert_subject': 'Test Mattermost' } @@ -5749,13 +6429,12 @@ def test_mattermost_msg_fields(): 'color': 'danger', 'title': 'Test Mattermost', 'pretext': 'aaaaa', - 'fields': [ - {'title': 'Stack', 'value': ' ', 'short': False}, - {'title': 'Name', 'value': 'static field', 'short': False} - ], - 'text': 'Test Mattermost Rule\n\n' + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'thumb_url': 'http://thumb.url' } - ], 'username': 'elastalert' + ], + 'username': 'elastalert' } mock_post_request.assert_called_once_with( @@ -5770,7 +6449,7 @@ def test_mattermost_msg_fields(): assert expected_data == actual_data -def test_mattermost_icon_url_override(): +def test_mattermost_author_name(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -5778,7 +6457,7 @@ def test_mattermost_icon_url_override(): 'mattermost_webhook_url': 'http://xxxxx', 'mattermost_msg_pretext': 'aaaaa', 'mattermost_msg_color': 'danger', - 'mattermost_icon_url_override': 
'http://xxxx/icon.png', + 'mattermost_author_name': 'author name', 'alert': [], 'alert_subject': 'Test Mattermost' } @@ -5800,11 +6479,11 @@ def test_mattermost_icon_url_override(): 'title': 'Test Mattermost', 'pretext': 'aaaaa', 'fields': [], - 'text': 'Test Mattermost Rule\n\n' + 'text': 'Test Mattermost Rule\n\n', + 'author_name': 'author name' } ], - 'username': 'elastalert', - 'icon_url': 'http://xxxx/icon.png' + 'username': 'elastalert' } mock_post_request.assert_called_once_with( @@ -5819,7 +6498,7 @@ def test_mattermost_icon_url_override(): assert expected_data == actual_data -def test_mattermost_channel_override(): +def test_mattermost_author_link(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -5827,7 +6506,7 @@ def test_mattermost_channel_override(): 'mattermost_webhook_url': 'http://xxxxx', 'mattermost_msg_pretext': 'aaaaa', 'mattermost_msg_color': 'danger', - 'mattermost_channel_override': 'test channel', + 'mattermost_author_link': 'http://author.link.url', 'alert': [], 'alert_subject': 'Test Mattermost' } @@ -5849,11 +6528,11 @@ def test_mattermost_channel_override(): 'title': 'Test Mattermost', 'pretext': 'aaaaa', 'fields': [], - 'text': 'Test Mattermost Rule\n\n' + 'text': 'Test Mattermost Rule\n\n', + 'author_link': 'http://author.link.url' } ], - 'username': 'elastalert', - 'channel': 'test channel' + 'username': 'elastalert' } mock_post_request.assert_called_once_with( @@ -5868,7 +6547,7 @@ def test_mattermost_channel_override(): assert expected_data == actual_data -def test_mattermost_ignore_ssl_errors(): +def test_mattermost_author_icon(): rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -5876,7 +6555,7 @@ def test_mattermost_ignore_ssl_errors(): 'mattermost_webhook_url': 'http://xxxxx', 'mattermost_msg_pretext': 'aaaaa', 'mattermost_msg_color': 'danger', - 'mattermost_ignore_ssl_errors': True, + 'mattermost_author_icon': 'http://author.icon.url', 'alert': [], 'alert_subject': 'Test Mattermost' } @@ -5898,7 +6577,8 @@ def test_mattermost_ignore_ssl_errors(): 'title': 'Test Mattermost', 'pretext': 'aaaaa', 'fields': [], - 'text': 'Test Mattermost Rule\n\n' + 'text': 'Test Mattermost Rule\n\n', + 'author_icon': 'http://author.icon.url' } ], 'username': 'elastalert' @@ -5908,7 +6588,7 @@ def test_mattermost_ignore_ssl_errors(): rule['mattermost_webhook_url'], data=mock.ANY, headers={'content-type': 'application/json'}, - verify=False, + verify=True, proxies=None ) From 702339b86181e8c17c1933dc1537d7fc9451d1e4 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sun, 9 May 2021 14:43:48 +0100 Subject: [PATCH 0201/1065] Add a couple of changelog entries --- CHANGELOG.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c67b00740..2902b865e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,8 @@ ## New features - Added support for alerting via Amazon Simple Email System (SES) - [#105](https://github.com/jertel/elastalert2/pull/105) - @nsano-rururu +- Support a footer in alert text - [#133](https://github.com/jertel/elastalert2/pull/133) - @nsano-rururu +- Support extra message features for Slack and Mattermost - [#140](https://github.com/jertel/elastalert2/pull/140) - @nsano-rururu ## Other changes - Fix issue with testing alerts that contain Jinja templates - [#101](https://github.com/jertel/elastalert2/pull/101) - @jertel @@ -51,4 +53,4 @@ - Now publishing container images to both DockerHub and to GitHub Packages for redundancy. 
- Container images are now built and published via GitHub actions instead of relying on DockerHub's automated builds. - Update PIP library description and Helm chart description to be consistent. -- Continue updates to change references from _ElastAlert_ to _ElastAlert 2_ \ No newline at end of file +- Continue updates to change references from _ElastAlert_ to _ElastAlert 2_ From 57de2a589d2acf35f4ffd3cc54261779e472bab8 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sun, 9 May 2021 14:59:53 +0100 Subject: [PATCH 0202/1065] Refactor TheHive alerter --- docs/source/ruletypes.rst | 57 ++++++++++---- elastalert/alerts.py | 162 +++++++++++++++++++++++--------------- 2 files changed, 140 insertions(+), 79 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 536c38066..9e7785fe2 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2578,21 +2578,40 @@ Example usage:: TheHive ~~~~~~~ -theHive alert type will send JSON request to theHive (Security Incident Response Platform) with TheHive4py API. Sent request will be stored like Hive Alert with description and observables. +TheHive alerter can be used to create a new alert in TheHive. The alerter supports adding tags, +custom fields, and observables from the alert matches and rule data. Required: -``hive_connection``: The connection details as key:values. Required keys are ``hive_host``, ``hive_port`` and ``hive_apikey``. +``hive_connection``: The connection details to your instance (see example below for the required syntax). +Only ``hive_apikey`` is required, ``hive_host`` and ``hive_port`` default to ``http://localhost`` and +``9000`` respectively. -``hive_alert_config``: Configuration options for the alert. +``hive_alert_config``: Configuration options for the alert, see example below for structure. + +If not supplied, the alert title and description will be populated from the ElastAlert default +``title`` and ``alert_text`` fields, including any defined ``alert_text_args``. Optional: -``hive_proxies``: Proxy configuration. +``tags`` can be populated from the matched record, using the same syntax used in ``alert_text_args``. +If a record doesn't contain the specified value, the rule itself will be examined for the tag. If +this doesn't contain the tag either, the tag is attached without modification to the alert. For +aggregated alerts, all matches are examined individually, and tags generated for each one. All tags +are then attached to the same alert. -``hive_verify``: Wether or not to enable SSL certificate validation. Defaults to False. +``customFields`` can also be populated from rule fields as well as matched results. Custom fields +are only populated once. If an alert is an aggregated alert, the custom field values will be populated +using the first matched record, before checking the rule. If neither matches, the ``customField.value`` +will be used directly. -``hive_observable_data_mapping``: If needed, matched data fields can be mapped to TheHive observable types using python string formatting. +``hive_observable_data_mapping``: If needed, matched data fields can be mapped to TheHive +observable types using the same syntax as ``tags``, described above. The algorithm used to populate +the observable value is also the same, including the behaviour for aggregated alerts. + +``hive_proxies``: Proxy configuration. + +``hive_verify``: Whether or not to enable SSL certificate validation. Defaults to False. 
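+
+For example, a minimal sketch of how tag lookup behaves (the ``host.name`` field
+below is purely illustrative, not something the alerter requires): given the
+configuration that follows, a match containing ``{'host': {'name': 'web-01'}}``
+produces an alert tagged ``elastalert`` and ``web-01``, while a match without
+that field falls back to the literal tag ``host.name``::
+
+    hive_alert_config:
+      tags: ['elastalert', 'host.name']
+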
Example usage:: @@ -2607,20 +2626,24 @@ Example usage:: https: '' hive_alert_config: - title: 'Title' ## This will default to {rule[index]_rule[name]} if not provided - type: 'external' - source: 'elastalert' - description: '{match[field1]} {rule[name]} Sample description' + customFields: + - name: example + type: string + value: example + follow: True severity: 2 - tags: ['tag1', 'tag2 {rule[name]}'] - tlp: 3 status: 'New' - follow: True + source: 'elastalert' + description: 'Sample description' + tags: ['tag1', 'tag2'] + title: 'Title' + tlp: 3 + type: 'external' hive_observable_data_mapping: - - domain: "{match[field1]}_{rule[name]}" - - domain: "{match[field]}" - - ip: "{match[ip_field]}" + - domain: agent.hostname + - domain: response.domain + - ip: client.ip Twilio ~~~~~~ @@ -2684,4 +2707,4 @@ Example usage:: zbx_sender_host: "zabbix-server" zbx_sender_port: 10051 zbx_host: "test001" - zbx_key: "sender_load1" \ No newline at end of file + zbx_key: "sender_load1" diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 0237c0318..14a25d736 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2042,74 +2042,112 @@ class HiveAlerter(Alerter): """ Use matched data to create alerts containing observables in an instance of TheHive """ - required_options = set(['hive_connection', 'hive_alert_config']) - def alert(self, matches): + def lookup_field(self, match: dict, field_name: str, default): + """Populates a field with values depending on the contents of the Elastalert match + provided to it. - connection_details = self.rule['hive_connection'] + Uses a similar algorithm to that implemented to populate the `alert_text_args`. + First checks any fields found in the match provided, then any fields defined in + the rule, finally returning the default value provided if no value can be found. 
+ """ + field_value = lookup_es_key(match, field_name) + if field_value is None: + field_value = self.rule.get(field_name, default) + + return field_value + + # Iterate through the matches, building up a list of observables + def load_observable_artifacts(self, match: dict): + artifacts = [] + for mapping in self.rule.get('hive_observable_data_mapping', []): + for observable_type, mapping_key in mapping.items(): + data = self.lookup_field(match, mapping_key, '') + artifact = {'tlp': 2, + 'tags': [], + 'message': None, + 'dataType': observable_type, + 'data': data} + artifacts.append(artifact) + + return artifacts + + def load_custom_fields(self, custom_fields_raw: list, match: dict): + custom_fields = {} + position = 0 + + for field in custom_fields_raw: + if (isinstance(field['value'], str)): + value = self.lookup_field(match, field['value'], field['value']) + else: + value = field['value'] + + custom_fields[field['name']] = {'order': position, field['type']: value} + position += 1 + + return custom_fields + + def load_tags(self, tag_names: list, match: dict): + tag_values = set() + for tag in tag_names: + tag_value = self.lookup_field(match, tag, tag) + if isinstance(tag_value, list): + for sub_tag in tag_value: + tag_values.add(sub_tag) + else: + tag_values.add(tag_value) + + return tag_values + def alert(self, matches): + # Build TheHive alert object, starting with some defaults, updating with any + # user-specified config + alert_config = { + 'artifacts': [], + 'customFields': {}, + 'date': int(time.time()) * 1000, + 'description': self.create_alert_body(matches), + 'sourceRef': str(uuid.uuid4())[0:6], + 'tags': [], + 'title': self.create_title(matches), + } + alert_config.update(self.rule.get('hive_alert_config', {})) + + # Iterate through each match found, populating the alert tags and observables as required + tags = set() for match in matches: - context = {'rule': self.rule, 'match': match} + alert_config['artifacts'].append(self.load_observable_artifacts(match)) + tags.update(self.load_tags(alert_config['tags'], match)) - artifacts = [] - for mapping in self.rule.get('hive_observable_data_mapping', []): - for observable_type, match_data_key in mapping.items(): - try: - match_data_keys = re.findall(r'\{match\[([^\]]*)\]', match_data_key) - rule_data_keys = re.findall(r'\{rule\[([^\]]*)\]', match_data_key) - data_keys = match_data_keys + rule_data_keys - context_keys = list(context['match'].keys()) + list(context['rule'].keys()) - if all([True if k in context_keys else False for k in data_keys]): - artifact = {'tlp': 2, 'tags': [], 'message': None, 'dataType': observable_type, - 'data': match_data_key.format(**context)} - artifacts.append(artifact) - except KeyError: - raise KeyError('\nformat string\n{}\nmatch data\n{}'.format(match_data_key, context)) - - alert_config = { - 'artifacts': artifacts, - 'caseTemplate': None, - 'customFields': {}, - 'date': int(time.time()) * 1000, - 'description': self.create_alert_body(matches), - 'sourceRef': str(uuid.uuid4())[0:6], - 'title': '{rule[index]}_{rule[name]}'.format(**context), - } - alert_config.update(self.rule.get('hive_alert_config', {})) - custom_fields = {} - for alert_config_field, alert_config_value in alert_config.items(): - if alert_config_field == 'customFields': - n = 0 - for cf_key, cf_value in alert_config_value.items(): - cf = {'order': n, cf_value['type']: cf_value['value'].format(**context)} - n += 1 - custom_fields[cf_key] = cf - elif isinstance(alert_config_value, str): - alert_value = 
alert_config_value.format(**context)
-                if alert_config_field in ['severity', 'tlp']:
-                    alert_value = int(alert_value)
-                alert_config[alert_config_field] = alert_value
-            elif isinstance(alert_config_value, (list, tuple)):
-                formatted_list = []
-                for element in alert_config_value:
-                    try:
-                        formatted_list.append(element.format(**context))
-                    except (AttributeError, KeyError, IndexError):
-                        formatted_list.append(element)
-                alert_config[alert_config_field] = formatted_list
-        if custom_fields:
-            alert_config['customFields'] = custom_fields
-
-        alert_body = json.dumps(alert_config, indent=4, sort_keys=True)
-        req = '{}:{}/api/alert'.format(connection_details['hive_host'], connection_details['hive_port'])
-        headers = {'Content-Type': 'application/json', 'Authorization': 'Bearer {}'.format(connection_details.get('hive_apikey', ''))}
-        proxies = connection_details.get('hive_proxies', {'http': '', 'https': ''})
-        verify = connection_details.get('hive_verify', False)
-        response = requests.post(req, headers=headers, data=alert_body, proxies=proxies, verify=verify)
-
-        if response.status_code != 201:
-            raise Exception('alert not successfully created in TheHive\n{}'.format(response.text))
+        alert_config['tags'] = list(tags)
+
+        # Populate the customFields
+        alert_config['customFields'] = self.load_custom_fields(alert_config['customFields'],
+                                                               matches[0])
+
+        # POST the alert to TheHive
+        connection_details = self.rule['hive_connection']
+
+        api_key = connection_details.get('hive_apikey', '')
+        hive_host = connection_details.get('hive_host', 'http://localhost')
+        hive_port = connection_details.get('hive_port', 9000)
+        proxies = connection_details.get('hive_proxies', {'http': '', 'https': ''})
+        verify = connection_details.get('hive_verify', False)
+
+        alert_body = json.dumps(alert_config, indent=4, sort_keys=True)
+        req = f'{hive_host}:{hive_port}/api/alert'
+        headers = {'Content-Type': 'application/json',
+                   'Authorization': f'Bearer {api_key}'}
+
+        response = requests.post(req,
+                                 headers=headers,
+                                 data=alert_body,
+                                 proxies=proxies,
+                                 verify=verify)
+
+        if response.status_code != 201:
+            raise Exception(f'Unable to create TheHive alert: {response.text}')

     def get_info(self):

From 321c8d543f7b696a6bf5aaf3882bac05a9b3d039 Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Sun, 9 May 2021 15:48:45 +0100
Subject: [PATCH 0203/1065] Fix artifact loading

---
 elastalert/alerts.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index 14a25d736..cea0b7b2a 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -2116,10 +2116,12 @@ def alert(self, matches):

         # Iterate through each match found, populating the alert tags and observables as required
         tags = set()
+        artifacts = []
         for match in matches:
-            alert_config['artifacts'].append(self.load_observable_artifacts(match))
+            artifacts = artifacts + self.load_observable_artifacts(match)
             tags.update(self.load_tags(alert_config['tags'], match))

+        alert_config['artifacts'] = artifacts
         alert_config['tags'] = list(tags)

         # Populate the customFields

From 178486e58c4cea47a084400c059ac274d93d580d Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Sun, 9 May 2021 15:50:53 +0100
Subject: [PATCH 0204/1065] Remove unused dependency

---
 elastalert/alerts.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index cea0b7b2a..0b3736c42 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -3,7 +3,6 @@
 import datetime
 import json
 import os
-import re
 import subprocess
 import sys
 import time

From 9ca0b583853a7bbaf4669027e487a33f8ce68f64 Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Mon, 10 May 2021 00:51:13 +0900
Subject: [PATCH 0205/1065] Modify ruletypes.rst, alerts.py, schema.yaml and
 add / modify test code for alerts_test.py

ruletypes.rst
  Command: Added description of fail_on_non_zero_exit
  Email: Partially modified the description of email_format
  OpsGenie: Partial correction of description
  Zabbix: Partial correction of description

alerts.py
  EmailAlerter: Set default value for smtp_port
  HTTPPostAlerter: Added mandatory check for http_post_url
  DiscordAlerter: Add comment

schema.yaml
  Add items that have not been added.
  Change the order of alerter settings to the order of documents.

alerts_test.py
  add
    test_ms_teams_ea_exception
    test_slack_ea_exception
    test_http_alerter_post_ea_exception
    test_pagerduty_ea_exception
    test_alerta_ea_exception
    test_datadog_alerterea_exception
    test_pagertree_ea_exception
    test_line_notify_ea_exception
    test_gitter_ea_exception
    test_chatwork_ea_exception
    test_telegram_ea_exception
    test_service_now_ea_exception
    test_victor_ops_ea_exception
    test_google_chat_ea_exception
    test_discord_ea_exception
    test_dingtalk_ea_exception
    test_mattermost_ea_exception
  modify
    test_email
    test_email_with_unicode_strings
    test_email_with_auth
    test_email_with_cert_key
    test_email_with_cc
    test_email_with_bcc
    test_email_with_cc_and_bcc
    test_email_with_args
    test_command
---
 docs/source/ruletypes.rst |  17 +-
 elastalert/alerts.py      |   5 +-
 elastalert/schema.yaml    | 305 ++++++++++++++++-------
 tests/alerts_test.py      | 495 +++++++++++++++++++++++++++++++++++++-
 4 files changed, 719 insertions(+), 103 deletions(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 536c38066..35c0a7391 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -1720,6 +1720,8 @@ until the command exits or sends an EOF to stdout.

 ``pipe_alert_text``: If true, the standard alert body text will be passed to stdin of the command. Note that this will cause ElastAlert to block until the command exits or sends an EOF to stdout. It cannot be used at the same time as ``pipe_match_json``.

+``fail_on_non_zero_exit``: By default this is ``False``. Allows monitoring of when commands fail to run. When a command returns a non-zero exit status, the alert raises an exception.
+
 Example usage using old-style format::

     alert:
@@ -1905,7 +1907,7 @@ by the smtp server.

 ``bcc``: This adds the BCC emails to the list of recipients but does not show up in the email message. By default, this is left empty.

-``email_format``: If set to ``html``, the email's MIME type will be set to HTML, and HTML content should correctly render. If you use this,
+``email_format``: If set to 'html', the email's MIME type will be set to HTML, and HTML content should correctly render. If you use this,
 you need to put your own HTML into ``alert_text`` and use ``alert_text_type: alert_text_jinja`` or ``alert_text_type: alert_text_only``.

 Exotel
@@ -2242,13 +2244,21 @@ The OpsGenie alert requires one option:

 Optional:

 ``opsgenie_account``: The OpsGenie account to integrate with.
+
 ``opsgenie_addr``: The OpsGenie URL to connect against, default is ``https://api.opsgenie.com/v2/alerts``
+
 ``opsgenie_recipients``: A list of OpsGenie recipients who will be notified by the alert.
+
 ``opsgenie_recipients_args``: Map of arguments used to format opsgenie_recipients.
+
 ``opsgenie_default_receipients``: List of default recipients to notify when the formatting of opsgenie_recipients is unsuccessful.
+
 ``opsgenie_teams``: A list of OpsGenie teams to notify (useful for schedules with escalation).
-``opsgenie_teams_args``: Map of arguments used to format opsgenie_teams (useful for assigning the alerts to teams based on some data)
+
+``opsgenie_teams_args``: Map of arguments used to format opsgenie_teams (useful for assigning the alerts to teams based on some data).
+
 ``opsgenie_default_teams``: List of default teams to notify when the formatting of opsgenie_teams is unsuccessful.
+
 ``opsgenie_tags``: A list of tags for this alert.

 ``opsgenie_message``: Set the OpsGenie message to something other than the rule name. The message can be formatted with fields from the first match e.g. "Error occurred for {app_name} at {timestamp}.".
@@ -2673,8 +2683,11 @@ Zabbix will send notification to a Zabbix server. The item in the host specified

 Required:

 ``zbx_sender_host``: The address where the Zabbix server is running.
+
 ``zbx_sender_port``: The port where the Zabbix server is listening.
+
 ``zbx_host``: This field sets up the host in Zabbix that receives the value sent by ElastAlert 2.
+
 ``zbx_key``: This field sets up the key in the host that receives the value sent by ElastAlert 2.

 Example usage::

diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index 0237c0318..96a92d8dc 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -416,7 +416,7 @@ def __init__(self, *args):
         self.smtp_host = self.rule.get('smtp_host', 'localhost')
         self.smtp_ssl = self.rule.get('smtp_ssl', False)
         self.from_addr = self.rule.get('from_addr', 'ElastAlert')
-        self.smtp_port = self.rule.get('smtp_port')
+        self.smtp_port = self.rule.get('smtp_port', 25)
         if self.rule.get('smtp_auth_file'):
             self.get_account(self.rule['smtp_auth_file'])
         self.smtp_key_file = self.rule.get('smtp_key_file')
@@ -1960,6 +1960,7 @@ def get_json_payload(self, match):

 class HTTPPostAlerter(Alerter):
     """ Requested elasticsearch indices are sent by HTTP POST. Encoded with JSON. """
""" + required_options = frozenset(['http_post_url']) def __init__(self, rule): super(HTTPPostAlerter, self).__init__(rule) @@ -2120,7 +2121,7 @@ def get_info(self): class DiscordAlerter(Alerter): - + """ Created a Discord for each alert """ required_options = frozenset(['discord_webhook_url']) def __init__(self, rule): diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index e34cff896..a451be3f1 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -240,66 +240,158 @@ properties: timestamp_field: {type: string} field: {} - ### Commands + ### Simple + simple_webhook_url: *arrayOfString + simple_proxy: {type: string} + + ### Alerta + alerta_api_url: {type: string} + alerta_api_key: {type: string} + alerta_use_qk_as_resource: {type: boolean} + alerta_use_match_timestamp: {type: boolean} + alerta_api_skip_ssl: {type: boolean} + alerta_severity: {type: string} + alerta_timeout: {type: integer} + alerta_type: {type: string} + alerta_resource: {type: string} # Python format string + alerta_service: {type: array, items: {type: string}} # Python format string + alerta_origin: {type: string} # Python format string + alerta_environment: {type: string} # Python format string + alerta_group: {type: string} # Python format string + alerta_correlate: {type: array, items: {type: string}} # Python format string + alerta_tags: {type: array, items: {type: string}} # Python format string + alerta_event: {type: string} # Python format stringalerta_use_match_timestamp + alerta_text: {type: string} # Python format string + alerta_value: {type: string} # Python format string + alerta_attributes_keys: {type: array, items: {type: string}} + alerta_attributes_values: {type: array, items: {type: string}} # Python format string + + ### AWS SES + ses_email: *arrayOfString + ses_from_addr: {type: string} + ses_aws_access_key: {type: string} + ses_aws_secret_key: {type: string} + ses_aws_region: {type: string} + ses_aws_profile: {type: string} + ses_email_reply_to: {type: string} + ses_cc: *arrayOfString + ses_bcc: *arrayOfString + + ### AWS SNS + sns_topic_arn: {type: string} + sns_aws_access_key_id: {type: string} + sns_aws_secret_access_key: {type: string} + sns_aws_region: {type: string} + sns_aws_profile: {type: string} + + ### Chatwork + chatwork_apikey: {type: string} + chatwork_room_id: {type: string} + + ### Command command: *arrayOfString pipe_match_json: {type: boolean} + pipe_alert_text: {type: boolean} fail_on_non_zero_exit: {type: boolean} + ### Datadog + datadog_api_key: {type: string} + datadog_app_key: {type: string} + + ### Dingtalk + dingtalk_access_token: {type: string} + dingtalk_msgtype: {type: string, enum: ['text', 'markdown', 'single_action_card', 'action_card']} + dingtalk_single_title: {type: string} + dingtalk_single_url: {type: string} + dingtalk_btn_orientation: {type: string} + + ## Discord + discord_webhook_url: {type: string} + discord_emoji_title: {type: string} + discord_proxy: {type: string} + discord_proxy_login: {type: string} + discord_proxy_password: {type: string} + discord_embed_color: {type: integer} + discord_embed_footer: {type: string} + discord_embed_icon_url: {type: string} + ### Email email: *arrayOfString - email_reply_to: {type: string} - notify_email: *arrayOfString # if rule is slow or erroring, send to this email + email_from_field: {type: string} + email_add_domain: {type: string} smtp_host: {type: string} + smtp_port: {type: integer} + smtp_ssl: {type: boolean} + smtp_auth_file: {type: string} + smtp_cert_file: {type: string} + 
smtp_key_file: {type: string} + email_reply_to: {type: string} from_addr: {type: string} + cc: *arrayOfString + bcc: *arrayOfString + email_format: {type: string} + notify_email: *arrayOfString # if rule is slow or erroring, send to this email + + ### Exotel + exotel_account_sid: {type: string} + exotel_auth_token: {type: string} + exotel_to_number: {type: string} + exotel_from_number: {type: string} + + ### Gitter + gitter_webhook_url: {type: string} + gitter_msg_level: {enum: [info, error]} + gitter_proxy: {type: string} + + ### GoogleChat + googlechat_webhook_url: {type: string} + googlechat_format: {type: string, enum: ['basic', 'card']} + googlechat_header_title: {type: string} + googlechat_header_subtitle: {type: string} + googlechat_header_image: {type: string} + googlechat_footer_kibanalink: {type: string} + + ### HTTP POST + http_post_url: {type: string} + http_post_proxy: {type: string} + http_post_ca_certs: {type: boolean} + http_post_ignore_ssl_errors: {type: boolean} + http_post_timeout: {type: integer} ### JIRA jira_server: {type: string} jira_project: {type: string} jira_issuetype: {type: string} jira_account_file: {type: string} # a Yaml file that includes the keys {user:, password:} - jira_assignee: {type: string} jira_component: *arrayOfString jira_components: *arrayOfString + jira_description: {type: string} jira_label: *arrayOfString jira_labels: *arrayOfString + jira_priority: {type: number} + jira_watchers: *arrayOfString jira_bump_tickets: {type: boolean} - jira_bump_in_statuses: *arrayOfString - jira_bump_not_in_statuses: *arrayOfString + jira_ignore_in_title: {type: string} jira_max_age: {type: number} - jira_watchers: *arrayOfString + jira_bump_not_in_statuses: *arrayOfString + jira_bump_in_statuses: *arrayOfString + jira_bump_only: {type: boolean} + jira_transition_to: {type: boolean} + jira_bump_after_inactivity: {type: number} - ### Slack - slack_webhook_url: *arrayOfString - slack_username_override: {type: string} - slack_emoji_override: {type: string} - slack_icon_url_override: {type: string} - slack_msg_color: {enum: [good, warning, danger]} - slack_parse_override: {enum: [none, full]} - slack_text_string: {type: string} - slack_ignore_ssl_errors: {type: boolean} - slack_ca_certs: {type: string} - slack_attach_kibana_discover_url: {type: boolean} - slack_kibana_discover_color: {type: string} - slack_kibana_discover_title: {type: string} - slack_footer: {type: string} - slack_footer_icon: {type: string} - slack_image_url: {type: string} - slack_thumb_url: {type: string} - slack_author_name: {type: string} - slack_author_link: {type: string} - slack_author_icon: {type: string} - slack_msg_pretext: {type: string} + ### Line Notify + linenotify_access_token: {type: string} ### Mattermost mattermost_webhook_url: *arrayOfString mattermost_proxy: {type: string} mattermost_ignore_ssl_errors: {type: boolean} mattermost_username_override: {type: string} - mattermost_icon_url_override: {type: string} mattermost_channel_override: {type: string} - mattermost_msg_color: {enum: [good, warning, danger]} + mattermost_icon_url_override: {type: string} mattermost_msg_pretext: {type: string} + mattermost_msg_color: {enum: [good, warning, danger]} mattermost_msg_fields: *mattermostField mattermost_title_link: {type: string} mattermost_footer: {type: string} @@ -310,7 +402,22 @@ properties: mattermost_author_link: {type: string} mattermost_author_icon: {type: string} - ## Opsgenie + ### Microsoft Teams + ms_teams_webhook_url: {type: string} + ms_teams_alert_summary: {type: 
string} + ms_teams_theme_color: {type: string} + ms_teams_proxy: {type: string} + ms_teams_alert_fixed_width: {type: boolean} + + ### Opsgenie + opsgenie_key: {type: string} + opsgenie_account: {type: string} + opsgenie_addr: {type: string} + opsgenie_message: {type: string} + opsgenie_alias: {type: string} + opsgenie_subject: {type: string} + opsgenie_priority: {type: string} + opsgenie_proxy: {type: string} opsgenie_details: type: object minProperties: 1 @@ -328,83 +435,99 @@ properties: pagerduty_service_key: {type: string} pagerduty_client_name: {type: string} pagerduty_event_type: {enum: [none, trigger, resolve, acknowledge]} - -### PagerTree + pagerduty_incident_key: {type: string} + pagerduty_incident_key_args: {type: array, items: {type: string}} + pagerduty_proxy: {type: string} + pagerduty_api_version: {type: string, enum: ['v1', 'v2']} + pagerduty_v2_payload_class: {type: string} + pagerduty_v2_payload_class_args: {type: array, items: {type: string}} + pagerduty_v2_payload_component: {type: string} + pagerduty_v2_payload_component_args: {type: array, items: {type: string}} + pagerduty_v2_payload_group: {type: string} + pagerduty_v2_payload_group_args: {type: array, items: {type: string}} + pagerduty_v2_payload_severity: {type: string, enum: ['critical', 'error', 'warning', 'info']} + pagerduty_v2_payload_source: {type: string} + pagerduty_v2_payload_source_args: {type: array, items: {type: string}} + pagerduty_v2_payload_include_all_info: {type: boolean} + + ### PagerTree pagertree_integration_url: {type: string} + pagertree_proxy: {type: string} + + ### ServiceNow + servicenow_rest_url: {type: string} + username: {type: string} + password: {type: string} + short_description: {type: string} + comments: {type: string} + assignment_group: {type: string} + category: {type: string} + subcategory: {type: string} + cmdb_ci: {type: string} + caller_id: {type: string} + servicenow_proxy: {type: string} + ### Slack + slack_webhook_url: *arrayOfString + slack_username_override: {type: string} + slack_channel_override: {type: string} + slack_emoji_override: {type: string} + slack_icon_url_override: {type: string} + slack_msg_color: {enum: [good, warning, danger]} + slack_parse_override: {enum: [none, full]} + slack_text_string: {type: string} + slack_proxy: {type: string} + slack_ignore_ssl_errors: {type: boolean} + slack_title: {type: string} + slack_title_link: {type: string} + slack_timeout: {type: integer} + slack_attach_kibana_discover_url: {type: boolean} + slack_kibana_discover_color: {type: string} + slack_kibana_discover_title: {type: string} + slack_ca_certs: {type: boolean} + slack_footer: {type: string} + slack_footer_icon: {type: string} + slack_image_url: {type: string} + slack_thumb_url: {type: string} + slack_author_name: {type: string} + slack_author_link: {type: string} + slack_author_icon: {type: string} + slack_msg_pretext: {type: string} - ### Exotel - exotel_account_sid: {type: string} - exotel_auth_token: {type: string} - exotel_to_number: {type: string} - exotel_from_number: {type: string} - - ### Twilio - twilio_account_sid: {type: string} - twilio_auth_token: {type: string} - twilio_to_number: {type: string} - twilio_from_number: {type: string} - twilio_message_service_sid: {type: string} - twilio_use_copilot: {type: boolean} - - ### VictorOps + ### Splunk On-Call (Formerly VictorOps) victorops_api_key: {type: string} victorops_routing_key: {type: string} victorops_message_type: {enum: [INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY]} victorops_entity_id: 
{type: string} victorops_entity_display_name: {type: string} + victorops_proxy: {type: string} + + ### Stomp + stomp_hostname: {type: string} + stomp_hostport: {type: string} + stomp_login: {type: string} + stomp_password: {type: string} + stomp_ssl: {type: boolean} + stomp_destination: {type: string} ### Telegram telegram_bot_token: {type: string} telegram_room_id: {type: string} telegram_api_url: {type: string} + telegram_proxy: {type: string} + telegram_proxy_login: {type: string} + telegram_proxy_pass: {type: string} - ### Gitter - gitter_webhook_url: {type: string} - gitter_proxy: {type: string} - gitter_msg_level: {enum: [info, error]} - - ### Alerta - alerta_api_url: {type: string} - alerta_api_key: {type: string} - alerta_severity: {type: string} - alerta_resource: {type: string} # Python format string - alerta_environment: {type: string} # Python format string - alerta_origin: {type: string} # Python format string - alerta_group: {type: string} # Python format string - alerta_service: {type: array, items: {type: string}} # Python format string - alerta_correlate: {type: array, items: {type: string}} # Python format string - alerta_tags: {type: array, items: {type: string}} # Python format string - alerta_event: {type: string} # Python format string - alerta_text: {type: string} # Python format string - alerta_type: {type: string} - alerta_value: {type: string} # Python format string - alerta_attributes_keys: {type: array, items: {type: string}} - alerta_attributes_values: {type: array, items: {type: string}} # Python format string - - ### Simple - simple_webhook_url: *arrayOfString - simple_proxy: {type: string} - - ### LineNotify - linenotify_access_token: {type: string} + ### Twilio + twilio_account_sid: {type: string} + twilio_auth_token: {type: string} + twilio_to_number: {type: string} + twilio_from_number: {type: string} + twilio_message_service_sid: {type: string} + twilio_use_copilot: {type: boolean} ### Zabbix zbx_sender_host: {type: string} zbx_sender_port: {type: integer} zbx_host: {type: string} zbx_key: {type: string} - - ## Discord - discord_webhook_url: {type: string} - - ### Dingtalk - dingtalk_access_token: {type: string} - dingtalk_msgtype: {type: string} - dingtalk_single_title: {type: string} - dingtalk_single_url: {type: string} - dingtalk_btn_orientation: {type: string} - - ### Chatwork - chatwork_apikey: {type: string} - chatwork_room_id: {type: string} diff --git a/tests/alerts_test.py b/tests/alerts_test.py index dcce1293e..97c9a35fc 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -10,6 +10,7 @@ import pytest from jira.exceptions import JIRAError from requests.auth import HTTPProxyAuth +from requests.exceptions import RequestException from elastalert.alerts import AlertaAlerter from elastalert.alerts import Alerter @@ -38,6 +39,7 @@ from elastalert.alerts import VictorOpsAlerter from elastalert.util import ts_add from elastalert.util import ts_now +from elastalert.util import EAException class mock_rule: @@ -114,7 +116,7 @@ def test_email(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost'), + expected = [mock.call('localhost', 25), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -179,7 +181,7 @@ def test_email_with_unicode_strings(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost'), + expected = [mock.call('localhost', 25), mock.call().ehlo(), 
mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -207,7 +209,7 @@ def test_email_with_auth(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost'), + expected = [mock.call('localhost', 25), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -229,7 +231,7 @@ def test_email_with_cert_key(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost'), + expected = [mock.call('localhost', 25), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile='dummy/cert.crt', keyfile='dummy/client.key'), @@ -248,7 +250,7 @@ def test_email_with_cc(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost'), + expected = [mock.call('localhost', 25), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -273,7 +275,7 @@ def test_email_with_bcc(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost'), + expected = [mock.call('localhost', 25), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -298,7 +300,7 @@ def test_email_with_cc_and_bcc(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost'), + expected = [mock.call('localhost', 25), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -343,7 +345,7 @@ def test_email_with_args(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value', 'test_arg1': 'testing', 'test': {'term': ':)', 'arg3': '☃'}}]) - expected = [mock.call('localhost'), + expected = [mock.call('localhost', 25), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -1658,6 +1660,23 @@ def test_command(): assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) assert "Non-zero exit code while running command" in str(exception) + # Test OSError + try: + rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], + 'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'} + alert = CommandAlerter(rule) + match = {'@timestamp': '2014-01-01T00:00:00', + 'somefield': 'foobarbaz'} + alert_text = str(BasicMatchString(rule, match)) + mock_run = mock.MagicMock(side_effect=OSError) + with mock.patch("elastalert.alerts.subprocess.Popen", mock_run), pytest.raises(OSError) as mock_popen: + mock_subprocess = mock.Mock() + mock_popen.return_value = mock_subprocess + mock_subprocess.communicate.return_value = (None, None) + alert.alert([match]) + except EAException: + assert True + def test_ms_teams(): rule = { @@ -1770,6 +1789,30 @@ def test_ms_teams_proxy(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) +def test_ms_teams_ea_exception(): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 'ms_teams_alert_summary': 'Alert from ElastAlert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with 
mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_slack_uses_custom_title(): rule = { 'name': 'Test Rule', @@ -3087,6 +3130,31 @@ def test_slack_msg_pretext(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) +def test_slack_ea_exception(): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_msg_pretext': 'pretext value', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_http_alerter_with_payload(): rule = { 'name': 'Test HTTP Post Alerter With Payload', @@ -3359,6 +3427,30 @@ def test_http_alerter_post_ca_certs_false(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) +def test_http_alerter_post_ea_exception(): + try: + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_ca_certs': False, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_pagerduty_alerter(): rule = { 'name': 'Test PD Rule', @@ -3930,6 +4022,36 @@ def test_pagerduty_alerter_proxy(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) +def test_pagerduty_ea_exception(): + try: + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'alert_subject': '{0} kittens', + 'alert_subject_args': ['somefield'], + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_event_type': 'trigger', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['someotherfield'], + 'pagerduty_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'Stinkiest', + 'someotherfield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_alert_text_kw(ea): rule = ea.rules[0].copy() rule['alert_text'] = '{field} at {time}' @@ -4627,6 +4749,44 @@ def test_alerta_tags(): mock_post_request.call_args_list[0][1]['data']) +def test_alerta_ea_exception(): + try: + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 
'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_tags': ['elastalert2'], + 'alert': 'alerta' + } + + match = { + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_alert_subject_size_limit_no_args(): rule = { 'name': 'test_rule', @@ -4725,6 +4885,30 @@ def test_datadog_alerter(): assert expected_data == actual_data +def test_datadog_alerterea_exception(): + try: + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'datadog_api_key': 'test-api-key', + 'datadog_app_key': 'test-app-key', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'name': 'datadog-test-name' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_pagertree(): rule = { 'name': 'Test PagerTree Rule', @@ -4806,6 +4990,29 @@ def test_pagertree_proxy(): assert expected_data["Description"] == actual_data['Description'] +def test_pagertree_ea_exception(): + try: + rule = { + 'name': 'Test PagerTree Rule', + 'type': 'any', + 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx', + 'pagertree_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerTreeAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_line_notify(): rule = { 'name': 'Test LineNotify Rule', @@ -4840,6 +5047,28 @@ def test_line_notify(): assert expected_data == actual_data +def test_line_notify_ea_exception(): + try: + rule = { + 'name': 'Test LineNotify Rule', + 'type': 'any', + 'linenotify_access_token': 'xxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = LineNotifyAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_gitter_msg_level_default(): rule = { 'name': 'Test Gitter Rule', @@ -4978,6 +5207,30 @@ def test_gitter_proxy(): assert 'error' in actual_data['level'] +def test_gitter_ea_exception(): + try: + rule = { + 'name': 'Test Gitter Rule', + 'type': 'any', + 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', + 'gitter_msg_level': 'error', + 'gitter_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + 
rules_loader.load_modules(rule) + alert = GitterAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_chatwork(): rule = { 'name': 'Test Chatwork Rule', @@ -5047,6 +5300,32 @@ def test_chatwork_proxy(): assert expected_data == actual_data +def test_chatwork_ea_exception(): + try: + rule = { + 'name': 'Test Chatwork Rule', + 'type': 'any', + 'chatwork_apikey': 'xxxx1', + 'chatwork_room_id': 'xxxx2', + 'chatwork_proxy': 'http://proxy.url', + 'chatwork_proxy_login': 'admin', + 'chatwork_proxy_pass': 'password', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ChatworkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_telegram(): rule = { 'name': 'Test Telegram Rule', @@ -5159,6 +5438,29 @@ def test_telegram_text_maxlength(): assert expected_data == actual_data +def test_telegram_ea_exception(): + try: + rule = { + 'name': 'Test Telegram Rule' + ('a' * 3985), + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_service_now(): rule = { 'name': 'Test ServiceNow Rule', @@ -5264,6 +5566,38 @@ def test_service_now_proxy(): assert expected_data == actual_data +def test_service_now_ea_exception(): + try: + rule = { + 'name': 'Test ServiceNow Rule', + 'type': 'any', + 'username': 'ServiceNow username', + 'password': 'ServiceNow password', + 'servicenow_rest_url': 'https://xxxxxxxxxx', + 'short_description': 'ServiceNow short_description', + 'comments': 'ServiceNow comments', + 'assignment_group': 'ServiceNow assignment_group', + 'category': 'ServiceNow category', + 'subcategory': 'ServiceNow subcategory', + 'cmdb_ci': 'ServiceNow cmdb_ci', + 'caller_id': 'ServiceNow caller_id', + 'servicenow_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ServiceNowAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_victor_ops(): rule = { 'name': 'Test VictorOps Rule', @@ -5341,6 +5675,32 @@ def test_victor_ops_proxy(): assert expected_data == actual_data +def test_victor_ops_ea_exception(): + try: + rule = { + 'name': 'Test VictorOps Rule', + 'type': 'any', + 'victorops_api_key': 'xxxx1', + 'victorops_routing_key': 'xxxx2', + 'victorops_message_type': 'INFO', + 'victorops_entity_display_name': 'no entity display name', + 'victorops_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) 
+ rules_loader.load_modules(rule) + alert = VictorOpsAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_google_chat_basic(): rule = { 'name': 'Test GoogleChat Rule', @@ -5438,6 +5798,33 @@ def test_google_chat_card(): assert expected_data == actual_data +def test_google_chat_ea_exception(): + try: + rule = { + 'name': 'Test GoogleChat Rule', + 'type': 'any', + 'googlechat_webhook_url': 'http://xxxxxxx', + 'googlechat_format': 'card', + 'googlechat_header_title': 'xxxx1', + 'googlechat_header_subtitle': 'xxxx2', + 'googlechat_header_image': 'http://xxxx/image.png', + 'googlechat_footer_kibanalink': 'http://xxxxx/kibana', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GoogleChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_discord(): rule = { 'name': 'Test Discord Rule', @@ -5612,6 +5999,31 @@ def test_discord_description_maxlength(): assert expected_data == actual_data +def test_discord_ea_exception(): + try: + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_dingtalk_text(): rule = { 'name': 'Test DingTalk Rule', @@ -5856,6 +6268,46 @@ def test_dingtalk_proxy(): assert expected_data == actual_data +def test_dingtalk_ea_exception(): + try: + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'dingtalk_btn_orientation': '1', + 'dingtalk_btns': [ + { + 'title': 'test1', + 'actionURL': 'https://xxxxx0/' + }, + { + 'title': 'test2', + 'actionURL': 'https://xxxxx1/' + } + ], + 'dingtalk_proxy': 'http://proxy.url', + 'dingtalk_proxy_login': 'admin', + 'dingtalk_proxy_pass': 'password', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + def test_mattermost_proxy(): rule = { 'name': 'Test Mattermost Rule', @@ -6594,3 +7046,30 @@ def test_mattermost_author_icon(): actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data + + +def test_mattermost_ea_exception(): + try: + rule = 
{
+            'name': 'Test Mattermost Rule',
+            'type': 'any',
+            'alert_text_type': 'alert_text_only',
+            'mattermost_webhook_url': 'http://xxxxx',
+            'mattermost_msg_pretext': 'aaaaa',
+            'mattermost_msg_color': 'danger',
+            'mattermost_author_icon': 'http://author.icon.url',
+            'alert': [],
+            'alert_subject': 'Test Mattermost'
+        }
+        rules_loader = FileRulesLoader({})
+        rules_loader.load_modules(rule)
+        alert = MattermostAlerter(rule)
+        match = {
+            '@timestamp': '2021-01-01T00:00:00',
+            'somefield': 'foobarbaz'
+        }
+        mock_run = mock.MagicMock(side_effect=RequestException)
+        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
+            alert.alert([match])
+    except EAException:
+        assert True

From be3b37eb481bb64b559d66c9f4e426d48ab40b0f Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Mon, 10 May 2021 07:58:27 +0900
Subject: [PATCH 0206/1065] Update schema.yaml and ruletypes.rst
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

schema.yaml
add
・assets_dir: {type: string}
・email_image_keys: {type: array, items: {type: string}}
・email_image_values: {type: array, items: {type: string}}
modify
・ms_teams_webhook_url: {type: string} → ms_teams_webhook_url: *arrayOfString
・http_post_url: {type: string} → http_post_url: *arrayOfString
・googlechat_webhook_url: {type: string} → googlechat_webhook_url: *arrayOfString

ruletypes.rst
Email
assets_dir
email_image_keys
email_image_values
---
 docs/source/ruletypes.rst | 12 ++++++++++++
 elastalert/schema.yaml    |  9 ++++++---
 2 files changed, 18 insertions(+), 3 deletions(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 35c0a7391..1de891896 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -1910,6 +1910,18 @@ by the smtp server.

 ``email_format``: If set to 'html', the email's MIME type will be set to HTML, and HTML content should correctly render. If you use this,
 you need to put your own HTML into ``alert_text`` and use ``alert_text_type: alert_text_jinja`` or ``alert_text_type: alert_text_only``.

+``assets_dir``: Directory that contains the image files to embed. Defaults to ``/tmp``.
+
+``email_image_keys``: List of keys identifying the images to embed; each key is paired with the file name at the same position in ``email_image_values``.
 Exotel
 ~~~~~~
 
diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml
index a451be3f1..0d911f70b 100644
--- a/elastalert/schema.yaml
+++ b/elastalert/schema.yaml
@@ -330,6 +330,9 @@ properties:
     cc: *arrayOfString
     bcc: *arrayOfString
     email_format: {type: string}
+    assets_dir: {type: string}
+    email_image_keys: {type: array, items: {type: string}}
+    email_image_values: {type: array, items: {type: string}}
     notify_email: *arrayOfString # if rule is slow or erroring, send to this email
 
     ### Exotel
@@ -344,7 +347,7 @@ properties:
     gitter_proxy: {type: string}
 
     ### GoogleChat
-    googlechat_webhook_url: {type: string}
+    googlechat_webhook_url: *arrayOfString
     googlechat_format: {type: string, enum: ['basic', 'card']}
     googlechat_header_title: {type: string}
     googlechat_header_subtitle: {type: string}
@@ -352,7 +355,7 @@ properties:
     googlechat_footer_kibanalink: {type: string}
 
     ### HTTP POST
-    http_post_url: {type: string}
+    http_post_url: *arrayOfString
     http_post_proxy: {type: string}
     http_post_ca_certs: {type: boolean}
     http_post_ignore_ssl_errors: {type: boolean}
@@ -403,7 +406,7 @@ properties:
     mattermost_author_icon: {type: string}
 
     ### Microsoft Teams
-    ms_teams_webhook_url: {type: string}
+    ms_teams_webhook_url: *arrayOfString
     ms_teams_alert_summary: {type: string}
     ms_teams_theme_color: {type: string}
     ms_teams_proxy: {type: string}

From 3b6e4f38d87ecaa29713463c016bf8d4a5344c59 Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Mon, 10 May 2021 08:48:11 +0100
Subject: [PATCH 0207/1065] Improve response error handling

---
 elastalert/alerts.py | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/elastalert/alerts.py b/elastalert/alerts.py
index 0b3736c42..5b72b46d2 100644
--- a/elastalert/alerts.py
+++ b/elastalert/alerts.py
@@ -2141,14 +2141,15 @@ def alert(self, matches):
         headers = {'Content-Type': 'application/json',
                    'Authorization': f'Bearer {api_key}'}
 
-        response = requests.post(req,
-                                 headers=headers,
-                                 data=alert_body,
-                                 proxies=proxies,
-                                 verify=verify)
-
-        if response.status_code != 201:
-            raise Exception(f'Unable to create TheHive alert: {response.text}')
+        try:
+            response = requests.post(req,
+                                     headers=headers,
+                                     data=alert_body,
+                                     proxies=proxies,
+                                     verify=verify)
+            response.raise_for_status()
+        except RequestException as e:
+            raise EAException(f"Error posting to TheHive: {e}")
 
     def get_info(self):

From fc7a84fbad20f72f4fe2fcbe68e50d09b295ebca Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Mon, 10 May 2021 08:48:27 +0100
Subject: [PATCH 0208/1065] Add test for TheHive alerter

---
 tests/alerts_test.py | 83 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 83 insertions(+)

diff --git a/tests/alerts_test.py b/tests/alerts_test.py
index dcce1293e..82deb3a59 100644
--- a/tests/alerts_test.py
+++ b/tests/alerts_test.py
@@ -22,6 +22,7 @@
 from elastalert.alerts import EmailAlerter
 from elastalert.alerts import GitterAlerter
 from elastalert.alerts import GoogleChatAlerter
+from elastalert.alerts import HiveAlerter
 from elastalert.alerts import HTTPPostAlerter
 from elastalert.alerts import JiraAlerter
 from elastalert.alerts import JiraFormattedMatchString
@@ -6594,3 +6595,85 @@ def test_mattermost_author_icon():
 
     actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
     assert expected_data == actual_data
+
+
+def 
test_thehive_alerter(): + rule = {'alert': [], + 'alert_text': '', + 'alert_text_type': 'alert_text_only', + 'description': 'test', + 'hive_alert_config': {'customFields': [{'name': 'test', + 'type': 'string', + 'value': 'test.ip'}], + 'follow': True, + 'severity': 2, + 'source': 'elastalert', + 'status': 'New', + 'tags': ['test.ip'], + 'tlp': 3, + 'type': 'external'}, + 'hive_connection': {'hive_apikey': '', + 'hive_host': 'https://localhost', + 'hive_port': 9000}, + 'hive_observable_data_mapping': [{'ip': 'test.ip'}], + 'name': 'test-thehive', + 'tags': ['a', 'b'], + 'type': 'any'} + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HiveAlerter(rule) + match = { + "test": { + "ip": "127.0.0.1" + }, + "@timestamp": "2021-05-09T14:43:30", + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + "artifacts": [ + { + "data": "127.0.0.1", + "dataType": "ip", + "message": None, + "tags": [], + "tlp": 2 + } + ], + "customFields": { + "test": { + "order": 0, + "string": "127.0.0.1" + } + }, + "description": "\n\n", + "follow": True, + "severity": 2, + "source": "elastalert", + "status": "New", + "tags": [ + "127.0.0.1" + ], + "title": "test-thehive", + "tlp": 3, + "type": "external" + } + + conn_config = rule['hive_connection'] + alert_url = f"{conn_config['hive_host']}:{conn_config['hive_port']}/api/alert" + mock_post_request.assert_called_once_with( + alert_url, + data=mock.ANY, + headers={'Content-Type': 'application/json', + 'Authorization': 'Bearer '}, + verify=False, + proxies={'http': '', 'https': ''} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + # The date and sourceRef are autogenerated, so we can't expect them to be a particular value + del actual_data['date'] + del actual_data['sourceRef'] + + assert expected_data == actual_data From 7d518b24be6be1734df4fa941b13541e4f55bcb0 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 10 May 2021 09:07:33 +0100 Subject: [PATCH 0209/1065] Fix linter errors --- tests/alerts_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 94f3e3e85..46589522e 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -7075,6 +7075,7 @@ def test_mattermost_ea_exception(): except EAException: assert True + def test_thehive_alerter(): rule = {'alert': [], 'alert_text': '', @@ -7155,4 +7156,3 @@ def test_thehive_alerter(): del actual_data['sourceRef'] assert expected_data == actual_data - \ No newline at end of file From 9e30ed774fa799fd1ba47f26b8ce808f75ea2712 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 10 May 2021 09:10:40 +0100 Subject: [PATCH 0210/1065] Update changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2902b865e..8691e17ec 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,8 @@ - Dockerfile now runs as a non-root user "elastalert". Ensure your volumes are accessible by this non-root user. - System packages removed from the Dockerfile: All dev packages, cargo, libmagic. Image size reduced to 250Mb. - `tmp` files and dev packages removed from the final container image. 
+- TheHive alerter refactoring - [#142](https://github.com/jertel/elastalert2/pull/142) - @ferozsalam + - See the updated documentation for changes required to alert formatting ## New features - Added support for alerting via Amazon Simple Email System (SES) - [#105](https://github.com/jertel/elastalert2/pull/105) - @nsano-rururu From 0b156de057d39fa0ceec192f56c62815afeb9a5c Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 10 May 2021 09:21:30 +0100 Subject: [PATCH 0211/1065] Add local testing instructions to the contributors' guidelines --- CONTRIBUTING.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9e2162c1b..f8b1e9e89 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,7 +3,8 @@ ## Introduction PRs are welcome, but must include tests, when possible. PRs will not be merged if they do not pass -the automated CI workflows. +the automated CI workflows. To test your changes before creating a PR, run `make test-docker` from +the root of the repository (requires Docker to be running on your machine). Before submitting the PR review that you have included the following changes, where applicable: - Documentation: If you're adding new functionality, any new configuration options should be documented appropriately in the docs/ folder. @@ -42,4 +43,4 @@ Maintainers, when creating a new release, follow the procedure below: [2]: https://github.com/jertel/elastalert2/actions/workflows/publish_image.yml [3]: https://github.com/jertel/elastalert2/actions/workflows/python-publish.yml [4]: https://github.com/jertel/elastalert2/actions/workflows/upload_chart.yml -[5]: https://github.com/jertel/elastalert2/discussions \ No newline at end of file +[5]: https://github.com/jertel/elastalert2/discussions From f488769f8c352f060ec1aae74b6ba9a9d5c45d01 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 10 May 2021 10:29:06 +0100 Subject: [PATCH 0212/1065] Updated testing instructions --- CONTRIBUTING.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f8b1e9e89..710306db5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,8 +3,9 @@ ## Introduction PRs are welcome, but must include tests, when possible. PRs will not be merged if they do not pass -the automated CI workflows. To test your changes before creating a PR, run `make test-docker` from -the root of the repository (requires Docker to be running on your machine). +the automated CI workflows. To test your changes before creating a PR, run +`sudo make clean; sudo make test-docker` from the root of the repository (requires Docker to be +running on your machine). Before submitting the PR review that you have included the following changes, where applicable: - Documentation: If you're adding new functionality, any new configuration options should be documented appropriately in the docs/ folder. 
From a9fc048392ae5732e68f069ca4cbaffd6f9047e8 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 10 May 2021 10:51:04 +0100 Subject: [PATCH 0213/1065] Update docker-test command This removes the test container after it has run, instead of leaving it on the system --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 470062ce8..608c0bb8b 100644 --- a/Makefile +++ b/Makefile @@ -21,7 +21,7 @@ test-elasticsearch: test-docker: docker-compose --project-name elastalert build tox - docker-compose --project-name elastalert run tox + docker-compose --project-name elastalert run --rm tox clean: make -C docs clean From 6e5cd23196dc8524bb22ed461b65a0d6f23786d9 Mon Sep 17 00:00:00 2001 From: Fodor Zoltan Date: Thu, 13 May 2021 10:03:23 +0300 Subject: [PATCH 0214/1065] #110: Replace yaml loader with one that supports env value substitutions. --- CHANGELOG.md | 1 + Makefile | 5 ++++- elastalert/alerts.py | 4 ++-- elastalert/config.py | 7 ++++--- elastalert/loaders.py | 4 ++-- elastalert/yaml.py | 8 ++++++++ requirements.txt | 1 - setup.py | 1 - tests/alerts_test.py | 24 ++++++++++++------------ tests/config_test.py | 34 ++++++++++++++++++++++++++++++++++ tests/example.config.yaml | 19 +++++++++++++++++++ tests/loaders_test.py | 28 ++++++++++++++-------------- 12 files changed, 100 insertions(+), 36 deletions(-) create mode 100644 elastalert/yaml.py create mode 100644 tests/config_test.py create mode 100644 tests/example.config.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 8691e17ec..d14b0df70 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ - Added support for alerting via Amazon Simple Email System (SES) - [#105](https://github.com/jertel/elastalert2/pull/105) - @nsano-rururu - Support a footer in alert text - [#133](https://github.com/jertel/elastalert2/pull/133) - @nsano-rururu - Support extra message features for Slack and Mattermost - [#140](https://github.com/jertel/elastalert2/pull/140) - @nsano-rururu +- Support for environment variable substitutions in yaml config files ## Other changes - Fix issue with testing alerts that contain Jinja templates - [#101](https://github.com/jertel/elastalert2/pull/101) - @jertel diff --git a/Makefile b/Makefile index 608c0bb8b..cbe634879 100644 --- a/Makefile +++ b/Makefile @@ -21,10 +21,13 @@ test-elasticsearch: test-docker: docker-compose --project-name elastalert build tox - docker-compose --project-name elastalert run --rm tox + docker-compose --project-name elastalert run --rm tox tox -- $(filter-out $@,$(MAKECMDGOALS)) clean: make -C docs clean find . -name '*.pyc' -delete find . 
-name '__pycache__' -delete rm -rf virtualenv_run .tox .coverage *.egg-info build + +%: + @: diff --git a/elastalert/alerts.py b/elastalert/alerts.py index f7fd1e37f..1b508148c 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -26,7 +26,6 @@ from jira.exceptions import JIRAError from requests.auth import HTTPProxyAuth from requests.exceptions import RequestException -from staticconf.loader import yaml_loader from texttable import Texttable from twilio.base.exceptions import TwilioRestException from twilio.rest import Client as TwilioClient @@ -38,6 +37,7 @@ from .util import resolve_string from .util import ts_now from .util import ts_to_dt +from .yaml import read_yaml class DateTimeEncoder(json.JSONEncoder): @@ -312,7 +312,7 @@ def get_account(self, account_file): account_file_path = account_file else: account_file_path = os.path.join(os.path.dirname(self.rule['rule_file']), account_file) - account_conf = yaml_loader(account_file_path) + account_conf = read_yaml(account_file_path) if 'user' not in account_conf or 'password' not in account_conf: raise EAException('Account file must have user and password fields') self.user = account_conf['user'] diff --git a/elastalert/config.py b/elastalert/config.py index 1a8bbee33..28684adc9 100644 --- a/elastalert/config.py +++ b/elastalert/config.py @@ -4,13 +4,14 @@ import logging.config from envparse import Env -from staticconf.loader import yaml_loader from . import loaders from .util import EAException from .util import elastalert_logger from .util import get_module +from elastalert.yaml import read_yaml + # Required global (config.yaml) configuration options required_globals = frozenset(['run_every', 'es_host', 'es_port', 'writeback_index', 'buffer_time']) @@ -45,10 +46,10 @@ def load_conf(args, defaults=None, overwrites=None): """ filename = args.config if filename: - conf = yaml_loader(filename) + conf = read_yaml(filename) else: try: - conf = yaml_loader('config.yaml') + conf = read_yaml('config.yaml') except FileNotFoundError: raise EAException('No --config or config.yaml found') diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 67e2d9a89..6228ca051 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -11,7 +11,6 @@ from jinja2 import Template from jinja2 import Environment from jinja2 import FileSystemLoader -from staticconf.loader import yaml_loader from . import alerts from . 
import enhancements @@ -29,6 +28,7 @@ from .util import unix_to_dt from .util import unixms_to_dt from .zabbix import ZabbixAlerter +from .yaml import read_yaml class RulesLoader(object): @@ -538,7 +538,7 @@ def get_hashes(self, conf, use_rule=None): def get_yaml(self, filename): try: - return yaml_loader(filename) + return read_yaml(filename) except yaml.scanner.ScannerError as e: raise EAException('Could not parse file %s: %s' % (filename, e)) diff --git a/elastalert/yaml.py b/elastalert/yaml.py new file mode 100644 index 000000000..35810f102 --- /dev/null +++ b/elastalert/yaml.py @@ -0,0 +1,8 @@ +import os +import yaml + + +def read_yaml(path): + with open(path) as f: + yamlContent = os.path.expandvars(f.read()) + return yaml.load(yamlContent, Loader=yaml.FullLoader) diff --git a/requirements.txt b/requirements.txt index 878d0355a..c6ad77062 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,7 +14,6 @@ mock>=2.0.0 prison>=0.1.2 prometheus_client>=0.10.1 py-zabbix>=1.1.3 -PyStaticConfiguration>=0.10.3 python-dateutil>=2.6.0,<2.9.0 PyYAML>=5.1 requests>=2.10.0 diff --git a/setup.py b/setup.py index 5423daba6..9f6181335 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,6 @@ 'prison>=0.1.2', 'prometheus_client>=0.10.1', 'py-zabbix>=1.1.3', - 'PyStaticConfiguration>=0.10.3', 'python-dateutil>=2.6.0,<2.9.0', 'PyYAML>=5.1', 'requests>=2.10.0', diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 46589522e..1d16018e5 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -204,7 +204,7 @@ def test_email_with_auth(): 'alert_subject': 'Test alert for {0}', 'alert_subject_args': ['test_term'], 'smtp_auth_file': 'file.txt', 'rule_file': '/tmp/foo.yaml'} with mock.patch('elastalert.alerts.SMTP') as mock_smtp: - with mock.patch('elastalert.alerts.yaml_loader') as mock_open: + with mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) @@ -226,7 +226,7 @@ def test_email_with_cert_key(): 'alert_subject': 'Test alert for {0}', 'alert_subject_args': ['test_term'], 'smtp_auth_file': 'file.txt', 'smtp_cert_file': 'dummy/cert.crt', 'smtp_key_file': 'dummy/client.key', 'rule_file': '/tmp/foo.yaml'} with mock.patch('elastalert.alerts.SMTP') as mock_smtp: - with mock.patch('elastalert.alerts.yaml_loader') as mock_open: + with mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) @@ -1279,7 +1279,7 @@ def test_jira(): mock_priority = mock.Mock(id='5') with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value.priorities.return_value = [mock_priority] mock_jira.return_value.fields.return_value = [] @@ -1310,7 +1310,7 @@ def test_jira(): # Search called if jira_bump_tickets rule['jira_bump_tickets'] = True with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value = mock.Mock() mock_jira.return_value.search_issues.return_value = [] @@ -1326,7 +1326,7 @@ def test_jira(): # Remove a field if 
jira_ignore_in_title set rule['jira_ignore_in_title'] = 'test_term' with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value = mock.Mock() mock_jira.return_value.search_issues.return_value = [] @@ -1340,7 +1340,7 @@ def test_jira(): # Issue is still created if search_issues throws an exception with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value = mock.Mock() mock_jira.return_value.search_issues.side_effect = JIRAError @@ -1359,7 +1359,7 @@ def test_jira(): # Check ticket is bumped if it is updated 4 days ago mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value = mock.Mock() mock_jira.return_value.search_issues.return_value = [mock_issue] @@ -1375,7 +1375,7 @@ def test_jira(): # Check ticket is bumped is not bumped if ticket is updated right now mock_issue.fields.updated = str(ts_now()) with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value = mock.Mock() mock_jira.return_value.search_issues.return_value = [mock_issue] @@ -1410,7 +1410,7 @@ def test_jira(): {'name': 'affected user', 'id': 'affected_user_id', 'schema': {'type': 'string'}} ] with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value = mock.Mock() mock_jira.return_value.search_issues.return_value = [mock_issue] @@ -1483,7 +1483,7 @@ def test_jira_arbitrary_field_support(): ] with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value.priorities.return_value = [mock_priority] mock_jira.return_value.fields.return_value = mock_fields @@ -1524,7 +1524,7 @@ def test_jira_arbitrary_field_support(): rule['jira_nonexistent_field'] = 'nonexistent field value' with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value.priorities.return_value = [mock_priority] mock_jira.return_value.fields.return_value = mock_fields @@ -1540,7 +1540,7 @@ def test_jira_arbitrary_field_support(): rule['jira_watchers'] = 'invalid_watcher' with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.yaml_loader') as mock_open: + mock.patch('elastalert.alerts.read_yaml') as 
mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value.priorities.return_value = [mock_priority] mock_jira.return_value.fields.return_value = mock_fields diff --git a/tests/config_test.py b/tests/config_test.py new file mode 100644 index 000000000..1b6a16ee5 --- /dev/null +++ b/tests/config_test.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +import os +import mock +import datetime + +from elastalert.config import load_conf + + +def test_config_loads(): + os.environ['ELASTIC_PASS'] = 'password_from_env' + dir_path = os.path.dirname(os.path.realpath(__file__)) + + test_args = mock.Mock() + test_args.config = dir_path + '/example.config.yaml' + test_args.rule = None + test_args.debug = False + test_args.es_debug_trace = None + + conf = load_conf(test_args) + + assert conf['rules_folder'] == '/opt/elastalert/rules' + assert conf['run_every'] == datetime.timedelta(seconds=10) + assert conf['buffer_time'] == datetime.timedelta(minutes=15) + + assert conf['es_host'] == 'elasticsearch' + assert conf['es_port'] == 9200 + + assert conf['es_username'] == 'elastic' + assert conf['es_password'] == 'password_from_env' + + assert conf['writeback_index'] == 'elastalert_status' + assert conf['writeback_alias'] == 'elastalert_alerts' + + assert conf['alert_time_limit'] == datetime.timedelta(days=2) diff --git a/tests/example.config.yaml b/tests/example.config.yaml new file mode 100644 index 000000000..44609eb27 --- /dev/null +++ b/tests/example.config.yaml @@ -0,0 +1,19 @@ +rules_folder: /opt/elastalert/rules + +run_every: + seconds: 10 + +buffer_time: + minutes: 15 + +es_host: elasticsearch +es_port: 9200 + +es_username: elastic +es_password: $ELASTIC_PASS + +writeback_index: elastalert_status +writeback_alias: elastalert_alerts + +alert_time_limit: + days: 2 diff --git a/tests/loaders_test.py b/tests/loaders_test.py index bb8d3d873..5a5ae0006 100644 --- a/tests/loaders_test.py +++ b/tests/loaders_test.py @@ -207,9 +207,9 @@ def test_file_rules_loader_get_names(): def test_load_rules(): test_rule_copy = copy.deepcopy(test_rule) test_config_copy = copy.deepcopy(test_config) - with mock.patch('elastalert.config.yaml_loader') as mock_conf_open: + with mock.patch('elastalert.config.read_yaml') as mock_conf_open: mock_conf_open.return_value = test_config_copy - with mock.patch('elastalert.loaders.yaml_loader') as mock_rule_open: + with mock.patch('elastalert.loaders.read_yaml') as mock_rule_open: mock_rule_open.return_value = test_rule_copy with mock.patch('os.walk') as mock_ls: @@ -233,9 +233,9 @@ def test_load_default_host_port(): test_rule_copy.pop('es_host') test_rule_copy.pop('es_port') test_config_copy = copy.deepcopy(test_config) - with mock.patch('elastalert.config.yaml_loader') as mock_conf_open: + with mock.patch('elastalert.config.read_yaml') as mock_conf_open: mock_conf_open.return_value = test_config_copy - with mock.patch('elastalert.loaders.yaml_loader') as mock_rule_open: + with mock.patch('elastalert.loaders.read_yaml') as mock_rule_open: mock_rule_open.return_value = test_rule_copy with mock.patch('os.walk') as mock_ls: @@ -253,9 +253,9 @@ def test_load_ssl_env_false(): test_rule_copy.pop('es_host') test_rule_copy.pop('es_port') test_config_copy = copy.deepcopy(test_config) - with mock.patch('elastalert.config.yaml_loader') as mock_conf_open: + with mock.patch('elastalert.config.read_yaml') as mock_conf_open: mock_conf_open.return_value = test_config_copy - with mock.patch('elastalert.loaders.yaml_loader') as mock_rule_open: + with 
mock.patch('elastalert.loaders.read_yaml') as mock_rule_open:
             mock_rule_open.return_value = test_rule_copy
             with mock.patch('os.listdir') as mock_ls:
@@ -272,9 +272,9 @@ def test_load_ssl_env_true():
     test_rule_copy.pop('es_host')
     test_rule_copy.pop('es_port')
     test_config_copy = copy.deepcopy(test_config)
-    with mock.patch('elastalert.config.yaml_loader') as mock_conf_open:
+    with mock.patch('elastalert.config.read_yaml') as mock_conf_open:
         mock_conf_open.return_value = test_config_copy
-        with mock.patch('elastalert.loaders.yaml_loader') as mock_rule_open:
+        with mock.patch('elastalert.loaders.read_yaml') as mock_rule_open:
             mock_rule_open.return_value = test_rule_copy
             with mock.patch('os.listdir') as mock_ls:
@@ -291,9 +291,9 @@ def test_load_url_prefix_env():
     test_rule_copy.pop('es_host')
     test_rule_copy.pop('es_port')
     test_config_copy = copy.deepcopy(test_config)
-    with mock.patch('elastalert.config.yaml_loader') as mock_conf_open:
+    with mock.patch('elastalert.config.read_yaml') as mock_conf_open:
         mock_conf_open.return_value = test_config_copy
-        with mock.patch('elastalert.loaders.yaml_loader') as mock_rule_open:
+        with mock.patch('elastalert.loaders.read_yaml') as mock_rule_open:
             mock_rule_open.return_value = test_rule_copy
             with mock.patch('os.listdir') as mock_ls:
@@ -309,9 +309,9 @@ def test_load_disabled_rules():
     test_rule_copy = copy.deepcopy(test_rule)
     test_rule_copy['is_enabled'] = False
     test_config_copy = copy.deepcopy(test_config)
-    with mock.patch('elastalert.config.yaml_loader') as mock_conf_open:
+    with mock.patch('elastalert.config.read_yaml') as mock_conf_open:
         mock_conf_open.return_value = test_config_copy
-        with mock.patch('elastalert.loaders.yaml_loader') as mock_rule_open:
+        with mock.patch('elastalert.loaders.read_yaml') as mock_rule_open:
             mock_rule_open.return_value = test_rule_copy
             with mock.patch('os.listdir') as mock_ls:
@@ -334,9 +334,9 @@ def test_raises_on_missing_config():
         if key in optional_keys:
             continue
 
-        with mock.patch('elastalert.config.yaml_loader') as mock_conf_open:
+        with mock.patch('elastalert.config.read_yaml') as mock_conf_open:
             mock_conf_open.return_value = test_config_copy
-            with mock.patch('elastalert.loaders.yaml_loader') as mock_rule_open:
+            with mock.patch('elastalert.loaders.read_yaml') as mock_rule_open:
                 mock_rule_open.return_value = test_rule_copy
                 with mock.patch('os.walk') as mock_walk:
                     mock_walk.return_value = [('', [], ['testrule.yaml'])]

From ac07f6c5ea071d3e97dfc430d000075fc464fdf6 Mon Sep 17 00:00:00 2001
From: "Charest, Cedric"
Date: Thu, 13 May 2021 16:30:43 -0400
Subject: [PATCH 0215/1065] Fixes #156 Support multiple directories in FileRulesLoader

---
 config.yaml.example        |  5 +++++
 docs/source/elastalert.rst |  2 +-
 elastalert/loaders.py      | 30 +++++++++++++++++++-----------
 tests/loaders_test.py      | 27 +++++++++++++++++----------
 4 files changed, 42 insertions(+), 22 deletions(-)

diff --git a/config.yaml.example b/config.yaml.example
index 36fd1b12d..6679e249b 100644
--- a/config.yaml.example
+++ b/config.yaml.example
@@ -1,4 +1,9 @@
 # This is the folder that contains the rule yaml files
+# This can also be a list of directories
 # Any .yaml file will be loaded as a rule
 rules_folder: example_rules
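+# A minimal sketch of the list form (the second path below is hypothetical):
+# rules_folder:
+#   - example_rules
+#   - /opt/elastalert/more_rules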
diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst
index 9663c4719..ba260b33b 100755
--- a/docs/source/elastalert.rst
+++ b/docs/source/elastalert.rst
@@ -152,7 +152,7 @@ The environment variable ``ES_USE_SSL`` will override this field.
 
 ``rules_loader``: Optional; sets the loader class to be used by ElastAlert to retrieve rules and hashes.
 Defaults to ``FileRulesLoader`` if not set.
 
-``rules_folder``: The name of the folder which contains rule configuration files. ElastAlert will load all
+``rules_folder``: The name of the folder, or a list of folders, containing rule configuration files. ElastAlert will load all
 files in this folder, and all subdirectories, that end in .yaml. If the contents
 of this folder change, ElastAlert will load, reload or remove rules based on their respective config files. (only required when using ``FileRulesLoader``).

diff --git a/elastalert/loaders.py b/elastalert/loaders.py
index 6228ca051..bd95e3e76 100644
--- a/elastalert/loaders.py
+++ b/elastalert/loaders.py
@@ -513,20 +513,28 @@ def get_names(self, conf, use_rule=None):
         # Passing a filename directly can bypass rules_folder and .yaml checks
         if use_rule and os.path.isfile(use_rule):
             return [use_rule]
-        rule_folder = conf['rules_folder']
+
+        # In case of a bad type, convert string to list:
+        rule_folders = conf['rules_folder'] if isinstance(conf['rules_folder'], list) else [conf['rules_folder']]
         rule_files = []
         if 'scan_subdirectories' in conf and conf['scan_subdirectories']:
-            for root, folders, files in os.walk(rule_folder):
-                for filename in files:
-                    if use_rule and use_rule != filename:
-                        continue
-                    if self.is_yaml(filename):
-                        rule_files.append(os.path.join(root, filename))
+            for ruledir in rule_folders:
+                for root, folders, files in os.walk(ruledir):
+                    # Openshift/k8s configmap fix for ..data and ..2021_05..date directories that loop with os.walk()
+                    folders[:] = [d for d in folders if not d.startswith('..')]
+                    for filename in files:
+                        if use_rule and use_rule != filename:
+                            continue
+                        if self.is_yaml(filename):
+                            rule_files.append(os.path.join(root, filename))
         else:
-            for filename in os.listdir(rule_folder):
-                fullpath = os.path.join(rule_folder, filename)
-                if os.path.isfile(fullpath) and self.is_yaml(filename):
-                    rule_files.append(fullpath)
+            for ruledir in rule_folders:
+                if not os.path.isdir(ruledir):
+                    continue
+                for file in os.scandir(ruledir):
+                    fullpath = os.path.join(ruledir, file.name)
+                    if os.path.isfile(fullpath) and self.is_yaml(file.name):
+                        rule_files.append(fullpath)
         return rule_files
 
     def get_hashes(self, conf, use_rule=None):

diff --git a/tests/loaders_test.py b/tests/loaders_test.py
index 5a5ae0006..be29dfa9d 100644
--- a/tests/loaders_test.py
+++ b/tests/loaders_test.py
@@ -169,9 +169,9 @@ def test_load_inline_alert_rule():
 def test_file_rules_loader_get_names_recursive():
     conf = {'scan_subdirectories': True, 'rules_folder': 'root'}
     rules_loader = FileRulesLoader(conf)
-    walk_paths = (('root', ('folder_a', 'folder_b'), ('rule.yaml',)),
-                  ('root/folder_a', (), ('a.yaml', 'ab.yaml')),
-                  ('root/folder_b', (), ('b.yaml',)))
+    walk_paths = (('root', ['folder_a', 'folder_b'], ('rule.yaml',)),
+                  ('root/folder_a', [], ('a.yaml', 'ab.yaml')),
+                  ('root/folder_b', [], ('b.yaml',)))
     with mock.patch('os.walk') as mock_walk:
         mock_walk.return_value = walk_paths
         paths = rules_loader.get_names(conf)
@@ -186,19 +186,26 @@
 def test_file_rules_loader_get_names():
+
+    class MockDirEntry:
+        # Mimics the os.DirEntry objects returned by os.scandir
+        def __init__(self, name):
+            self.name = name
+
     # Check for no subdirectory
     conf = {'scan_subdirectories': False, 'rules_folder': 'root'}
     rules_loader = FileRulesLoader(conf)
-    files = ['badfile', 'a.yaml', 'b.yaml']
+    files = [MockDirEntry(name='badfile'), 
MockDirEntry('a.yaml'), MockDirEntry('b.yaml')] - with mock.patch('os.listdir') as mock_list: - with mock.patch('os.path.isfile') as mock_path: - mock_path.return_value = True - mock_list.return_value = files - paths = rules_loader.get_names(conf) + with mock.patch('os.path.isdir') as mock_dir: + with mock.patch('os.scandir') as mock_list: + with mock.patch('os.path.isfile') as mock_path: + mock_dir.return_value = conf['rules_folder'] + mock_path.return_value = True + mock_list.return_value = files + paths = rules_loader.get_names(conf) paths = [p.replace(os.path.sep, '/') for p in paths] - assert 'root/a.yaml' in paths assert 'root/b.yaml' in paths assert len(paths) == 2 From 51fb52050357d7d603e705980c849b0621c408ed Mon Sep 17 00:00:00 2001 From: "Charest, Cedric" Date: Fri, 14 May 2021 10:06:20 -0400 Subject: [PATCH 0216/1065] Update changelog on #157 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d14b0df70..b90908ad7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,7 @@ - Support a footer in alert text - [#133](https://github.com/jertel/elastalert2/pull/133) - @nsano-rururu - Support extra message features for Slack and Mattermost - [#140](https://github.com/jertel/elastalert2/pull/140) - @nsano-rururu - Support for environment variable substitutions in yaml config files +- Support for multiple rules directories and fix `..data` Kubernetes/Openshift recursive directories in FileRulesLoader [#157](https://github.com/jertel/elastalert2/pull/157) - @mrfroggg ## Other changes - Fix issue with testing alerts that contain Jinja templates - [#101](https://github.com/jertel/elastalert2/pull/101) - @jertel From 5061d1669f1574a2cc278f2e3058673ffff06315 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Fri, 14 May 2021 10:37:24 -0400 Subject: [PATCH 0217/1065] Fix Kubernetes readme default image name; Fix changelog.md to match template for PRs --- CHANGELOG.md | 2 +- chart/elastalert2/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b90908ad7..143024e19 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,7 +25,7 @@ - Added support for alerting via Amazon Simple Email System (SES) - [#105](https://github.com/jertel/elastalert2/pull/105) - @nsano-rururu - Support a footer in alert text - [#133](https://github.com/jertel/elastalert2/pull/133) - @nsano-rururu - Support extra message features for Slack and Mattermost - [#140](https://github.com/jertel/elastalert2/pull/140) - @nsano-rururu -- Support for environment variable substitutions in yaml config files +- Support for environment variable substitutions in yaml config files - [#149](https://github.com/jertel/elastalert2/pull/149) - @archfz - Support for multiple rules directories and fix `..data` Kubernetes/Openshift recursive directories in FileRulesLoader [#157](https://github.com/jertel/elastalert2/pull/157) - @mrfroggg ## Other changes diff --git a/chart/elastalert2/README.md b/chart/elastalert2/README.md index bb73c7192..fbbfbebe5 100644 --- a/chart/elastalert2/README.md +++ b/chart/elastalert2/README.md @@ -46,7 +46,7 @@ The command removes all the Kubernetes components associated with the chart and | Parameter | Description | Default | |----------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|---------------------------------| -| `image.repository` | docker image | jertel/elastalert-docker | +| 
`image.repository` | docker image | jertel/elastalert2 | | `image.tag` | docker image tag | 2.0.4 | | `image.pullPolicy` | image pull policy | IfNotPresent | | `podAnnotations` | Annotations to be added to pods | {} | From 5bdab79e2899e64581039a6dbe6223abbed35bfd Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Fri, 14 May 2021 10:43:41 -0400 Subject: [PATCH 0218/1065] Fix README.md hyperlink to chart documentation --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 15f34a9a2..c2ff4846f 100644 --- a/README.md +++ b/README.md @@ -50,4 +50,4 @@ ElastAlert 2 is licensed under the [Apache License, Version 2.0][5]. [4]: https://elastalert2.readthedocs.io/en/latest/ruletypes.html#alerts [5]: https://www.apache.org/licenses/LICENSE-2.0 [6]: https://github.com/jertel/elastalert2/blob/master/CONTRIBUTING.md -[7]: https://github.com/jertel/elastalert2/chart/elastalert2/README.md \ No newline at end of file +[7]: https://github.com/jertel/elastalert2/tree/master/chart/elastalert2 \ No newline at end of file From 68fb1baf61697aa6dfc6a79a5e021fac3d70b5ef Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sun, 16 May 2021 12:08:50 +0100 Subject: [PATCH 0219/1065] Begin alerter refactoring and migration - Create a new alerters folder - Move the Zabbix and OpsGenie alerters to the folder - Refactor to load the alerters from the folder - Add a new test for the Zabbix alerter Once this is merged (and if we are happy with this format for the migration), I will do all the other alerters. --- elastalert/alerters/__init__.py | 0 elastalert/{ => alerters}/opsgenie.py | 10 ++++---- elastalert/{ => alerters}/zabbix.py | 4 ++-- elastalert/loaders.py | 4 ++-- tests/alerts_test.py | 33 ++++++++++++++++++++++++++- 5 files changed, 41 insertions(+), 10 deletions(-) create mode 100644 elastalert/alerters/__init__.py rename elastalert/{ => alerters}/opsgenie.py (97%) rename elastalert/{ => alerters}/zabbix.py (98%) diff --git a/elastalert/alerters/__init__.py b/elastalert/alerters/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/elastalert/opsgenie.py b/elastalert/alerters/opsgenie.py similarity index 97% rename from elastalert/opsgenie.py rename to elastalert/alerters/opsgenie.py index 8db52d89f..7b3ac7027 100644 --- a/elastalert/opsgenie.py +++ b/elastalert/alerters/opsgenie.py @@ -3,11 +3,11 @@ import os.path import requests -from .alerts import Alerter -from .alerts import BasicMatchString -from .util import EAException -from .util import elastalert_logger -from .util import lookup_es_key +from ..alerts import Alerter +from ..alerts import BasicMatchString +from ..util import EAException +from ..util import elastalert_logger +from ..util import lookup_es_key class OpsGenieAlerter(Alerter): diff --git a/elastalert/zabbix.py b/elastalert/alerters/zabbix.py similarity index 98% rename from elastalert/zabbix.py rename to elastalert/alerters/zabbix.py index e2b5f1ed6..1ec9eab09 100644 --- a/elastalert/zabbix.py +++ b/elastalert/alerters/zabbix.py @@ -2,8 +2,8 @@ from pyzabbix import ZabbixSender, ZabbixMetric, ZabbixAPI -from .alerts import Alerter -from .util import elastalert_logger, EAException +from ..alerts import Alerter +from ..util import elastalert_logger, EAException class ZabbixClient(ZabbixAPI): diff --git a/elastalert/loaders.py b/elastalert/loaders.py index bd95e3e76..013be0dd0 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -15,7 +15,8 @@ from . import alerts from . import enhancements from . 
import ruletypes -from .opsgenie import OpsGenieAlerter +from .alerters.opsgenie import OpsGenieAlerter +from .alerters.zabbix import ZabbixAlerter from .util import dt_to_ts from .util import dt_to_ts_with_format from .util import dt_to_unix @@ -27,7 +28,6 @@ from .util import ts_to_dt_with_format from .util import unix_to_dt from .util import unixms_to_dt -from .zabbix import ZabbixAlerter from .yaml import read_yaml diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 1d16018e5..4e22b33c0 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -36,7 +36,8 @@ from elastalert.alerts import SlackAlerter from elastalert.alerts import TelegramAlerter from elastalert.loaders import FileRulesLoader -from elastalert.opsgenie import OpsGenieAlerter +from elastalert.alerters.opsgenie import OpsGenieAlerter +from elastalert.alerters.zabbix import ZabbixAlerter from elastalert.alerts import VictorOpsAlerter from elastalert.util import ts_add from elastalert.util import ts_now @@ -7156,3 +7157,33 @@ def test_thehive_alerter(): del actual_data['sourceRef'] assert expected_data == actual_data + + +def test_zabbix_basic(): + rule = { + 'name': 'Basic Zabbix test', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'alert': [], + 'alert_subject': 'Test Zabbix', + 'zbx_host': 'example.com', + 'zbx_key': 'example-key' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ZabbixAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00Z', + 'somefield': 'foobarbaz' + } + with mock.patch('pyzabbix.ZabbixSender.send') as mock_zbx_send: + alert.alert([match]) + + zabbix_metrics = { + "host": "example.com", + "key": "example-key", + "value": "1", + "clock": 1609459200 + } + alerter_args = mock_zbx_send.call_args.args + assert vars(alerter_args[0][0]) == zabbix_metrics From 8642c72113de5cb64b634986f8bd39e397f4c6b7 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Mon, 17 May 2021 07:46:45 -0400 Subject: [PATCH 0220/1065] Prepare for new release --- CHANGELOG.md | 20 +++++++++++++------- CONTRIBUTING.md | 5 +++-- README.md | 2 +- chart/elastalert2/Chart.yaml | 5 ++--- chart/elastalert2/README.md | 2 +- chart/elastalert2/values.yaml | 2 +- setup.py | 2 +- 7 files changed, 22 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 143024e19..c5baaa020 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,26 +9,32 @@ ## Other changes - None -# Upcoming Release +# 2.1.0 ## Breaking changes +- TheHive alerter refactoring - [#142](https://github.com/jertel/elastalert2/pull/142) - @ferozsalam + - See the updated documentation for changes required to alert formatting - Dockerfile refactor for performance and size improvements - [#102](https://github.com/jertel/elastalert2/pull/102) - @jgregmac - Dockerfile base image changed from `python/alpine` to `python/slim-buster` to take advantage of pre-build python wheels, accelerate build times, and reduce image size. If you have customized an image, based on jertel/elastalert2, you may need to make adjustments. - Default base path changed to `/opt/elastalert` in the Dockerfile and in Helm charts. Update your volume binds accordingly. - Dockerfile now runs as a non-root user "elastalert". Ensure your volumes are accessible by this non-root user. - System packages removed from the Dockerfile: All dev packages, cargo, libmagic. Image size reduced to 250Mb. - `tmp` files and dev packages removed from the final container image. 
-- TheHive alerter refactoring - [#142](https://github.com/jertel/elastalert2/pull/142) - @ferozsalam - - See the updated documentation for changes required to alert formatting ## New features -- Added support for alerting via Amazon Simple Email System (SES) - [#105](https://github.com/jertel/elastalert2/pull/105) - @nsano-rururu -- Support a footer in alert text - [#133](https://github.com/jertel/elastalert2/pull/133) - @nsano-rururu -- Support extra message features for Slack and Mattermost - [#140](https://github.com/jertel/elastalert2/pull/140) - @nsano-rururu -- Support for environment variable substitutions in yaml config files - [#149](https://github.com/jertel/elastalert2/pull/149) - @archfz - Support for multiple rules directories and fix `..data` Kubernetes/Openshift recursive directories in FileRulesLoader [#157](https://github.com/jertel/elastalert2/pull/157) - @mrfroggg +- Support environment variable substition in yaml files - [#149](https://github.com/jertel/elastalert2/pull/149) - @archfz +- Update schema.yaml and enhance documentation for Email alerter - [#144](https://github.com/jertel/elastalert2/pull/144) - @nsano-rururu +- Default Email alerter to use port 25, and require http_post_url for HTTP Post alerter - [#143](https://github.com/jertel/elastalert2/pull/143) - @nsano-rururu +- Support extra message features for Slack and Mattermost - [#140](https://github.com/jertel/elastalert2/pull/140) - @nsano-rururu +- Support a footer in alert text - [#133](https://github.com/jertel/elastalert2/pull/133) - @nsano-rururu +- Added support for alerting via Amazon Simple Email System (SES) - [#105](https://github.com/jertel/elastalert2/pull/105) - @nsano-rururu ## Other changes +- Begin alerter refactoring to split large source code files into smaller files - [#161](https://github.com/jertel/elastalert2/pull/161) - @ferozsalam +- Update contribution guidelines with additional instructions for local testing - [#147](https://github.com/jertel/elastalert2/pull/147), [#148](https://github.com/jertel/elastalert2/pull/148) - @ferozsalam +- Add more unit test coverage - [#108](https://github.com/jertel/elastalert2/pull/108) - @nsano-rururu +- Update documentation: describe limit_execution, correct alerters list - [#107](https://github.com/jertel/elastalert2/pull/107) - @fberrez - Fix issue with testing alerts that contain Jinja templates - [#101](https://github.com/jertel/elastalert2/pull/101) - @jertel - Updated all references of Elastalert to use the mixed case ElastAlert, as that is the most prevalent formatting found in the documentation. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 710306db5..e69deb9fa 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -29,8 +29,9 @@ Maintainers, when creating a new release, follow the procedure below: - [setup.py](setup.py): Match the version to the new release version - [Chart.yaml](chart/elastalert2/Chart.yaml): Match chart version and the app version to the new release version (typically keep them in sync) - [values.yaml](chart/elastalert2/values.yaml): Match the default image version to the new release version. - - [README.md](chart/elastalert2/README.md): Match the default image version to the new release version. - - [CHANGELOG.md](CHANGELOG.md): This must contain all PRs and any other relevent notes about this release. + - [Chart README.md](chart/elastalert2/README.md): Match the default image version to the new release version. + - [Project README.md](README.md): Match the default image version to the new release version. 
+  - [CHANGELOG.md](CHANGELOG.md): This must contain all PRs and any other relevant notes about this release.
 3. Publish a [new][1] release.
    - The title (and tag) of the release will be the same value as the new version determined in step 1.
    - Paste the new version change notes from CHANGELOG.md into the description field.

diff --git a/README.md b/README.md
index 15f34a9a2..de2856600 100644
--- a/README.md
+++ b/README.md
@@ -21,7 +21,7 @@ Please see our [contributing guidelines][6].
 
 If you're interested in a pre-built Docker image check out the [elastalert2][2] project on Docker Hub.
 
-Be aware that the `latest` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need utilize a versioned tag, such as `2.0.4` instead, or `2` if you are comfortable with always using the latest released version of ElastAlert 2.
+Be aware that the `latest` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need to utilize a versioned tag, such as `2.1.0` instead, or `2` if you are comfortable with always using the latest released version of ElastAlert 2.
 
 A properly configured config.yaml file must be mounted into the container during startup of the container. Use the [example file][1] provided as a template, and once saved locally to a file such as `/tmp/elastalert.yaml`, run the container as follows:

diff --git a/chart/elastalert2/Chart.yaml b/chart/elastalert2/Chart.yaml
index 2e1a05f8e..b713a29dd 100644
--- a/chart/elastalert2/Chart.yaml
+++ b/chart/elastalert2/Chart.yaml
@@ -1,12 +1,11 @@
 apiVersion: v1
 description: Automated rule-based alerting for Elasticsearch
 name: elastalert2
-version: 2.0.4
-appVersion: 2.0.4
+version: 2.1.0
+appVersion: 2.1.0
 home: https://github.com/jertel/elastalert2
 sources:
   - https://github.com/jertel/elastalert2
 maintainers:
   - name: jertel
-    email: jertel@codesim.com
 engine: gotpl

diff --git a/chart/elastalert2/README.md b/chart/elastalert2/README.md
index fbbfbebe5..ee837de0e 100644
--- a/chart/elastalert2/README.md
+++ b/chart/elastalert2/README.md
@@ -47,7 +47,7 @@ The command removes all the Kubernetes components associated with the chart and

 | Parameter                                     | Description                                                                                                                     | Default                         |
 |----------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|---------------------------------|
 | `image.repository`                            | docker image                                                                                                                    | jertel/elastalert2              |
-| `image.tag`                                   | docker image tag                                                                                                                | 2.0.4                           |
+| `image.tag`                                   | docker image tag                                                                                                                | 2.1.0                           |
 | `image.pullPolicy`                            | image pull policy                                                                                                               | IfNotPresent                    |
 | `podAnnotations`                              | Annotations to be added to pods                                                                                                 | {}                              |
 | `command`                                     | command override for container                                                                                                  | `NULL`                          |

diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml
index 46a6c9fa2..9b13734d6 100644
--- a/chart/elastalert2/values.yaml
+++ b/chart/elastalert2/values.yaml
@@ -25,7 +25,7 @@ image:
   # docker image
   repository: jertel/elastalert2
   # docker image tag
-  tag: 2.0.4
+  tag: 2.1.0
   pullPolicy: IfNotPresent
 
 resources: {}

diff --git a/setup.py b/setup.py
index 9f6181335..da89708c9 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@ base_dir = os.path.dirname(__file__)
 setup(
     name='elastalert2',
-    version='2.0.4',
+    version='2.1.0',
     description='Automated rule-based alerting for Elasticsearch',
     setup_requires='setuptools',
     license='Apache 2.0',

From 57de88491b18ebab7e95398458446e5b0cf25d14 Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Mon, 
17 May 2021 15:58:03 +0100 Subject: [PATCH 0221/1065] Migrate email alerter --- elastalert/alerters/email.py | 133 +++++++++++++++++++++++++++++++++++ elastalert/alerts.py | 127 --------------------------------- elastalert/loaders.py | 3 +- tests/alerts_test.py | 30 ++++---- tests/loaders_test.py | 5 +- 5 files changed, 153 insertions(+), 145 deletions(-) create mode 100644 elastalert/alerters/email.py diff --git a/elastalert/alerters/email.py b/elastalert/alerters/email.py new file mode 100644 index 000000000..c921373fa --- /dev/null +++ b/elastalert/alerters/email.py @@ -0,0 +1,133 @@ +import os + +from ..alerts import Alerter +from ..util import elastalert_logger +from ..util import lookup_es_key +from ..util import EAException +from email.mime.text import MIMEText +from email.mime.multipart import MIMEMultipart +from email.mime.image import MIMEImage +from email.utils import formatdate +from socket import error +from smtplib import SMTP +from smtplib import SMTP_SSL +from smtplib import SMTPAuthenticationError +from smtplib import SMTPException + + +class EmailAlerter(Alerter): + """ Sends an email alert """ + required_options = frozenset(['email']) + + def __init__(self, *args): + super(EmailAlerter, self).__init__(*args) + + self.assets_dir = self.rule.get('assets_dir', '/tmp') + self.images_dictionary = dict(zip(self.rule.get('email_image_keys', []), self.rule.get('email_image_values', []))) + self.smtp_host = self.rule.get('smtp_host', 'localhost') + self.smtp_ssl = self.rule.get('smtp_ssl', False) + self.from_addr = self.rule.get('from_addr', 'ElastAlert') + self.smtp_port = self.rule.get('smtp_port', 25) + if self.rule.get('smtp_auth_file'): + self.get_account(self.rule['smtp_auth_file']) + self.smtp_key_file = self.rule.get('smtp_key_file') + self.smtp_cert_file = self.rule.get('smtp_cert_file') + # Convert email to a list if it isn't already + if isinstance(self.rule['email'], str): + self.rule['email'] = [self.rule['email']] + # If there is a cc then also convert it a list if it isn't + cc = self.rule.get('cc') + if cc and isinstance(cc, str): + self.rule['cc'] = [self.rule['cc']] + # If there is a bcc then also convert it to a list if it isn't + bcc = self.rule.get('bcc') + if bcc and isinstance(bcc, str): + self.rule['bcc'] = [self.rule['bcc']] + add_suffix = self.rule.get('email_add_domain') + if add_suffix and not add_suffix.startswith('@'): + self.rule['email_add_domain'] = '@' + add_suffix + + def alert(self, matches): + body = self.create_alert_body(matches) + + # Add JIRA ticket if it exists + if self.pipeline is not None and 'jira_ticket' in self.pipeline: + url = '%s/browse/%s' % (self.pipeline['jira_server'], self.pipeline['jira_ticket']) + body += '\nJIRA ticket: %s' % (url) + + to_addr = self.rule['email'] + if 'email_from_field' in self.rule: + recipient = lookup_es_key(matches[0], self.rule['email_from_field']) + if isinstance(recipient, str): + if '@' in recipient: + to_addr = [recipient] + elif 'email_add_domain' in self.rule: + to_addr = [recipient + self.rule['email_add_domain']] + elif isinstance(recipient, list): + to_addr = recipient + if 'email_add_domain' in self.rule: + to_addr = [name + self.rule['email_add_domain'] for name in to_addr] + if self.rule.get('email_format') == 'html': + # email_msg = MIMEText(body, 'html', _charset='UTF-8') # old way + email_msg = MIMEMultipart() + msgText = MIMEText(body, 'html', _charset='UTF-8') + email_msg.attach(msgText) # Added, and edited the previous line + + for image_key in self.images_dictionary: + fp = 
open(os.path.join(self.assets_dir, self.images_dictionary[image_key]), 'rb') + img = MIMEImage(fp.read()) + fp.close() + img.add_header('Content-ID', '<{}>'.format(image_key)) + email_msg.attach(img) + else: + email_msg = MIMEText(body, _charset='UTF-8') + email_msg['Subject'] = self.create_title(matches) + email_msg['To'] = ', '.join(to_addr) + email_msg['From'] = self.from_addr + email_msg['Reply-To'] = self.rule.get('email_reply_to', email_msg['To']) + email_msg['Date'] = formatdate() + if self.rule.get('cc'): + email_msg['CC'] = ','.join(self.rule['cc']) + to_addr = to_addr + self.rule['cc'] + if self.rule.get('bcc'): + to_addr = to_addr + self.rule['bcc'] + + try: + if self.smtp_ssl: + if self.smtp_port: + self.smtp = SMTP_SSL(self.smtp_host, self.smtp_port, keyfile=self.smtp_key_file, certfile=self.smtp_cert_file) + else: + self.smtp = SMTP_SSL(self.smtp_host, keyfile=self.smtp_key_file, certfile=self.smtp_cert_file) + else: + if self.smtp_port: + self.smtp = SMTP(self.smtp_host, self.smtp_port) + else: + self.smtp = SMTP(self.smtp_host) + self.smtp.ehlo() + if self.smtp.has_extn('STARTTLS'): + self.smtp.starttls(keyfile=self.smtp_key_file, certfile=self.smtp_cert_file) + if 'smtp_auth_file' in self.rule: + self.smtp.login(self.user, self.password) + except (SMTPException, error) as e: + raise EAException("Error connecting to SMTP host: %s" % (e)) + except SMTPAuthenticationError as e: + raise EAException("SMTP username/password rejected: %s" % (e)) + self.smtp.sendmail(self.from_addr, to_addr, email_msg.as_string()) + self.smtp.quit() + + elastalert_logger.info("Sent email to %s" % (to_addr)) + + def create_default_title(self, matches): + subject = 'ElastAlert: %s' % (self.rule['name']) + + # If the rule has a query_key, add that value plus timestamp to subject + if 'query_key' in self.rule: + qk = matches[0].get(self.rule['query_key']) + if qk: + subject += ' - %s' % (qk) + + return subject + + def get_info(self): + return {'type': 'email', + 'recipients': self.rule['email']} diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 1b508148c..7052d014e 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -8,15 +8,6 @@ import time import uuid import warnings -from email.mime.text import MIMEText -from email.mime.multipart import MIMEMultipart -from email.mime.image import MIMEImage -from email.utils import formatdate -from smtplib import SMTP -from smtplib import SMTP_SSL -from smtplib import SMTPAuthenticationError -from smtplib import SMTPException -from socket import error import boto3 import requests @@ -403,124 +394,6 @@ def get_info(self): return {'type': 'debug'} -class EmailAlerter(Alerter): - """ Sends an email alert """ - required_options = frozenset(['email']) - - def __init__(self, *args): - super(EmailAlerter, self).__init__(*args) - - self.assets_dir = self.rule.get('assets_dir', '/tmp') - self.images_dictionary = dict(zip(self.rule.get('email_image_keys', []), self.rule.get('email_image_values', []))) - self.smtp_host = self.rule.get('smtp_host', 'localhost') - self.smtp_ssl = self.rule.get('smtp_ssl', False) - self.from_addr = self.rule.get('from_addr', 'ElastAlert') - self.smtp_port = self.rule.get('smtp_port', 25) - if self.rule.get('smtp_auth_file'): - self.get_account(self.rule['smtp_auth_file']) - self.smtp_key_file = self.rule.get('smtp_key_file') - self.smtp_cert_file = self.rule.get('smtp_cert_file') - # Convert email to a list if it isn't already - if isinstance(self.rule['email'], str): - self.rule['email'] = [self.rule['email']] - # If 
there is a cc then also convert it a list if it isn't - cc = self.rule.get('cc') - if cc and isinstance(cc, str): - self.rule['cc'] = [self.rule['cc']] - # If there is a bcc then also convert it to a list if it isn't - bcc = self.rule.get('bcc') - if bcc and isinstance(bcc, str): - self.rule['bcc'] = [self.rule['bcc']] - add_suffix = self.rule.get('email_add_domain') - if add_suffix and not add_suffix.startswith('@'): - self.rule['email_add_domain'] = '@' + add_suffix - - def alert(self, matches): - body = self.create_alert_body(matches) - - # Add JIRA ticket if it exists - if self.pipeline is not None and 'jira_ticket' in self.pipeline: - url = '%s/browse/%s' % (self.pipeline['jira_server'], self.pipeline['jira_ticket']) - body += '\nJIRA ticket: %s' % (url) - - to_addr = self.rule['email'] - if 'email_from_field' in self.rule: - recipient = lookup_es_key(matches[0], self.rule['email_from_field']) - if isinstance(recipient, str): - if '@' in recipient: - to_addr = [recipient] - elif 'email_add_domain' in self.rule: - to_addr = [recipient + self.rule['email_add_domain']] - elif isinstance(recipient, list): - to_addr = recipient - if 'email_add_domain' in self.rule: - to_addr = [name + self.rule['email_add_domain'] for name in to_addr] - if self.rule.get('email_format') == 'html': - # email_msg = MIMEText(body, 'html', _charset='UTF-8') # old way - email_msg = MIMEMultipart() - msgText = MIMEText(body, 'html', _charset='UTF-8') - email_msg.attach(msgText) # Added, and edited the previous line - - for image_key in self.images_dictionary: - fp = open(os.path.join(self.assets_dir, self.images_dictionary[image_key]), 'rb') - img = MIMEImage(fp.read()) - fp.close() - img.add_header('Content-ID', '<{}>'.format(image_key)) - email_msg.attach(img) - else: - email_msg = MIMEText(body, _charset='UTF-8') - email_msg['Subject'] = self.create_title(matches) - email_msg['To'] = ', '.join(to_addr) - email_msg['From'] = self.from_addr - email_msg['Reply-To'] = self.rule.get('email_reply_to', email_msg['To']) - email_msg['Date'] = formatdate() - if self.rule.get('cc'): - email_msg['CC'] = ','.join(self.rule['cc']) - to_addr = to_addr + self.rule['cc'] - if self.rule.get('bcc'): - to_addr = to_addr + self.rule['bcc'] - - try: - if self.smtp_ssl: - if self.smtp_port: - self.smtp = SMTP_SSL(self.smtp_host, self.smtp_port, keyfile=self.smtp_key_file, certfile=self.smtp_cert_file) - else: - self.smtp = SMTP_SSL(self.smtp_host, keyfile=self.smtp_key_file, certfile=self.smtp_cert_file) - else: - if self.smtp_port: - self.smtp = SMTP(self.smtp_host, self.smtp_port) - else: - self.smtp = SMTP(self.smtp_host) - self.smtp.ehlo() - if self.smtp.has_extn('STARTTLS'): - self.smtp.starttls(keyfile=self.smtp_key_file, certfile=self.smtp_cert_file) - if 'smtp_auth_file' in self.rule: - self.smtp.login(self.user, self.password) - except (SMTPException, error) as e: - raise EAException("Error connecting to SMTP host: %s" % (e)) - except SMTPAuthenticationError as e: - raise EAException("SMTP username/password rejected: %s" % (e)) - self.smtp.sendmail(self.from_addr, to_addr, email_msg.as_string()) - self.smtp.quit() - - elastalert_logger.info("Sent email to %s" % (to_addr)) - - def create_default_title(self, matches): - subject = 'ElastAlert: %s' % (self.rule['name']) - - # If the rule has a query_key, add that value plus timestamp to subject - if 'query_key' in self.rule: - qk = matches[0].get(self.rule['query_key']) - if qk: - subject += ' - %s' % (qk) - - return subject - - def get_info(self): - return {'type': 'email', - 
'recipients': self.rule['email']} - - class JiraAlerter(Alerter): """ Creates a Jira ticket for each alert """ required_options = frozenset(['jira_server', 'jira_account_file', 'jira_project', 'jira_issuetype']) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 013be0dd0..5a336c0ce 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -15,6 +15,7 @@ from . import alerts from . import enhancements from . import ruletypes +from .alerters.email import EmailAlerter from .alerters.opsgenie import OpsGenieAlerter from .alerters.zabbix import ZabbixAlerter from .util import dt_to_ts @@ -59,7 +60,7 @@ class RulesLoader(object): # Used to map names of alerts to their classes alerts_mapping = { - 'email': alerts.EmailAlerter, + 'email': EmailAlerter, 'jira': alerts.JiraAlerter, 'opsgenie': OpsGenieAlerter, 'stomp': alerts.StompAlerter, diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 4e22b33c0..db6eba7d8 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -20,7 +20,6 @@ from elastalert.alerts import DatadogAlerter from elastalert.alerts import DingTalkAlerter from elastalert.alerts import DiscordAlerter -from elastalert.alerts import EmailAlerter from elastalert.alerts import GitterAlerter from elastalert.alerts import GoogleChatAlerter from elastalert.alerts import HiveAlerter @@ -36,6 +35,7 @@ from elastalert.alerts import SlackAlerter from elastalert.alerts import TelegramAlerter from elastalert.loaders import FileRulesLoader +from elastalert.alerters.email import EmailAlerter from elastalert.alerters.opsgenie import OpsGenieAlerter from elastalert.alerters.zabbix import ZabbixAlerter from elastalert.alerts import VictorOpsAlerter @@ -113,7 +113,7 @@ def test_email(): rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'owner': 'owner_value', 'alert_subject': 'Test alert for {0}, owned by {1}', 'alert_subject_args': ['test_term', 'owner'], 'snowman': '☃'} - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) @@ -138,7 +138,7 @@ def test_email_from_field(): rule = {'name': 'test alert', 'email': ['testing@test.test'], 'email_add_domain': 'example.com', 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_from_field': 'data.user', 'owner': 'owner_value'} # Found, without @ - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) alert.alert([{'data': {'user': 'qlo'}}]) @@ -146,28 +146,28 @@ def test_email_from_field(): # Found, with @ rule['email_add_domain'] = '@example.com' - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) alert.alert([{'data': {'user': 'qlo'}}]) assert mock_smtp.mock_calls[4][1][1] == ['qlo@example.com'] # Found, list - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) alert.alert([{'data': {'user': ['qlo', 'foo']}}]) assert mock_smtp.mock_calls[4][1][1] == ['qlo@example.com', 'foo@example.com'] # Not found - with 
mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) alert.alert([{'data': {'foo': 'qlo'}}]) assert mock_smtp.mock_calls[4][1][1] == ['testing@test.test'] # Found, wrong type - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) alert.alert([{'data': {'user': 17}}]) @@ -178,7 +178,7 @@ def test_email_with_unicode_strings(): rule = {'name': 'test alert', 'email': 'testing@test.test', 'from_addr': 'testfrom@test.test', 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'owner': 'owner_value', 'alert_subject': 'Test alert for {0}, owned by {1}', 'alert_subject_args': ['test_term', 'owner'], 'snowman': '☃'} - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) @@ -204,7 +204,7 @@ def test_email_with_auth(): 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'alert_subject': 'Test alert for {0}', 'alert_subject_args': ['test_term'], 'smtp_auth_file': 'file.txt', 'rule_file': '/tmp/foo.yaml'} - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: with mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} mock_smtp.return_value = mock.Mock() @@ -226,7 +226,7 @@ def test_email_with_cert_key(): 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'alert_subject': 'Test alert for {0}', 'alert_subject_args': ['test_term'], 'smtp_auth_file': 'file.txt', 'smtp_cert_file': 'dummy/cert.crt', 'smtp_key_file': 'dummy/client.key', 'rule_file': '/tmp/foo.yaml'} - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: with mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} mock_smtp.return_value = mock.Mock() @@ -247,7 +247,7 @@ def test_email_with_cc(): rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'cc': 'tester@testing.testing'} - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) @@ -272,7 +272,7 @@ def test_email_with_bcc(): rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'bcc': 'tester@testing.testing'} - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) @@ -297,7 +297,7 @@ def test_email_with_cc_and_bcc(): rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'cc': ['test1@test.com', 
'test2@test.com'], 'bcc': 'tester@testing.testing'} - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) @@ -342,7 +342,7 @@ def test_email_with_args(): 'alert_text_args': ['test_arg1', 'test_arg2', 'test.arg3'], 'alert_missing_value': '' } - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) @@ -373,7 +373,7 @@ def test_email_query_key_in_subject(): rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'query_key': 'username'} - with mock.patch('elastalert.alerts.SMTP') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) diff --git a/tests/loaders_test.py b/tests/loaders_test.py index be29dfa9d..834dbc8cd 100644 --- a/tests/loaders_test.py +++ b/tests/loaders_test.py @@ -8,6 +8,7 @@ import elastalert.alerts import elastalert.ruletypes +from elastalert.alerters.email import EmailAlerter from elastalert.config import load_conf from elastalert.loaders import FileRulesLoader from elastalert.util import EAException @@ -160,8 +161,8 @@ def test_load_inline_alert_rule(): with mock.patch.object(rules_loader, 'get_yaml') as mock_open: mock_open.side_effect = [test_config_copy, test_rule_copy] rules_loader.load_modules(test_rule_copy) - assert isinstance(test_rule_copy['alert'][0], elastalert.alerts.EmailAlerter) - assert isinstance(test_rule_copy['alert'][1], elastalert.alerts.EmailAlerter) + assert isinstance(test_rule_copy['alert'][0], EmailAlerter) + assert isinstance(test_rule_copy['alert'][1], EmailAlerter) assert 'foo@bar.baz' in test_rule_copy['alert'][0].rule['email'] assert 'baz@foo.bar' in test_rule_copy['alert'][1].rule['email'] From 2ca5969ebe3045037d2043ecfc3fb3fc305b5aa2 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 17 May 2021 17:02:28 +0100 Subject: [PATCH 0222/1065] Migrate Jira alerter --- elastalert/alerters/jira.py | 399 ++++++++++++++++++++++++++++++++++++ elastalert/alerts.py | 390 ----------------------------------- elastalert/loaders.py | 3 +- tests/alerts_test.py | 24 +-- 4 files changed, 413 insertions(+), 403 deletions(-) create mode 100644 elastalert/alerters/jira.py diff --git a/elastalert/alerters/jira.py b/elastalert/alerters/jira.py new file mode 100644 index 000000000..4614ea40a --- /dev/null +++ b/elastalert/alerters/jira.py @@ -0,0 +1,399 @@ +import datetime +import sys + +from ..alerts import Alerter +from ..alerts import BasicMatchString +from ..util import elastalert_logger +from ..util import lookup_es_key +from ..util import pretty_ts +from ..util import ts_now +from ..util import ts_to_dt +from ..util import EAException +from jira.client import JIRA +from jira.exceptions import JIRAError + + +class JiraFormattedMatchString(BasicMatchString): + def _add_match_items(self): + match_items = dict([(x, y) for x, y in list(self.match.items()) if not x.startswith('top_events_')]) + json_blob = self._pretty_print_as_json(match_items) + preformatted_text = '{{code}}{0}{{code}}'.format(json_blob) + self.text += preformatted_text + + +class JiraAlerter(Alerter): + """ Creates a Jira ticket for each alert """ + required_options = frozenset(['jira_server', 'jira_account_file', 'jira_project', 
'jira_issuetype']) + + # Maintain a static set of built-in fields that we explicitly know how to set + # For anything else, we will do best-effort and try to set a string value + known_field_list = [ + 'jira_account_file', + 'jira_assignee', + 'jira_bump_after_inactivity', + 'jira_bump_in_statuses', + 'jira_bump_not_in_statuses', + 'jira_bump_only', + 'jira_bump_tickets', + 'jira_component', + 'jira_components', + 'jira_description', + 'jira_ignore_in_title', + 'jira_issuetype', + 'jira_label', + 'jira_labels', + 'jira_max_age', + 'jira_priority', + 'jira_project', + 'jira_server', + 'jira_transition_to', + 'jira_watchers', + ] + + # Some built-in jira types that can be used as custom fields require special handling + # Here is a sample of one of them: + # {"id":"customfield_12807","name":"My Custom Field","custom":true,"orderable":true,"navigable":true,"searchable":true, + # "clauseNames":["cf[12807]","My Custom Field"],"schema":{"type":"array","items":"string", + # "custom":"com.atlassian.jira.plugin.system.customfieldtypes:multiselect","customId":12807}} + # There are likely others that will need to be updated on a case-by-case basis + custom_string_types_with_special_handling = [ + 'com.atlassian.jira.plugin.system.customfieldtypes:multicheckboxes', + 'com.atlassian.jira.plugin.system.customfieldtypes:multiselect', + 'com.atlassian.jira.plugin.system.customfieldtypes:radiobuttons', + ] + + def __init__(self, rule): + super(JiraAlerter, self).__init__(rule) + self.server = self.rule['jira_server'] + self.get_account(self.rule['jira_account_file']) + self.project = self.rule['jira_project'] + self.issue_type = self.rule['jira_issuetype'] + + # Deferred settings refer to values that can only be resolved when a match + # is found and as such loading them will be delayed until we find a match + self.deferred_settings = [] + + # We used to support only a single component. This allows us to maintain backwards compatibility + # while also giving the user-facing API a more representative name + self.components = self.rule.get('jira_components', self.rule.get('jira_component')) + + # We used to support only a single label. This allows us to maintain backwards compatibility + # while also giving the user-facing API a more representative name + self.labels = self.rule.get('jira_labels', self.rule.get('jira_label')) + + self.description = self.rule.get('jira_description', '') + self.assignee = self.rule.get('jira_assignee') + self.max_age = self.rule.get('jira_max_age', 30) + self.priority = self.rule.get('jira_priority') + self.bump_tickets = self.rule.get('jira_bump_tickets', False) + self.bump_not_in_statuses = self.rule.get('jira_bump_not_in_statuses') + self.bump_in_statuses = self.rule.get('jira_bump_in_statuses') + self.bump_after_inactivity = self.rule.get('jira_bump_after_inactivity', 0) + self.bump_only = self.rule.get('jira_bump_only', False) + self.transition = self.rule.get('jira_transition_to', False) + self.watchers = self.rule.get('jira_watchers') + self.client = None + + if self.bump_in_statuses and self.bump_not_in_statuses: + msg = 'Both jira_bump_in_statuses (%s) and jira_bump_not_in_statuses (%s) are set.' % \ + (','.join(self.bump_in_statuses), ','.join(self.bump_not_in_statuses)) + intersection = list(set(self.bump_in_statuses) & set(self.bump_not_in_statuses)) + if intersection: + msg = '%s Both have common statuses of (%s). As such, no tickets will ever be found.' % ( + msg, ','.join(intersection)) + msg += ' This should be simplified to use only one or the other.' 
+ elastalert_logger.warning(msg) + + self.reset_jira_args() + + try: + self.client = JIRA(self.server, basic_auth=(self.user, self.password)) + self.get_priorities() + self.jira_fields = self.client.fields() + self.get_arbitrary_fields() + except JIRAError as e: + # JIRAError may contain HTML, pass along only first 1024 chars + raise EAException("Error connecting to JIRA: %s" % (str(e)[:1024])).with_traceback(sys.exc_info()[2]) + + self.set_priority() + + def set_priority(self): + try: + if self.priority is not None and self.client is not None: + self.jira_args['priority'] = {'id': self.priority_ids[self.priority]} + except KeyError: + elastalert_logger.error("Priority %s not found. Valid priorities are %s" % (self.priority, list(self.priority_ids.keys()))) + + def reset_jira_args(self): + self.jira_args = {'project': {'key': self.project}, + 'issuetype': {'name': self.issue_type}} + + if self.components: + # Support single component or list + if type(self.components) != list: + self.jira_args['components'] = [{'name': self.components}] + else: + self.jira_args['components'] = [{'name': component} for component in self.components] + if self.labels: + # Support single label or list + if type(self.labels) != list: + self.labels = [self.labels] + self.jira_args['labels'] = self.labels + if self.watchers: + # Support single watcher or list + if type(self.watchers) != list: + self.watchers = [self.watchers] + if self.assignee: + self.jira_args['assignee'] = {'name': self.assignee} + + self.set_priority() + + def set_jira_arg(self, jira_field, value, fields): + # Remove the jira_ part. Convert underscores to spaces + normalized_jira_field = jira_field[5:].replace('_', ' ').lower() + # All jira fields should be found in the 'id' or the 'name' field. Therefore, try both just in case + for identifier in ['name', 'id']: + field = next((f for f in fields if normalized_jira_field == f[identifier].replace('_', ' ').lower()), None) + if field: + break + if not field: + # Log a warning to ElastAlert saying that we couldn't find that type? + # OR raise and fail to load the alert entirely? Probably the latter... + raise Exception("Could not find a definition for the jira field '{0}'".format(normalized_jira_field)) + arg_name = field['id'] + # Check the schema information to decide how to set the value correctly + # If the schema information is not available, raise an exception since we don't know how to set it + # Note this is only the case for two built-in types, id: issuekey and id: thumbnail + if not ('schema' in field or 'type' in field['schema']): + raise Exception("Could not determine schema information for the jira field '{0}'".format(normalized_jira_field)) + arg_type = field['schema']['type'] + + # Handle arrays of simple types like strings or numbers + if arg_type == 'array': + # As a convenience, support the scenario wherein the user only provides + # a single value for a multi-value field e.g. jira_labels: Only_One_Label + if type(value) != list: + value = [value] + array_items = field['schema']['items'] + # Simple string types + if array_items in ['string', 'date', 'datetime']: + # Special case for multi-select custom types (the JIRA metadata says that these are strings, but + # in reality, they are required to be provided as an object. 
+ if 'custom' in field['schema'] and field['schema']['custom'] in self.custom_string_types_with_special_handling: + self.jira_args[arg_name] = [{'value': v} for v in value] + else: + self.jira_args[arg_name] = value + elif array_items == 'number': + self.jira_args[arg_name] = [int(v) for v in value] + # Also attempt to handle arrays of complex types that have to be passed as objects with an identifier 'key' + elif array_items == 'option': + self.jira_args[arg_name] = [{'value': v} for v in value] + else: + # Try setting it as an object, using 'name' as the key + # This may not work, as the key might actually be 'key', 'id', 'value', or something else + # If it works, great! If not, it will manifest itself as an API error that will bubble up + self.jira_args[arg_name] = [{'name': v} for v in value] + # Handle non-array types + else: + # Simple string types + if arg_type in ['string', 'date', 'datetime']: + # Special case for custom types (the JIRA metadata says that these are strings, but + # in reality, they are required to be provided as an object. + if 'custom' in field['schema'] and field['schema']['custom'] in self.custom_string_types_with_special_handling: + self.jira_args[arg_name] = {'value': value} + else: + self.jira_args[arg_name] = value + # Number type + elif arg_type == 'number': + self.jira_args[arg_name] = int(value) + elif arg_type == 'option': + self.jira_args[arg_name] = {'value': value} + # Complex type + else: + self.jira_args[arg_name] = {'name': value} + + def get_arbitrary_fields(self): + # Clear jira_args + self.reset_jira_args() + + for jira_field, value in self.rule.items(): + # If we find a field that is not covered by the set that we are aware of, it means it is either: + # 1. A built-in supported field in JIRA that we don't have on our radar + # 2. A custom field that a JIRA admin has configured + if jira_field.startswith('jira_') and jira_field not in self.known_field_list and str(value)[:1] != '#': + self.set_jira_arg(jira_field, value, self.jira_fields) + if jira_field.startswith('jira_') and jira_field not in self.known_field_list and str(value)[:1] == '#': + self.deferred_settings.append(jira_field) + + def get_priorities(self): + """ Creates a mapping of priority index to id. """ + priorities = self.client.priorities() + self.priority_ids = {} + for x in range(len(priorities)): + self.priority_ids[x] = priorities[x].id + + def set_assignee(self, assignee): + self.assignee = assignee + if assignee: + self.jira_args['assignee'] = {'name': assignee} + elif 'assignee' in self.jira_args: + self.jira_args.pop('assignee') + + def find_existing_ticket(self, matches): + # Default title, get stripped search version + if 'alert_subject' not in self.rule: + title = self.create_default_title(matches, True) + else: + title = self.create_title(matches) + + if 'jira_ignore_in_title' in self.rule: + title = title.replace(matches[0].get(self.rule['jira_ignore_in_title'], ''), '') + + # This is necessary for search to work. 
Other special characters and dashes + # directly adjacent to words appear to be ok + title = title.replace(' - ', ' ') + title = title.replace('\\', '\\\\') + + date = (datetime.datetime.now() - datetime.timedelta(days=self.max_age)).strftime('%Y-%m-%d') + jql = 'project=%s AND summary~"%s" and created >= "%s"' % (self.project, title, date) + if self.bump_in_statuses: + jql = '%s and status in (%s)' % (jql, ','.join(["\"%s\"" % status if ' ' in status else status for status + in self.bump_in_statuses])) + if self.bump_not_in_statuses: + jql = '%s and status not in (%s)' % (jql, ','.join(["\"%s\"" % status if ' ' in status else status + for status in self.bump_not_in_statuses])) + try: + issues = self.client.search_issues(jql) + except JIRAError as e: + elastalert_logger.exception("Error while searching for JIRA ticket using jql '%s': %s" % (jql, e)) + return None + + if len(issues): + return issues[0] + + def comment_on_ticket(self, ticket, match): + text = str(JiraFormattedMatchString(self.rule, match)) + timestamp = pretty_ts(lookup_es_key(match, self.rule['timestamp_field'])) + comment = "This alert was triggered again at %s\n%s" % (timestamp, text) + self.client.add_comment(ticket, comment) + + def transition_ticket(self, ticket): + transitions = self.client.transitions(ticket) + for t in transitions: + if t['name'] == self.transition: + self.client.transition_issue(ticket, t['id']) + + def alert(self, matches): + # Reset arbitrary fields to pick up changes + self.get_arbitrary_fields() + if len(self.deferred_settings) > 0: + fields = self.client.fields() + for jira_field in self.deferred_settings: + value = lookup_es_key(matches[0], self.rule[jira_field][1:]) + self.set_jira_arg(jira_field, value, fields) + + title = self.create_title(matches) + + if self.bump_tickets: + ticket = self.find_existing_ticket(matches) + if ticket: + inactivity_datetime = ts_now() - datetime.timedelta(days=self.bump_after_inactivity) + if ts_to_dt(ticket.fields.updated) >= inactivity_datetime: + if self.pipeline is not None: + self.pipeline['jira_ticket'] = None + self.pipeline['jira_server'] = self.server + return None + elastalert_logger.info('Commenting on existing ticket %s' % (ticket.key)) + for match in matches: + try: + self.comment_on_ticket(ticket, match) + except JIRAError as e: + elastalert_logger.exception("Error while commenting on ticket %s: %s" % (ticket, e)) + if self.labels: + for label in self.labels: + try: + ticket.fields.labels.append(label) + except JIRAError as e: + elastalert_logger.exception("Error while appending labels to ticket %s: %s" % (ticket, e)) + if self.transition: + elastalert_logger.info('Transitioning existing ticket %s' % (ticket.key)) + try: + self.transition_ticket(ticket) + except JIRAError as e: + elastalert_logger.exception("Error while transitioning ticket %s: %s" % (ticket, e)) + + if self.pipeline is not None: + self.pipeline['jira_ticket'] = ticket + self.pipeline['jira_server'] = self.server + return None + if self.bump_only: + return None + + self.jira_args['summary'] = title + self.jira_args['description'] = self.create_alert_body(matches) + + try: + self.issue = self.client.create_issue(**self.jira_args) + + # You can not add watchers on initial creation. 
Only as a follow-up action + if self.watchers: + for watcher in self.watchers: + try: + self.client.add_watcher(self.issue.key, watcher) + except Exception as ex: + # Re-raise the exception, preserve the stack-trace, and give some + # context as to which watcher failed to be added + raise Exception( + "Exception encountered when trying to add '{0}' as a watcher. Does the user exist?\n{1}" .format( + watcher, + ex + )).with_traceback(sys.exc_info()[2]) + + except JIRAError as e: + raise EAException("Error creating JIRA ticket using jira_args (%s): %s" % (self.jira_args, e)) + elastalert_logger.info("Opened Jira ticket: %s" % (self.issue)) + + if self.pipeline is not None: + self.pipeline['jira_ticket'] = self.issue + self.pipeline['jira_server'] = self.server + + def create_alert_body(self, matches): + body = self.description + '\n' + body += self.get_aggregation_summary_text(matches) + if self.rule.get('alert_text_type') != 'aggregation_summary_only': + for match in matches: + body += str(JiraFormattedMatchString(self.rule, match)) + if len(matches) > 1: + body += '\n----------------------------------------\n' + return body + + def get_aggregation_summary_text(self, matches): + text = super(JiraAlerter, self).get_aggregation_summary_text(matches) + if text: + text = '{{noformat}}{0}{{noformat}}'.format(text) + return text + + def create_default_title(self, matches, for_search=False): + # If there is a query_key, use that in the title + + if 'query_key' in self.rule and lookup_es_key(matches[0], self.rule['query_key']): + title = 'ElastAlert: %s matched %s' % (lookup_es_key(matches[0], self.rule['query_key']), self.rule['name']) + else: + title = 'ElastAlert: %s' % (self.rule['name']) + + if for_search: + return title + + timestamp = matches[0].get(self.rule['timestamp_field']) + if timestamp: + title += ' - %s' % (pretty_ts(timestamp, self.rule.get('use_local_time'))) + + # Add count for spikes + count = matches[0].get('spike_count') + if count: + title += ' - %s+ events' % (count) + + return title + + def get_info(self): + return {'type': 'jira'} diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 7052d014e..e207f914a 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -13,8 +13,6 @@ import requests import stomp from exotel import Exotel -from jira.client import JIRA -from jira.exceptions import JIRAError from requests.auth import HTTPProxyAuth from requests.exceptions import RequestException from texttable import Texttable @@ -24,9 +22,7 @@ from .util import EAException from .util import elastalert_logger from .util import lookup_es_key -from .util import pretty_ts from .util import resolve_string -from .util import ts_now from .util import ts_to_dt from .yaml import read_yaml @@ -148,14 +144,6 @@ def __str__(self): return self.text -class JiraFormattedMatchString(BasicMatchString): - def _add_match_items(self): - match_items = dict([(x, y) for x, y in list(self.match.items()) if not x.startswith('top_events_')]) - json_blob = self._pretty_print_as_json(match_items) - preformatted_text = '{{code}}{0}{{code}}'.format(json_blob) - self.text += preformatted_text - - class Alerter(object): """ Base class for types of alerts. 
@@ -394,384 +382,6 @@ def get_info(self): return {'type': 'debug'} -class JiraAlerter(Alerter): - """ Creates a Jira ticket for each alert """ - required_options = frozenset(['jira_server', 'jira_account_file', 'jira_project', 'jira_issuetype']) - - # Maintain a static set of built-in fields that we explicitly know how to set - # For anything else, we will do best-effort and try to set a string value - known_field_list = [ - 'jira_account_file', - 'jira_assignee', - 'jira_bump_after_inactivity', - 'jira_bump_in_statuses', - 'jira_bump_not_in_statuses', - 'jira_bump_only', - 'jira_bump_tickets', - 'jira_component', - 'jira_components', - 'jira_description', - 'jira_ignore_in_title', - 'jira_issuetype', - 'jira_label', - 'jira_labels', - 'jira_max_age', - 'jira_priority', - 'jira_project', - 'jira_server', - 'jira_transition_to', - 'jira_watchers', - ] - - # Some built-in jira types that can be used as custom fields require special handling - # Here is a sample of one of them: - # {"id":"customfield_12807","name":"My Custom Field","custom":true,"orderable":true,"navigable":true,"searchable":true, - # "clauseNames":["cf[12807]","My Custom Field"],"schema":{"type":"array","items":"string", - # "custom":"com.atlassian.jira.plugin.system.customfieldtypes:multiselect","customId":12807}} - # There are likely others that will need to be updated on a case-by-case basis - custom_string_types_with_special_handling = [ - 'com.atlassian.jira.plugin.system.customfieldtypes:multicheckboxes', - 'com.atlassian.jira.plugin.system.customfieldtypes:multiselect', - 'com.atlassian.jira.plugin.system.customfieldtypes:radiobuttons', - ] - - def __init__(self, rule): - super(JiraAlerter, self).__init__(rule) - self.server = self.rule['jira_server'] - self.get_account(self.rule['jira_account_file']) - self.project = self.rule['jira_project'] - self.issue_type = self.rule['jira_issuetype'] - - # Deferred settings refer to values that can only be resolved when a match - # is found and as such loading them will be delayed until we find a match - self.deferred_settings = [] - - # We used to support only a single component. This allows us to maintain backwards compatibility - # while also giving the user-facing API a more representative name - self.components = self.rule.get('jira_components', self.rule.get('jira_component')) - - # We used to support only a single label. This allows us to maintain backwards compatibility - # while also giving the user-facing API a more representative name - self.labels = self.rule.get('jira_labels', self.rule.get('jira_label')) - - self.description = self.rule.get('jira_description', '') - self.assignee = self.rule.get('jira_assignee') - self.max_age = self.rule.get('jira_max_age', 30) - self.priority = self.rule.get('jira_priority') - self.bump_tickets = self.rule.get('jira_bump_tickets', False) - self.bump_not_in_statuses = self.rule.get('jira_bump_not_in_statuses') - self.bump_in_statuses = self.rule.get('jira_bump_in_statuses') - self.bump_after_inactivity = self.rule.get('jira_bump_after_inactivity', 0) - self.bump_only = self.rule.get('jira_bump_only', False) - self.transition = self.rule.get('jira_transition_to', False) - self.watchers = self.rule.get('jira_watchers') - self.client = None - - if self.bump_in_statuses and self.bump_not_in_statuses: - msg = 'Both jira_bump_in_statuses (%s) and jira_bump_not_in_statuses (%s) are set.' 
% \ - (','.join(self.bump_in_statuses), ','.join(self.bump_not_in_statuses)) - intersection = list(set(self.bump_in_statuses) & set(self.bump_in_statuses)) - if intersection: - msg = '%s Both have common statuses of (%s). As such, no tickets will ever be found.' % ( - msg, ','.join(intersection)) - msg += ' This should be simplified to use only one or the other.' - elastalert_logger.warning(msg) - - self.reset_jira_args() - - try: - self.client = JIRA(self.server, basic_auth=(self.user, self.password)) - self.get_priorities() - self.jira_fields = self.client.fields() - self.get_arbitrary_fields() - except JIRAError as e: - # JIRAError may contain HTML, pass along only first 1024 chars - raise EAException("Error connecting to JIRA: %s" % (str(e)[:1024])).with_traceback(sys.exc_info()[2]) - - self.set_priority() - - def set_priority(self): - try: - if self.priority is not None and self.client is not None: - self.jira_args['priority'] = {'id': self.priority_ids[self.priority]} - except KeyError: - elastalert_logger.error("Priority %s not found. Valid priorities are %s" % (self.priority, list(self.priority_ids.keys()))) - - def reset_jira_args(self): - self.jira_args = {'project': {'key': self.project}, - 'issuetype': {'name': self.issue_type}} - - if self.components: - # Support single component or list - if type(self.components) != list: - self.jira_args['components'] = [{'name': self.components}] - else: - self.jira_args['components'] = [{'name': component} for component in self.components] - if self.labels: - # Support single label or list - if type(self.labels) != list: - self.labels = [self.labels] - self.jira_args['labels'] = self.labels - if self.watchers: - # Support single watcher or list - if type(self.watchers) != list: - self.watchers = [self.watchers] - if self.assignee: - self.jira_args['assignee'] = {'name': self.assignee} - - self.set_priority() - - def set_jira_arg(self, jira_field, value, fields): - # Remove the jira_ part. Convert underscores to spaces - normalized_jira_field = jira_field[5:].replace('_', ' ').lower() - # All jira fields should be found in the 'id' or the 'name' field. Therefore, try both just in case - for identifier in ['name', 'id']: - field = next((f for f in fields if normalized_jira_field == f[identifier].replace('_', ' ').lower()), None) - if field: - break - if not field: - # Log a warning to ElastAlert saying that we couldn't find that type? - # OR raise and fail to load the alert entirely? Probably the latter... - raise Exception("Could not find a definition for the jira field '{0}'".format(normalized_jira_field)) - arg_name = field['id'] - # Check the schema information to decide how to set the value correctly - # If the schema information is not available, raise an exception since we don't know how to set it - # Note this is only the case for two built-in types, id: issuekey and id: thumbnail - if not ('schema' in field or 'type' in field['schema']): - raise Exception("Could not determine schema information for the jira field '{0}'".format(normalized_jira_field)) - arg_type = field['schema']['type'] - - # Handle arrays of simple types like strings or numbers - if arg_type == 'array': - # As a convenience, support the scenario wherein the user only provides - # a single value for a multi-value field e.g. 
jira_labels: Only_One_Label - if type(value) != list: - value = [value] - array_items = field['schema']['items'] - # Simple string types - if array_items in ['string', 'date', 'datetime']: - # Special case for multi-select custom types (the JIRA metadata says that these are strings, but - # in reality, they are required to be provided as an object. - if 'custom' in field['schema'] and field['schema']['custom'] in self.custom_string_types_with_special_handling: - self.jira_args[arg_name] = [{'value': v} for v in value] - else: - self.jira_args[arg_name] = value - elif array_items == 'number': - self.jira_args[arg_name] = [int(v) for v in value] - # Also attempt to handle arrays of complex types that have to be passed as objects with an identifier 'key' - elif array_items == 'option': - self.jira_args[arg_name] = [{'value': v} for v in value] - else: - # Try setting it as an object, using 'name' as the key - # This may not work, as the key might actually be 'key', 'id', 'value', or something else - # If it works, great! If not, it will manifest itself as an API error that will bubble up - self.jira_args[arg_name] = [{'name': v} for v in value] - # Handle non-array types - else: - # Simple string types - if arg_type in ['string', 'date', 'datetime']: - # Special case for custom types (the JIRA metadata says that these are strings, but - # in reality, they are required to be provided as an object. - if 'custom' in field['schema'] and field['schema']['custom'] in self.custom_string_types_with_special_handling: - self.jira_args[arg_name] = {'value': value} - else: - self.jira_args[arg_name] = value - # Number type - elif arg_type == 'number': - self.jira_args[arg_name] = int(value) - elif arg_type == 'option': - self.jira_args[arg_name] = {'value': value} - # Complex type - else: - self.jira_args[arg_name] = {'name': value} - - def get_arbitrary_fields(self): - # Clear jira_args - self.reset_jira_args() - - for jira_field, value in self.rule.items(): - # If we find a field that is not covered by the set that we are aware of, it means it is either: - # 1. A built-in supported field in JIRA that we don't have on our radar - # 2. A custom field that a JIRA admin has configured - if jira_field.startswith('jira_') and jira_field not in self.known_field_list and str(value)[:1] != '#': - self.set_jira_arg(jira_field, value, self.jira_fields) - if jira_field.startswith('jira_') and jira_field not in self.known_field_list and str(value)[:1] == '#': - self.deferred_settings.append(jira_field) - - def get_priorities(self): - """ Creates a mapping of priority index to id. """ - priorities = self.client.priorities() - self.priority_ids = {} - for x in range(len(priorities)): - self.priority_ids[x] = priorities[x].id - - def set_assignee(self, assignee): - self.assignee = assignee - if assignee: - self.jira_args['assignee'] = {'name': assignee} - elif 'assignee' in self.jira_args: - self.jira_args.pop('assignee') - - def find_existing_ticket(self, matches): - # Default title, get stripped search version - if 'alert_subject' not in self.rule: - title = self.create_default_title(matches, True) - else: - title = self.create_title(matches) - - if 'jira_ignore_in_title' in self.rule: - title = title.replace(matches[0].get(self.rule['jira_ignore_in_title'], ''), '') - - # This is necessary for search to work. 
Other special characters and dashes - # directly adjacent to words appear to be ok - title = title.replace(' - ', ' ') - title = title.replace('\\', '\\\\') - - date = (datetime.datetime.now() - datetime.timedelta(days=self.max_age)).strftime('%Y-%m-%d') - jql = 'project=%s AND summary~"%s" and created >= "%s"' % (self.project, title, date) - if self.bump_in_statuses: - jql = '%s and status in (%s)' % (jql, ','.join(["\"%s\"" % status if ' ' in status else status for status - in self.bump_in_statuses])) - if self.bump_not_in_statuses: - jql = '%s and status not in (%s)' % (jql, ','.join(["\"%s\"" % status if ' ' in status else status - for status in self.bump_not_in_statuses])) - try: - issues = self.client.search_issues(jql) - except JIRAError as e: - elastalert_logger.exception("Error while searching for JIRA ticket using jql '%s': %s" % (jql, e)) - return None - - if len(issues): - return issues[0] - - def comment_on_ticket(self, ticket, match): - text = str(JiraFormattedMatchString(self.rule, match)) - timestamp = pretty_ts(lookup_es_key(match, self.rule['timestamp_field'])) - comment = "This alert was triggered again at %s\n%s" % (timestamp, text) - self.client.add_comment(ticket, comment) - - def transition_ticket(self, ticket): - transitions = self.client.transitions(ticket) - for t in transitions: - if t['name'] == self.transition: - self.client.transition_issue(ticket, t['id']) - - def alert(self, matches): - # Reset arbitrary fields to pick up changes - self.get_arbitrary_fields() - if len(self.deferred_settings) > 0: - fields = self.client.fields() - for jira_field in self.deferred_settings: - value = lookup_es_key(matches[0], self.rule[jira_field][1:]) - self.set_jira_arg(jira_field, value, fields) - - title = self.create_title(matches) - - if self.bump_tickets: - ticket = self.find_existing_ticket(matches) - if ticket: - inactivity_datetime = ts_now() - datetime.timedelta(days=self.bump_after_inactivity) - if ts_to_dt(ticket.fields.updated) >= inactivity_datetime: - if self.pipeline is not None: - self.pipeline['jira_ticket'] = None - self.pipeline['jira_server'] = self.server - return None - elastalert_logger.info('Commenting on existing ticket %s' % (ticket.key)) - for match in matches: - try: - self.comment_on_ticket(ticket, match) - except JIRAError as e: - elastalert_logger.exception("Error while commenting on ticket %s: %s" % (ticket, e)) - if self.labels: - for label in self.labels: - try: - ticket.fields.labels.append(label) - except JIRAError as e: - elastalert_logger.exception("Error while appending labels to ticket %s: %s" % (ticket, e)) - if self.transition: - elastalert_logger.info('Transitioning existing ticket %s' % (ticket.key)) - try: - self.transition_ticket(ticket) - except JIRAError as e: - elastalert_logger.exception("Error while transitioning ticket %s: %s" % (ticket, e)) - - if self.pipeline is not None: - self.pipeline['jira_ticket'] = ticket - self.pipeline['jira_server'] = self.server - return None - if self.bump_only: - return None - - self.jira_args['summary'] = title - self.jira_args['description'] = self.create_alert_body(matches) - - try: - self.issue = self.client.create_issue(**self.jira_args) - - # You can not add watchers on initial creation. 
Only as a follow-up action - if self.watchers: - for watcher in self.watchers: - try: - self.client.add_watcher(self.issue.key, watcher) - except Exception as ex: - # Re-raise the exception, preserve the stack-trace, and give some - # context as to which watcher failed to be added - raise Exception( - "Exception encountered when trying to add '{0}' as a watcher. Does the user exist?\n{1}" .format( - watcher, - ex - )).with_traceback(sys.exc_info()[2]) - - except JIRAError as e: - raise EAException("Error creating JIRA ticket using jira_args (%s): %s" % (self.jira_args, e)) - elastalert_logger.info("Opened Jira ticket: %s" % (self.issue)) - - if self.pipeline is not None: - self.pipeline['jira_ticket'] = self.issue - self.pipeline['jira_server'] = self.server - - def create_alert_body(self, matches): - body = self.description + '\n' - body += self.get_aggregation_summary_text(matches) - if self.rule.get('alert_text_type') != 'aggregation_summary_only': - for match in matches: - body += str(JiraFormattedMatchString(self.rule, match)) - if len(matches) > 1: - body += '\n----------------------------------------\n' - return body - - def get_aggregation_summary_text(self, matches): - text = super(JiraAlerter, self).get_aggregation_summary_text(matches) - if text: - text = '{{noformat}}{0}{{noformat}}'.format(text) - return text - - def create_default_title(self, matches, for_search=False): - # If there is a query_key, use that in the title - - if 'query_key' in self.rule and lookup_es_key(matches[0], self.rule['query_key']): - title = 'ElastAlert: %s matched %s' % (lookup_es_key(matches[0], self.rule['query_key']), self.rule['name']) - else: - title = 'ElastAlert: %s' % (self.rule['name']) - - if for_search: - return title - - timestamp = matches[0].get(self.rule['timestamp_field']) - if timestamp: - title += ' - %s' % (pretty_ts(timestamp, self.rule.get('use_local_time'))) - - # Add count for spikes - count = matches[0].get('spike_count') - if count: - title += ' - %s+ events' % (count) - - return title - - def get_info(self): - return {'type': 'jira'} - - class CommandAlerter(Alerter): required_options = set(['command']) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 5a336c0ce..0ae3e9d00 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -16,6 +16,7 @@ from . import enhancements from . 
import ruletypes from .alerters.email import EmailAlerter +from .alerters.jira import JiraAlerter from .alerters.opsgenie import OpsGenieAlerter from .alerters.zabbix import ZabbixAlerter from .util import dt_to_ts @@ -61,7 +62,7 @@ class RulesLoader(object): # Used to map names of alerts to their classes alerts_mapping = { 'email': EmailAlerter, - 'jira': alerts.JiraAlerter, + 'jira': JiraAlerter, 'opsgenie': OpsGenieAlerter, 'stomp': alerts.StompAlerter, 'debug': alerts.DebugAlerter, diff --git a/tests/alerts_test.py b/tests/alerts_test.py index db6eba7d8..69f85c0c4 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -24,8 +24,6 @@ from elastalert.alerts import GoogleChatAlerter from elastalert.alerts import HiveAlerter from elastalert.alerts import HTTPPostAlerter -from elastalert.alerts import JiraAlerter -from elastalert.alerts import JiraFormattedMatchString from elastalert.alerts import LineNotifyAlerter from elastalert.alerts import MattermostAlerter from elastalert.alerts import MsTeamsAlerter @@ -35,6 +33,8 @@ from elastalert.alerts import SlackAlerter from elastalert.alerts import TelegramAlerter from elastalert.loaders import FileRulesLoader +from elastalert.alerters.jira import JiraAlerter +from elastalert.alerters.jira import JiraFormattedMatchString from elastalert.alerters.email import EmailAlerter from elastalert.alerters.opsgenie import OpsGenieAlerter from elastalert.alerters.zabbix import ZabbixAlerter @@ -1279,7 +1279,7 @@ def test_jira(): mock_priority = mock.Mock(id='5') - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value.priorities.return_value = [mock_priority] @@ -1310,7 +1310,7 @@ def test_jira(): # Search called if jira_bump_tickets rule['jira_bump_tickets'] = True - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value = mock.Mock() @@ -1326,7 +1326,7 @@ def test_jira(): # Remove a field if jira_ignore_in_title set rule['jira_ignore_in_title'] = 'test_term' - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value = mock.Mock() @@ -1340,7 +1340,7 @@ def test_jira(): assert 'test_value' not in mock_jira.mock_calls[3][1][0] # Issue is still created if search_issues throws an exception - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value = mock.Mock() @@ -1359,7 +1359,7 @@ def test_jira(): # Check ticket is bumped if it is updated 4 days ago mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} 
mock_jira.return_value = mock.Mock() @@ -1375,7 +1375,7 @@ def test_jira(): # Check ticket is bumped is not bumped if ticket is updated right now mock_issue.fields.updated = str(ts_now()) - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value = mock.Mock() @@ -1410,7 +1410,7 @@ def test_jira(): mock_fields = [ {'name': 'affected user', 'id': 'affected_user_id', 'schema': {'type': 'string'}} ] - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value = mock.Mock() @@ -1483,7 +1483,7 @@ def test_jira_arbitrary_field_support(): }, ] - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value.priorities.return_value = [mock_priority] @@ -1524,7 +1524,7 @@ def test_jira_arbitrary_field_support(): # Reference an arbitrary string field that is not defined on the JIRA server rule['jira_nonexistent_field'] = 'nonexistent field value' - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value.priorities.return_value = [mock_priority] @@ -1540,7 +1540,7 @@ def test_jira_arbitrary_field_support(): # Reference a watcher that does not exist rule['jira_watchers'] = 'invalid_watcher' - with mock.patch('elastalert.alerts.JIRA') as mock_jira, \ + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} mock_jira.return_value.priorities.return_value = [mock_priority] From 83e18099c1c935b32ea309d851f4644ae883a126 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Tue, 18 May 2021 08:18:36 +0100 Subject: [PATCH 0223/1065] Speed up unit tests by adding default parallelism - Also update pytest to a newer version, making other changes required to enable this - On a six-core machine, the test suite now runs in 8s as opposed to ~40s - On a single-core machine, the test suite now runs in ~60s as opposed to ~85s - Have also removed one incorrect use of `raises()` which has been deprecated --- requirements-dev.txt | 3 ++- tests/base_test.py | 12 ++++++------ tests/loaders_test.py | 2 +- tox.ini | 2 +- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a64fe430a..c94d8dc36 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,7 +5,8 @@ m2r2 pluggy>=0.12.0 pre-commit pylint<2.9 -pytest<3.7.0 +pytest==6.0.0 +pytest-xdist==2.2.1 setuptools sphinx_rtd_theme tox==3.23.1 diff --git a/tests/base_test.py b/tests/base_test.py index b86498b1d..cbb41272a 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -1159,7 +1159,7 @@ def test_wait_until_responsive(ea): ] -def test_wait_until_responsive_timeout_es_not_available(ea, capsys): +def 
test_wait_until_responsive_timeout_es_not_available(ea, caplog): """Bail out if ElasticSearch doesn't (quickly) become responsive.""" # Never becomes responsive :-) @@ -1175,8 +1175,8 @@ def test_wait_until_responsive_timeout_es_not_available(ea, capsys): assert exc.value.code == 1 # Ensure we get useful diagnostics. - output, errors = capsys.readouterr() - assert 'Could not reach ElasticSearch at "es:14900".' in errors + user, level, message = caplog.record_tuples[0] + assert 'Could not reach ElasticSearch at "es:14900".' in message # Slept until we passed the deadline. sleep.mock_calls == [ @@ -1186,7 +1186,7 @@ def test_wait_until_responsive_timeout_es_not_available(ea, capsys): ] -def test_wait_until_responsive_timeout_index_does_not_exist(ea, capsys): +def test_wait_until_responsive_timeout_index_does_not_exist(ea, caplog): """Bail out if ElasticSearch doesn't (quickly) become responsive.""" # Never becomes responsive :-) @@ -1202,8 +1202,8 @@ def test_wait_until_responsive_timeout_index_does_not_exist(ea, capsys): assert exc.value.code == 1 # Ensure we get useful diagnostics. - output, errors = capsys.readouterr() - assert 'Writeback alias "wb_a" does not exist, did you run `elastalert-create-index`?' in errors + user, level, message = caplog.record_tuples[0] + assert 'Writeback alias "wb_a" does not exist, did you run `elastalert-create-index`?' in message # Slept until we passed the deadline. sleep.mock_calls == [ diff --git a/tests/loaders_test.py b/tests/loaders_test.py index be29dfa9d..c45c73ce8 100644 --- a/tests/loaders_test.py +++ b/tests/loaders_test.py @@ -347,7 +347,7 @@ def test_raises_on_missing_config(): mock_rule_open.return_value = test_rule_copy with mock.patch('os.walk') as mock_walk: mock_walk.return_value = [('', [], ['testrule.yaml'])] - with pytest.raises(EAException, message='key %s should be required' % key): + with pytest.raises(EAException): rules = load_conf(test_args) rules['rules'] = rules['rules_loader'].load(rules) diff --git a/tox.ini b/tox.ini index 47e62caa6..52a8cbbe4 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,7 @@ envlist = py39,docs [testenv] deps = -rrequirements-dev.txt commands = - coverage run --source=elastalert/,tests/ -m pytest --strict {posargs} + coverage run --source=elastalert/,tests/ -m pytest --strict -n 4 {posargs} coverage report -m flake8 . 
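A note on the two test changes in the patch above. The `-n 4` added to the pytest invocation in tox.ini comes from the newly pinned pytest-xdist plugin: it shards the suite across four worker processes, which is safe here only as long as the tests do not rely on shared mutable state. The switch from `capsys` to `caplog` is one of the "other changes required" mentioned in the commit message: under the newer pytest, diagnostics emitted through `elastalert_logger` are picked up by pytest's logging capture rather than reliably appearing in `capsys`' captured stderr, so the tests assert against log records instead. A minimal sketch of the `caplog` pattern follows; `report_missing_index` and the `'elastalert'` logger name are illustrative assumptions, not the exact names in the codebase:

```python
import logging

# Hypothetical stand-in for elastalert.util.elastalert_logger
elastalert_logger = logging.getLogger('elastalert')


def report_missing_index(index):
    elastalert_logger.error(
        'Writeback index "%s" does not exist, did you run `elastalert-create-index`?', index)


def test_report_missing_index(caplog):
    # caplog is a built-in pytest fixture; record_tuples holds one
    # (logger_name, levelno, message) tuple per captured log record.
    with caplog.at_level(logging.ERROR):
        report_missing_index('wb')
    name, level, message = caplog.record_tuples[0]
    assert level == logging.ERROR
    assert 'Writeback index "wb" does not exist' in message
```

The tuple unpacking mirrors the `user, level, message = caplog.record_tuples[0]` line in the diff; whatever the test binds it to, the first element is the logger name.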
From fbf9916f80f6a8a0a69a36eadb2a720e88bb7703 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Tue, 18 May 2021 08:25:16 +0100 Subject: [PATCH 0224/1065] Update changelog --- CHANGELOG.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c5baaa020..6ce888988 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,17 @@ ## Other changes - None +# Unreleased + +## Breaking changes +- None + +## New features +- None + +## Other changes +- Speed up unit tests by adding default parallelism - [164](https://github.com/jertel/elastalert2/pull/164) - @ferozsalam + # 2.1.0 ## Breaking changes From 2eb266bbe8879d258bca132c72ad50ddd276cd3b Mon Sep 17 00:00:00 2001 From: "Charest, Cedric" Date: Tue, 18 May 2021 15:10:18 -0400 Subject: [PATCH 0225/1065] Fix #166 Remove unused writeback_alias --- config.yaml.example | 1 - elastalert/config.py | 1 - elastalert/create_index.py | 5 ----- elastalert/elastalert.py | 7 +++---- elastalert/test_rule.py | 1 - tests/config_test.py | 1 - tests/conftest.py | 2 -- tests/example.config.yaml | 1 - tests/loaders_test.py | 3 +-- 9 files changed, 4 insertions(+), 18 deletions(-) diff --git a/config.yaml.example b/config.yaml.example index 6679e249b..d0e6299b4 100644 --- a/config.yaml.example +++ b/config.yaml.example @@ -66,7 +66,6 @@ es_port: 9200 # This can be a unmapped index, but it is recommended that you run # elastalert-create-index to set a mapping writeback_index: elastalert_status -writeback_alias: elastalert_alerts # If an alert fails for some reason, ElastAlert will retry # sending the alert until this time period has elapsed diff --git a/elastalert/config.py b/elastalert/config.py index 28684adc9..1c1669448 100644 --- a/elastalert/config.py +++ b/elastalert/config.py @@ -72,7 +72,6 @@ def load_conf(args, defaults=None, overwrites=None): if required_globals - frozenset(list(conf.keys())): raise EAException('%s must contain %s' % (filename, ', '.join(required_globals - frozenset(list(conf.keys()))))) - conf.setdefault('writeback_alias', 'elastalert_alerts') conf.setdefault('max_query_size', 10000) conf.setdefault('scroll_keepalive', '30s') conf.setdefault('max_scrolling_count', 0) diff --git a/elastalert/create_index.py b/elastalert/create_index.py index d18a8d913..b01c82609 100644 --- a/elastalert/create_index.py +++ b/elastalert/create_index.py @@ -161,7 +161,6 @@ def main(): parser.add_argument('--no-verify-certs', dest='verify_certs', action='store_false', help='Do not verify TLS certificates') parser.add_argument('--index', help='Index name to create') - parser.add_argument('--alias', help='Alias name to create') parser.add_argument('--old-index', help='Old index name to copy') parser.add_argument('--send_get_body_as', default='GET', help='Method for querying Elasticsearch - POST, GET or source') @@ -208,7 +207,6 @@ def main(): client_cert = data.get('client_cert') client_key = data.get('client_key') index = args.index if args.index is not None else data.get('writeback_index') - alias = args.alias if args.alias is not None else data.get('writeback_alias') old_index = args.old_index if args.old_index is not None else None else: username = args.username if args.username else None @@ -236,9 +234,6 @@ def main(): index = args.index if args.index is not None else input('New index name? (Default elastalert_status) ') if not index: index = 'elastalert_status' - alias = args.alias if args.alias is not None else input('New alias name? 
(Default elastalert_alerts) ') - if not alias: - alias = 'elastalert_alias' old_index = (args.old_index if args.old_index is not None else input('Name of existing index to copy? (Default None) ')) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 9a47b1916..c25d211b2 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -154,7 +154,6 @@ def __init__(self, args): self.max_query_size = self.conf['max_query_size'] self.scroll_keepalive = self.conf['scroll_keepalive'] self.writeback_index = self.conf['writeback_index'] - self.writeback_alias = self.conf['writeback_alias'] self.run_every = self.conf['run_every'] self.alert_time_limit = self.conf['alert_time_limit'] self.old_query_limit = self.conf['old_query_limit'] @@ -1233,7 +1232,7 @@ def wait_until_responsive(self, timeout, clock=timeit.default_timer): ref = clock() while (clock() - ref) < timeout: try: - if self.writeback_es.indices.exists(self.writeback_alias): + if self.writeback_es.indices.exists(self.writeback_index): return except ConnectionError: pass @@ -1241,8 +1240,8 @@ def wait_until_responsive(self, timeout, clock=timeit.default_timer): if self.writeback_es.ping(): elastalert_logger.error( - 'Writeback alias "%s" does not exist, did you run `elastalert-create-index`?', - self.writeback_alias, + 'Writeback index "%s" does not exist, did you run `elastalert-create-index`?', + self.writeback_index, ) else: elastalert_logger.error( diff --git a/elastalert/test_rule.py b/elastalert/test_rule.py index d019eadde..0797985dd 100644 --- a/elastalert/test_rule.py +++ b/elastalert/test_rule.py @@ -390,7 +390,6 @@ def run_rule_test(self): 'es_host': 'localhost', 'es_port': 14900, 'writeback_index': 'wb', - 'writeback_alias': 'wb_a', 'max_query_size': 10000, 'alert_time_limit': {'hours': 24}, 'old_query_limit': {'weeks': 1}, diff --git a/tests/config_test.py b/tests/config_test.py index 1b6a16ee5..6c03f185e 100644 --- a/tests/config_test.py +++ b/tests/config_test.py @@ -29,6 +29,5 @@ def test_config_loads(): assert conf['es_password'] == 'password_from_env' assert conf['writeback_index'] == 'elastalert_status' - assert conf['writeback_alias'] == 'elastalert_alerts' assert conf['alert_time_limit'] == datetime.timedelta(days=2) diff --git a/tests/conftest.py b/tests/conftest.py index 6844296ee..0bb0b5325 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -166,7 +166,6 @@ def ea(): 'es_host': 'es', 'es_port': 14900, 'writeback_index': 'wb', - 'writeback_alias': 'wb_a', 'rules': rules, 'max_query_size': 10000, 'old_query_limit': datetime.timedelta(weeks=1), @@ -220,7 +219,6 @@ def ea_sixsix(): 'es_host': 'es', 'es_port': 14900, 'writeback_index': writeback_index, - 'writeback_alias': 'wb_a', 'rules': rules, 'max_query_size': 10000, 'old_query_limit': datetime.timedelta(weeks=1), diff --git a/tests/example.config.yaml b/tests/example.config.yaml index 44609eb27..113a55959 100644 --- a/tests/example.config.yaml +++ b/tests/example.config.yaml @@ -13,7 +13,6 @@ es_username: elastic es_password: $ELASTIC_PASS writeback_index: elastalert_status -writeback_alias: elastalert_alerts alert_time_limit: days: 2 diff --git a/tests/loaders_test.py b/tests/loaders_test.py index e93a4f690..1bd570659 100644 --- a/tests/loaders_test.py +++ b/tests/loaders_test.py @@ -18,8 +18,7 @@ 'buffer_time': {'minutes': 10}, 'es_host': 'elasticsearch.test', 'es_port': 12345, - 'writeback_index': 'test_index', - 'writeback_alias': 'test_alias'} + 'writeback_index': 'test_index'} test_rule = {'es_host': 'test_host', 'es_port': 
12345, From 8969d665eb669614149b6e2160cdc1e5b477297c Mon Sep 17 00:00:00 2001 From: "Charest, Cedric" Date: Tue, 18 May 2021 16:43:55 -0400 Subject: [PATCH 0226/1065] CHANGELOG update for #167 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6ce888988..08745f8a6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ ## Other changes - Speed up unit tests by adding default parallelism - [164](https://github.com/jertel/elastalert2/pull/164) - @ferozsalam +- Remove unused writeback_alias and fix --patience argument - [167](https://github.com/jertel/elastalert2/pull/167) - @mrfroggg # 2.1.0 From 23c774afb68181fa9b0ffbaa9a9ba8e2fc0983b4 Mon Sep 17 00:00:00 2001 From: "Charest, Cedric" Date: Tue, 18 May 2021 16:52:11 -0400 Subject: [PATCH 0227/1065] Fix base_test assert for writeback index --- tests/base_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/base_test.py b/tests/base_test.py index cbb41272a..3ff375118 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -1203,7 +1203,7 @@ def test_wait_until_responsive_timeout_index_does_not_exist(ea, caplog): # Ensure we get useful diagnostics. logger_name, level, message = caplog.record_tuples[0] - assert 'Writeback alias "wb_a" does not exist, did you run `elastalert-create-index`?' in message + assert 'Writeback index "wb" does not exist, did you run `elastalert-create-index`?' in message # Slept until we passed the deadline. assert sleep.mock_calls == [ From b56bd1f6a2c832e719607b9cd761979d833e33f8 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Wed, 19 May 2021 07:48:55 +0100 Subject: [PATCH 0228/1065] Fix bearer header initialization --- elastalert/create_index.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/elastalert/create_index.py b/elastalert/create_index.py index b01c82609..3664eb362 100644 --- a/elastalert/create_index.py +++ b/elastalert/create_index.py @@ -245,6 +245,11 @@ def main(): password=password, aws_region=aws_region, profile_name=args.profile) + + headers = {} + if bearer is not None: + headers.update({'Authorization': f'Bearer {bearer}'}) + es = Elasticsearch( host=host, port=port, @@ -253,7 +258,7 @@ def main(): verify_certs=verify_certs, connection_class=RequestsHttpConnection, http_auth=http_auth, - headers=bearer, + headers=headers, url_prefix=url_prefix, send_get_body_as=send_get_body_as, client_cert=client_cert, From e4a5f8a8bf7ce82ba917f6c2519fa353753b8a8c Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Thu, 20 May 2021 08:07:38 +0100 Subject: [PATCH 0229/1065] Update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 08745f8a6..cd53aa35a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ ## Other changes - Speed up unit tests by adding default parallelism - [164](https://github.com/jertel/elastalert2/pull/164) - @ferozsalam - Remove unused writeback_alias and fix --patience argument - [167](https://github.com/jertel/elastalert2/pull/167) - @mrfroggg 
+- Fix Bearer token auth in initialisation script - [169](https://github.com/jertel/elastalert2/pull/169) - @ferozsalam # 2.1.0 From 0db9e66ef63c0ef98f91f4a618440f67510373a3 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Thu, 20 May 2021 09:00:52 +0100 Subject: [PATCH 0230/1065] Migrate four more alerters out of alerts.py Migrate the following alerters: - Teams - Slack - Mattermost - AWS SNS --- elastalert/alerters/mattermost.py | 152 +++++++++++++++++++++ elastalert/alerters/slack.py | 168 ++++++++++++++++++++++++++++++ elastalert/alerters/sns.py | 41 ++++++++ elastalert/alerters/teams.py | 59 +++++++++++ elastalert/loaders.py | 12 ++- 5 files changed, 428 insertions(+), 4 deletions(-) create mode 100644 elastalert/alerters/mattermost.py create mode 100644 elastalert/alerters/slack.py create mode 100644 elastalert/alerters/sns.py create mode 100644 elastalert/alerters/teams.py diff --git a/elastalert/alerters/mattermost.py b/elastalert/alerters/mattermost.py new file mode 100644 index 000000000..56f4c860c --- /dev/null +++ b/elastalert/alerters/mattermost.py @@ -0,0 +1,152 @@ +import copy +import json +import requests +import warnings + +from ..alerts import Alerter +from ..alerts import DateTimeEncoder +from ..util import elastalert_logger +from ..util import lookup_es_key +from ..util import EAException +from requests import RequestException + + +class MattermostAlerter(Alerter): + """ Creates a Mattermost post for each alert """ + required_options = frozenset(['mattermost_webhook_url']) + + def __init__(self, rule): + super(MattermostAlerter, self).__init__(rule) + + # HTTP config + self.mattermost_webhook_url = self.rule['mattermost_webhook_url'] + if isinstance(self.mattermost_webhook_url, str): + self.mattermost_webhook_url = [self.mattermost_webhook_url] + self.mattermost_proxy = self.rule.get('mattermost_proxy', None) + self.mattermost_ignore_ssl_errors = self.rule.get('mattermost_ignore_ssl_errors', False) + + # Override webhook config + self.mattermost_username_override = self.rule.get('mattermost_username_override', 'elastalert') + self.mattermost_channel_override = self.rule.get('mattermost_channel_override', '') + self.mattermost_icon_url_override = self.rule.get('mattermost_icon_url_override', '') + + # Message properties + self.mattermost_msg_pretext = self.rule.get('mattermost_msg_pretext', '') + self.mattermost_msg_color = self.rule.get('mattermost_msg_color', 'danger') + self.mattermost_msg_fields = self.rule.get('mattermost_msg_fields', '') + self.mattermost_image_url = self.rule.get('mattermost_image_url', '') + self.mattermost_title_link = self.rule.get('mattermost_title_link', '') + self.mattermost_footer = self.rule.get('mattermost_footer', '') + self.mattermost_footer_icon = self.rule.get('mattermost_footer_icon', '') + self.mattermost_thumb_url = self.rule.get('mattermost_thumb_url', '') + self.mattermost_author_name = self.rule.get('mattermost_author_name', '') + self.mattermost_author_link = self.rule.get('mattermost_author_link', '') + self.mattermost_author_icon = self.rule.get('mattermost_author_icon', '') + + def get_aggregation_summary_text__maximum_width(self): + width = super(MattermostAlerter, self).get_aggregation_summary_text__maximum_width() + # Reduced maximum width for prettier Mattermost display. 
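+ # (75 columns is an assumption that keeps the code-fenced aggregation table from wrapping in a typical Mattermost attachment)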
+ return min(width, 75) + + def get_aggregation_summary_text(self, matches): + text = super(MattermostAlerter, self).get_aggregation_summary_text(matches) + if text: + text = '```\n{0}```\n'.format(text) + return text + + def populate_fields(self, matches): + alert_fields = [] + missing = self.rule.get('alert_missing_value', '') + for field in self.mattermost_msg_fields: + field = copy.copy(field) + if 'args' in field: + args_values = [lookup_es_key(matches[0], arg) or missing for arg in field['args']] + if 'value' in field: + field['value'] = field['value'].format(*args_values) + else: + field['value'] = "\n".join(str(arg) for arg in args_values) + del(field['args']) + alert_fields.append(field) + return alert_fields + + def alert(self, matches): + body = self.create_alert_body(matches) + title = self.create_title(matches) + + # post to mattermost + headers = {'content-type': 'application/json'} + # set https proxy, if it was provided + proxies = {'https': self.mattermost_proxy} if self.mattermost_proxy else None + payload = { + 'attachments': [ + { + 'fallback': "{0}: {1}".format(title, self.mattermost_msg_pretext), + 'color': self.mattermost_msg_color, + 'title': title, + 'pretext': self.mattermost_msg_pretext, + 'fields': [] + } + ] + } + + if self.rule.get('alert_text_type') == 'alert_text_only': + payload['attachments'][0]['text'] = body + else: + payload['text'] = body + + if self.mattermost_msg_fields != '': + payload['attachments'][0]['fields'] = self.populate_fields(matches) + + if self.mattermost_icon_url_override != '': + payload['icon_url'] = self.mattermost_icon_url_override + + if self.mattermost_username_override != '': + payload['username'] = self.mattermost_username_override + + if self.mattermost_channel_override != '': + payload['channel'] = self.mattermost_channel_override + + if self.mattermost_title_link != '': + payload['attachments'][0]['title_link'] = self.mattermost_title_link + + if self.mattermost_footer != '': + payload['attachments'][0]['footer'] = self.mattermost_footer + + if self.mattermost_footer_icon != '': + payload['attachments'][0]['footer_icon'] = self.mattermost_footer_icon + + if self.mattermost_image_url != '': + payload['attachments'][0]['image_url'] = self.mattermost_image_url + + if self.mattermost_thumb_url != '': + payload['attachments'][0]['thumb_url'] = self.mattermost_thumb_url + + if self.mattermost_author_name != '': + payload['attachments'][0]['author_name'] = self.mattermost_author_name + + if self.mattermost_author_link != '': + payload['attachments'][0]['author_link'] = self.mattermost_author_link + + if self.mattermost_author_icon != '': + payload['attachments'][0]['author_icon'] = self.mattermost_author_icon + + for url in self.mattermost_webhook_url: + try: + if self.mattermost_ignore_ssl_errors: + requests.urllib3.disable_warnings() + + response = requests.post( + url, data=json.dumps(payload, cls=DateTimeEncoder), + headers=headers, verify=not self.mattermost_ignore_ssl_errors, + proxies=proxies) + + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Mattermost: %s" % e) + elastalert_logger.info("Alert sent to Mattermost") + + def get_info(self): + return {'type': 'mattermost', + 'mattermost_username_override': self.mattermost_username_override, + 'mattermost_webhook_url': self.mattermost_webhook_url} diff --git a/elastalert/alerters/slack.py b/elastalert/alerters/slack.py new file mode 100644 index 000000000..578128e95 --- /dev/null +++ 
b/elastalert/alerters/slack.py @@ -0,0 +1,168 @@ +import copy +import json +import requests +import warnings + +from ..alerts import Alerter +from ..alerts import DateTimeEncoder +from ..util import elastalert_logger +from ..util import lookup_es_key +from ..util import EAException +from requests.exceptions import RequestException + + +class SlackAlerter(Alerter): + """ Creates a Slack room message for each alert """ + required_options = frozenset(['slack_webhook_url']) + + def __init__(self, rule): + super(SlackAlerter, self).__init__(rule) + self.slack_webhook_url = self.rule['slack_webhook_url'] + if isinstance(self.slack_webhook_url, str): + self.slack_webhook_url = [self.slack_webhook_url] + self.slack_proxy = self.rule.get('slack_proxy', None) + self.slack_username_override = self.rule.get('slack_username_override', 'elastalert') + self.slack_channel_override = self.rule.get('slack_channel_override', '') + if isinstance(self.slack_channel_override, str): + self.slack_channel_override = [self.slack_channel_override] + self.slack_title_link = self.rule.get('slack_title_link', '') + self.slack_title = self.rule.get('slack_title', '') + self.slack_emoji_override = self.rule.get('slack_emoji_override', ':ghost:') + self.slack_icon_url_override = self.rule.get('slack_icon_url_override', '') + self.slack_msg_color = self.rule.get('slack_msg_color', 'danger') + self.slack_parse_override = self.rule.get('slack_parse_override', 'none') + self.slack_text_string = self.rule.get('slack_text_string', '') + self.slack_alert_fields = self.rule.get('slack_alert_fields', '') + self.slack_ignore_ssl_errors = self.rule.get('slack_ignore_ssl_errors', False) + self.slack_timeout = self.rule.get('slack_timeout', 10) + self.slack_ca_certs = self.rule.get('slack_ca_certs') + self.slack_attach_kibana_discover_url = self.rule.get('slack_attach_kibana_discover_url', False) + self.slack_kibana_discover_color = self.rule.get('slack_kibana_discover_color', '#ec4b98') + self.slack_kibana_discover_title = self.rule.get('slack_kibana_discover_title', 'Discover in Kibana') + self.slack_footer = self.rule.get('slack_footer', '') + self.slack_footer_icon = self.rule.get('slack_footer_icon', '') + self.slack_image_url = self.rule.get('slack_image_url', '') + self.slack_thumb_url = self.rule.get('slack_thumb_url', '') + self.slack_author_name = self.rule.get('slack_author_name', '') + self.slack_author_link = self.rule.get('slack_author_link', '') + self.slack_author_icon = self.rule.get('slack_author_icon', '') + self.slack_msg_pretext = self.rule.get('slack_msg_pretext', '') + + def format_body(self, body): + # https://api.slack.com/docs/formatting + return body + + def get_aggregation_summary_text__maximum_width(self): + width = super(SlackAlerter, self).get_aggregation_summary_text__maximum_width() + # Reduced maximum width for prettier Slack display. 
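+ # (same 75-column cap as the Mattermost alerter, assumed narrow enough for Slack's attachment pane)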
+ return min(width, 75) + + def get_aggregation_summary_text(self, matches): + text = super(SlackAlerter, self).get_aggregation_summary_text(matches) + if text: + text = '```\n{0}```\n'.format(text) + return text + + def populate_fields(self, matches): + alert_fields = [] + for arg in self.slack_alert_fields: + arg = copy.copy(arg) + arg['value'] = lookup_es_key(matches[0], arg['value']) + alert_fields.append(arg) + return alert_fields + + def alert(self, matches): + body = self.create_alert_body(matches) + + body = self.format_body(body) + # post to slack + headers = {'content-type': 'application/json'} + # set https proxy, if it was provided + proxies = {'https': self.slack_proxy} if self.slack_proxy else None + payload = { + 'username': self.slack_username_override, + 'parse': self.slack_parse_override, + 'text': self.slack_text_string, + 'attachments': [ + { + 'color': self.slack_msg_color, + 'title': self.create_title(matches), + 'text': body, + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ] + } + + # if we have defined fields, populate notable fields for the alert + if self.slack_alert_fields != '': + payload['attachments'][0]['fields'] = self.populate_fields(matches) + + if self.slack_icon_url_override != '': + payload['icon_url'] = self.slack_icon_url_override + else: + payload['icon_emoji'] = self.slack_emoji_override + + if self.slack_title != '': + payload['attachments'][0]['title'] = self.slack_title + + if self.slack_title_link != '': + payload['attachments'][0]['title_link'] = self.slack_title_link + + if self.slack_footer != '': + payload['attachments'][0]['footer'] = self.slack_footer + + if self.slack_footer_icon != '': + payload['attachments'][0]['footer_icon'] = self.slack_footer_icon + + if self.slack_image_url != '': + payload['attachments'][0]['image_url'] = self.slack_image_url + + if self.slack_thumb_url != '': + payload['attachments'][0]['thumb_url'] = self.slack_thumb_url + + if self.slack_author_name != '': + payload['attachments'][0]['author_name'] = self.slack_author_name + + if self.slack_author_link != '': + payload['attachments'][0]['author_link'] = self.slack_author_link + + if self.slack_author_icon != '': + payload['attachments'][0]['author_icon'] = self.slack_author_icon + + if self.slack_msg_pretext != '': + payload['attachments'][0]['pretext'] = self.slack_msg_pretext + + if self.slack_attach_kibana_discover_url: + kibana_discover_url = lookup_es_key(matches[0], 'kibana_discover_url') + if kibana_discover_url: + payload['attachments'].append({ + 'color': self.slack_kibana_discover_color, + 'title': self.slack_kibana_discover_title, + 'title_link': kibana_discover_url + }) + + for url in self.slack_webhook_url: + for channel_override in self.slack_channel_override: + try: + if self.slack_ca_certs: + verify = self.slack_ca_certs + else: + verify = not self.slack_ignore_ssl_errors + if self.slack_ignore_ssl_errors: + requests.packages.urllib3.disable_warnings() + payload['channel'] = channel_override + response = requests.post( + url, data=json.dumps(payload, cls=DateTimeEncoder), + headers=headers, verify=verify, + proxies=proxies, + timeout=self.slack_timeout) + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to slack: %s" % e) + elastalert_logger.info("Alert '%s' sent to Slack" % self.rule['name']) + + def get_info(self): + return {'type': 'slack', + 'slack_username_override': self.slack_username_override} diff --git a/elastalert/alerters/sns.py 
b/elastalert/alerters/sns.py new file mode 100644 index 000000000..06bc7b835 --- /dev/null +++ b/elastalert/alerters/sns.py @@ -0,0 +1,41 @@ +import boto3 + +from ..alerts import Alerter +from ..util import elastalert_logger + + +class SnsAlerter(Alerter): + """ Send alert using AWS SNS service """ + required_options = frozenset(['sns_topic_arn']) + + def __init__(self, *args): + super(SnsAlerter, self).__init__(*args) + self.sns_topic_arn = self.rule.get('sns_topic_arn', '') + self.sns_aws_access_key_id = self.rule.get('sns_aws_access_key_id') + self.sns_aws_secret_access_key = self.rule.get('sns_aws_secret_access_key') + self.sns_aws_region = self.rule.get('sns_aws_region', 'us-east-1') + self.profile = self.rule.get('sns_aws_profile', None) + + def create_default_title(self, matches): + subject = 'ElastAlert: %s' % (self.rule['name']) + return subject + + def alert(self, matches): + body = self.create_alert_body(matches) + + if self.profile is None: + session = boto3.Session( + aws_access_key_id=self.sns_aws_access_key_id, + aws_secret_access_key=self.sns_aws_secret_access_key, + region_name=self.sns_aws_region + ) + else: + session = boto3.Session(profile_name=self.profile) + + sns_client = session.client('sns') + sns_client.publish( + TopicArn=self.sns_topic_arn, + Message=body, + Subject=self.create_title(matches) + ) + elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn)) diff --git a/elastalert/alerters/teams.py b/elastalert/alerters/teams.py new file mode 100644 index 000000000..c242982a9 --- /dev/null +++ b/elastalert/alerters/teams.py @@ -0,0 +1,59 @@ +import json +import requests + +from ..alerts import Alerter +from ..alerts import DateTimeEncoder +from ..util import EAException +from ..util import elastalert_logger +from requests.exceptions import RequestException + + +class MsTeamsAlerter(Alerter): + """ Creates a Microsoft Teams Conversation Message for each alert """ + required_options = frozenset(['ms_teams_webhook_url', 'ms_teams_alert_summary']) + + def __init__(self, rule): + super(MsTeamsAlerter, self).__init__(rule) + self.ms_teams_webhook_url = self.rule['ms_teams_webhook_url'] + if isinstance(self.ms_teams_webhook_url, str): + self.ms_teams_webhook_url = [self.ms_teams_webhook_url] + self.ms_teams_proxy = self.rule.get('ms_teams_proxy', None) + self.ms_teams_alert_summary = self.rule.get('ms_teams_alert_summary', 'ElastAlert Message') + self.ms_teams_alert_fixed_width = self.rule.get('ms_teams_alert_fixed_width', False) + self.ms_teams_theme_color = self.rule.get('ms_teams_theme_color', '') + + def format_body(self, body): + if self.ms_teams_alert_fixed_width: + body = body.replace('`', "'") + body = "```{0}```".format('```\n\n```'.join(x for x in body.split('\n'))).replace('\n``````', '') + return body + + def alert(self, matches): + body = self.create_alert_body(matches) + + body = self.format_body(body) + # post to Teams + headers = {'content-type': 'application/json'} + # set https proxy, if it was provided + proxies = {'https': self.ms_teams_proxy} if self.ms_teams_proxy else None + payload = { + '@type': 'MessageCard', + '@context': 'http://schema.org/extensions', + 'summary': self.ms_teams_alert_summary, + 'title': self.create_title(matches), + 'text': body + } + if self.ms_teams_theme_color != '': + payload['themeColor'] = self.ms_teams_theme_color + + for url in self.ms_teams_webhook_url: + try: + response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies) + response.raise_for_status() + 
except RequestException as e: + raise EAException("Error posting to ms teams: %s" % e) + elastalert_logger.info("Alert sent to MS Teams") + + def get_info(self): + return {'type': 'ms_teams', + 'ms_teams_webhook_url': self.ms_teams_webhook_url} diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 0ae3e9d00..5898866d2 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -17,7 +17,11 @@ from . import ruletypes from .alerters.email import EmailAlerter from .alerters.jira import JiraAlerter +from .alerters.mattermost import MattermostAlerter from .alerters.opsgenie import OpsGenieAlerter +from .alerters.teams import MsTeamsAlerter +from .alerters.slack import SlackAlerter +from .alerters.sns import SnsAlerter from .alerters.zabbix import ZabbixAlerter from .util import dt_to_ts from .util import dt_to_ts_with_format @@ -67,10 +71,10 @@ class RulesLoader(object): 'stomp': alerts.StompAlerter, 'debug': alerts.DebugAlerter, 'command': alerts.CommandAlerter, - 'sns': alerts.SnsAlerter, - 'ms_teams': alerts.MsTeamsAlerter, - 'slack': alerts.SlackAlerter, - 'mattermost': alerts.MattermostAlerter, + 'sns': SnsAlerter, + 'ms_teams': MsTeamsAlerter, + 'slack': SlackAlerter, + 'mattermost': MattermostAlerter, 'pagerduty': alerts.PagerDutyAlerter, 'exotel': alerts.ExotelAlerter, 'twilio': alerts.TwilioAlerter, From e4f4909d4897414658a0a36f7eb48572559f2bdd Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Thu, 20 May 2021 09:28:52 +0100 Subject: [PATCH 0231/1065] Remove migrated alerters from alerts.py --- elastalert/alerts.py | 388 ------------------------------------------- tests/alerts_test.py | 6 +- 2 files changed, 3 insertions(+), 391 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index e207f914a..f059cfbbd 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -425,394 +425,6 @@ def get_info(self): 'command': ' '.join(self.last_command)} -class SnsAlerter(Alerter): - """ Send alert using AWS SNS service """ - required_options = frozenset(['sns_topic_arn']) - - def __init__(self, *args): - super(SnsAlerter, self).__init__(*args) - self.sns_topic_arn = self.rule.get('sns_topic_arn', '') - self.sns_aws_access_key_id = self.rule.get('sns_aws_access_key_id') - self.sns_aws_secret_access_key = self.rule.get('sns_aws_secret_access_key') - self.sns_aws_region = self.rule.get('sns_aws_region', 'us-east-1') - self.profile = self.rule.get('boto_profile', None) # Deprecated - self.profile = self.rule.get('sns_aws_profile', None) - - def create_default_title(self, matches): - subject = 'ElastAlert: %s' % (self.rule['name']) - return subject - - def alert(self, matches): - body = self.create_alert_body(matches) - - if self.profile is None: - session = boto3.Session( - aws_access_key_id=self.sns_aws_access_key_id, - aws_secret_access_key=self.sns_aws_access_key_id, - region_name=self.sns_aws_region - ) - else: - session = boto3.Session(profile_name=self.profile) - - sns_client = session.client('sns') - sns_client.publish( - TopicArn=self.sns_topic_arn, - Message=body, - Subject=self.create_title(matches) - ) - elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn)) - - -class MsTeamsAlerter(Alerter): - """ Creates a Microsoft Teams Conversation Message for each alert """ - required_options = frozenset(['ms_teams_webhook_url', 'ms_teams_alert_summary']) - - def __init__(self, rule): - super(MsTeamsAlerter, self).__init__(rule) - self.ms_teams_webhook_url = self.rule['ms_teams_webhook_url'] - if isinstance(self.ms_teams_webhook_url, str): 
- self.ms_teams_webhook_url = [self.ms_teams_webhook_url] - self.ms_teams_proxy = self.rule.get('ms_teams_proxy', None) - self.ms_teams_alert_summary = self.rule.get('ms_teams_alert_summary', 'ElastAlert Message') - self.ms_teams_alert_fixed_width = self.rule.get('ms_teams_alert_fixed_width', False) - self.ms_teams_theme_color = self.rule.get('ms_teams_theme_color', '') - - def format_body(self, body): - if self.ms_teams_alert_fixed_width: - body = body.replace('`', "'") - body = "```{0}```".format('```\n\n```'.join(x for x in body.split('\n'))).replace('\n``````', '') - return body - - def alert(self, matches): - body = self.create_alert_body(matches) - - body = self.format_body(body) - # post to Teams - headers = {'content-type': 'application/json'} - # set https proxy, if it was provided - proxies = {'https': self.ms_teams_proxy} if self.ms_teams_proxy else None - payload = { - '@type': 'MessageCard', - '@context': 'http://schema.org/extensions', - 'summary': self.ms_teams_alert_summary, - 'title': self.create_title(matches), - 'text': body - } - if self.ms_teams_theme_color != '': - payload['themeColor'] = self.ms_teams_theme_color - - for url in self.ms_teams_webhook_url: - try: - response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies) - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to ms teams: %s" % e) - elastalert_logger.info("Alert sent to MS Teams") - - def get_info(self): - return {'type': 'ms_teams', - 'ms_teams_webhook_url': self.ms_teams_webhook_url} - - -class SlackAlerter(Alerter): - """ Creates a Slack room message for each alert """ - required_options = frozenset(['slack_webhook_url']) - - def __init__(self, rule): - super(SlackAlerter, self).__init__(rule) - self.slack_webhook_url = self.rule['slack_webhook_url'] - if isinstance(self.slack_webhook_url, str): - self.slack_webhook_url = [self.slack_webhook_url] - self.slack_proxy = self.rule.get('slack_proxy', None) - self.slack_username_override = self.rule.get('slack_username_override', 'elastalert') - self.slack_channel_override = self.rule.get('slack_channel_override', '') - if isinstance(self.slack_channel_override, str): - self.slack_channel_override = [self.slack_channel_override] - self.slack_title_link = self.rule.get('slack_title_link', '') - self.slack_title = self.rule.get('slack_title', '') - self.slack_emoji_override = self.rule.get('slack_emoji_override', ':ghost:') - self.slack_icon_url_override = self.rule.get('slack_icon_url_override', '') - self.slack_msg_color = self.rule.get('slack_msg_color', 'danger') - self.slack_parse_override = self.rule.get('slack_parse_override', 'none') - self.slack_text_string = self.rule.get('slack_text_string', '') - self.slack_alert_fields = self.rule.get('slack_alert_fields', '') - self.slack_ignore_ssl_errors = self.rule.get('slack_ignore_ssl_errors', False) - self.slack_timeout = self.rule.get('slack_timeout', 10) - self.slack_ca_certs = self.rule.get('slack_ca_certs') - self.slack_attach_kibana_discover_url = self.rule.get('slack_attach_kibana_discover_url', False) - self.slack_kibana_discover_color = self.rule.get('slack_kibana_discover_color', '#ec4b98') - self.slack_kibana_discover_title = self.rule.get('slack_kibana_discover_title', 'Discover in Kibana') - self.slack_footer = self.rule.get('slack_footer', '') - self.slack_footer_icon = self.rule.get('slack_footer_icon', '') - self.slack_image_url = self.rule.get('slack_image_url', '') - self.slack_thumb_url = 
self.rule.get('slack_thumb_url', '') - self.slack_author_name = self.rule.get('slack_author_name', '') - self.slack_author_link = self.rule.get('slack_author_link', '') - self.slack_author_icon = self.rule.get('slack_author_icon', '') - self.slack_msg_pretext = self.rule.get('slack_msg_pretext', '') - - def format_body(self, body): - # https://api.slack.com/docs/formatting - return body - - def get_aggregation_summary_text__maximum_width(self): - width = super(SlackAlerter, self).get_aggregation_summary_text__maximum_width() - # Reduced maximum width for prettier Slack display. - return min(width, 75) - - def get_aggregation_summary_text(self, matches): - text = super(SlackAlerter, self).get_aggregation_summary_text(matches) - if text: - text = '```\n{0}```\n'.format(text) - return text - - def populate_fields(self, matches): - alert_fields = [] - for arg in self.slack_alert_fields: - arg = copy.copy(arg) - arg['value'] = lookup_es_key(matches[0], arg['value']) - alert_fields.append(arg) - return alert_fields - - def alert(self, matches): - body = self.create_alert_body(matches) - - body = self.format_body(body) - # post to slack - headers = {'content-type': 'application/json'} - # set https proxy, if it was provided - proxies = {'https': self.slack_proxy} if self.slack_proxy else None - payload = { - 'username': self.slack_username_override, - 'parse': self.slack_parse_override, - 'text': self.slack_text_string, - 'attachments': [ - { - 'color': self.slack_msg_color, - 'title': self.create_title(matches), - 'text': body, - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ] - } - - # if we have defined fields, populate noteable fields for the alert - if self.slack_alert_fields != '': - payload['attachments'][0]['fields'] = self.populate_fields(matches) - - if self.slack_icon_url_override != '': - payload['icon_url'] = self.slack_icon_url_override - else: - payload['icon_emoji'] = self.slack_emoji_override - - if self.slack_title != '': - payload['attachments'][0]['title'] = self.slack_title - - if self.slack_title_link != '': - payload['attachments'][0]['title_link'] = self.slack_title_link - - if self.slack_footer != '': - payload['attachments'][0]['footer'] = self.slack_footer - - if self.slack_footer_icon != '': - payload['attachments'][0]['footer_icon'] = self.slack_footer_icon - - if self.slack_image_url != '': - payload['attachments'][0]['image_url'] = self.slack_image_url - - if self.slack_thumb_url != '': - payload['attachments'][0]['thumb_url'] = self.slack_thumb_url - - if self.slack_author_name != '': - payload['attachments'][0]['author_name'] = self.slack_author_name - - if self.slack_author_link != '': - payload['attachments'][0]['author_link'] = self.slack_author_link - - if self.slack_author_icon != '': - payload['attachments'][0]['author_icon'] = self.slack_author_icon - - if self.slack_msg_pretext != '': - payload['attachments'][0]['pretext'] = self.slack_msg_pretext - - if self.slack_attach_kibana_discover_url: - kibana_discover_url = lookup_es_key(matches[0], 'kibana_discover_url') - if kibana_discover_url: - payload['attachments'].append({ - 'color': self.slack_kibana_discover_color, - 'title': self.slack_kibana_discover_title, - 'title_link': kibana_discover_url - }) - - for url in self.slack_webhook_url: - for channel_override in self.slack_channel_override: - try: - if self.slack_ca_certs: - verify = self.slack_ca_certs - else: - verify = not self.slack_ignore_ssl_errors - if self.slack_ignore_ssl_errors: - requests.packages.urllib3.disable_warnings() - 
payload['channel'] = channel_override - response = requests.post( - url, data=json.dumps(payload, cls=DateTimeEncoder), - headers=headers, verify=verify, - proxies=proxies, - timeout=self.slack_timeout) - warnings.resetwarnings() - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to slack: %s" % e) - elastalert_logger.info("Alert '%s' sent to Slack" % self.rule['name']) - - def get_info(self): - return {'type': 'slack', - 'slack_username_override': self.slack_username_override} - - -class MattermostAlerter(Alerter): - """ Creates a Mattermsot post for each alert """ - required_options = frozenset(['mattermost_webhook_url']) - - def __init__(self, rule): - super(MattermostAlerter, self).__init__(rule) - - # HTTP config - self.mattermost_webhook_url = self.rule['mattermost_webhook_url'] - if isinstance(self.mattermost_webhook_url, str): - self.mattermost_webhook_url = [self.mattermost_webhook_url] - self.mattermost_proxy = self.rule.get('mattermost_proxy', None) - self.mattermost_ignore_ssl_errors = self.rule.get('mattermost_ignore_ssl_errors', False) - - # Override webhook config - self.mattermost_username_override = self.rule.get('mattermost_username_override', 'elastalert') - self.mattermost_channel_override = self.rule.get('mattermost_channel_override', '') - self.mattermost_icon_url_override = self.rule.get('mattermost_icon_url_override', '') - - # Message properties - self.mattermost_msg_pretext = self.rule.get('mattermost_msg_pretext', '') - self.mattermost_msg_color = self.rule.get('mattermost_msg_color', 'danger') - self.mattermost_msg_fields = self.rule.get('mattermost_msg_fields', '') - self.mattermost_image_url = self.rule.get('mattermost_image_url', '') - self.mattermost_title_link = self.rule.get('mattermost_title_link', '') - self.mattermost_footer = self.rule.get('mattermost_footer', '') - self.mattermost_footer_icon = self.rule.get('mattermost_footer_icon', '') - self.mattermost_image_url = self.rule.get('mattermost_image_url', '') - self.mattermost_thumb_url = self.rule.get('mattermost_thumb_url', '') - self.mattermost_author_name = self.rule.get('mattermost_author_name', '') - self.mattermost_author_link = self.rule.get('mattermost_author_link', '') - self.mattermost_author_icon = self.rule.get('mattermost_author_icon', '') - - def get_aggregation_summary_text__maximum_width(self): - width = super(MattermostAlerter, self).get_aggregation_summary_text__maximum_width() - # Reduced maximum width for prettier Mattermost display. 
- return min(width, 75) - - def get_aggregation_summary_text(self, matches): - text = super(MattermostAlerter, self).get_aggregation_summary_text(matches) - if text: - text = '```\n{0}```\n'.format(text) - return text - - def populate_fields(self, matches): - alert_fields = [] - missing = self.rule.get('alert_missing_value', '') - for field in self.mattermost_msg_fields: - field = copy.copy(field) - if 'args' in field: - args_values = [lookup_es_key(matches[0], arg) or missing for arg in field['args']] - if 'value' in field: - field['value'] = field['value'].format(*args_values) - else: - field['value'] = "\n".join(str(arg) for arg in args_values) - del(field['args']) - alert_fields.append(field) - return alert_fields - - def alert(self, matches): - body = self.create_alert_body(matches) - title = self.create_title(matches) - - # post to mattermost - headers = {'content-type': 'application/json'} - # set https proxy, if it was provided - proxies = {'https': self.mattermost_proxy} if self.mattermost_proxy else None - payload = { - 'attachments': [ - { - 'fallback': "{0}: {1}".format(title, self.mattermost_msg_pretext), - 'color': self.mattermost_msg_color, - 'title': title, - 'pretext': self.mattermost_msg_pretext, - 'fields': [] - } - ] - } - - if self.rule.get('alert_text_type') == 'alert_text_only': - payload['attachments'][0]['text'] = body - else: - payload['text'] = body - - if self.mattermost_msg_fields != '': - payload['attachments'][0]['fields'] = self.populate_fields(matches) - - if self.mattermost_icon_url_override != '': - payload['icon_url'] = self.mattermost_icon_url_override - - if self.mattermost_username_override != '': - payload['username'] = self.mattermost_username_override - - if self.mattermost_channel_override != '': - payload['channel'] = self.mattermost_channel_override - - if self.mattermost_title_link != '': - payload['attachments'][0]['title_link'] = self.mattermost_title_link - - if self.mattermost_footer != '': - payload['attachments'][0]['footer'] = self.mattermost_footer - - if self.mattermost_footer_icon != '': - payload['attachments'][0]['footer_icon'] = self.mattermost_footer_icon - - if self.mattermost_image_url != '': - payload['attachments'][0]['image_url'] = self.mattermost_image_url - - if self.mattermost_thumb_url != '': - payload['attachments'][0]['thumb_url'] = self.mattermost_thumb_url - - if self.mattermost_author_name != '': - payload['attachments'][0]['author_name'] = self.mattermost_author_name - - if self.mattermost_author_link != '': - payload['attachments'][0]['author_link'] = self.mattermost_author_link - - if self.mattermost_author_icon != '': - payload['attachments'][0]['author_icon'] = self.mattermost_author_icon - - for url in self.mattermost_webhook_url: - try: - if self.mattermost_ignore_ssl_errors: - requests.urllib3.disable_warnings() - - response = requests.post( - url, data=json.dumps(payload, cls=DateTimeEncoder), - headers=headers, verify=not self.mattermost_ignore_ssl_errors, - proxies=proxies) - - warnings.resetwarnings() - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to Mattermost: %s" % e) - elastalert_logger.info("Alert sent to Mattermost") - - def get_info(self): - return {'type': 'mattermost', - 'mattermost_username_override': self.mattermost_username_override, - 'mattermost_webhook_url': self.mattermost_webhook_url} - - class PagerDutyAlerter(Alerter): """ Create an incident on PagerDuty for each alert """ required_options = frozenset(['pagerduty_service_key', 
'pagerduty_client_name']) diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 69f85c0c4..ccbdc8842 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -25,18 +25,18 @@ from elastalert.alerts import HiveAlerter from elastalert.alerts import HTTPPostAlerter from elastalert.alerts import LineNotifyAlerter -from elastalert.alerts import MattermostAlerter -from elastalert.alerts import MsTeamsAlerter from elastalert.alerts import PagerDutyAlerter from elastalert.alerts import PagerTreeAlerter from elastalert.alerts import ServiceNowAlerter -from elastalert.alerts import SlackAlerter from elastalert.alerts import TelegramAlerter from elastalert.loaders import FileRulesLoader from elastalert.alerters.jira import JiraAlerter from elastalert.alerters.jira import JiraFormattedMatchString from elastalert.alerters.email import EmailAlerter +from elastalert.alerters.mattermost import MattermostAlerter from elastalert.alerters.opsgenie import OpsGenieAlerter +from elastalert.alerters.slack import SlackAlerter +from elastalert.alerters.teams import MsTeamsAlerter from elastalert.alerters.zabbix import ZabbixAlerter from elastalert.alerts import VictorOpsAlerter from elastalert.util import ts_add From 918c9bcb39d90d8c23aa6add003802fa210e86d3 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Fri, 21 May 2021 08:03:42 +0100 Subject: [PATCH 0232/1065] Migrate the PagerDuty alerter --- elastalert/alerts.py | 141 ------------------------------------------ elastalert/loaders.py | 3 +- tests/alerts_test.py | 2 +- 3 files changed, 3 insertions(+), 143 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index f059cfbbd..4605757d8 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -425,147 +425,6 @@ def get_info(self): 'command': ' '.join(self.last_command)} -class PagerDutyAlerter(Alerter): - """ Create an incident on PagerDuty for each alert """ - required_options = frozenset(['pagerduty_service_key', 'pagerduty_client_name']) - - def __init__(self, rule): - super(PagerDutyAlerter, self).__init__(rule) - self.pagerduty_service_key = self.rule['pagerduty_service_key'] - self.pagerduty_client_name = self.rule['pagerduty_client_name'] - self.pagerduty_incident_key = self.rule.get('pagerduty_incident_key', '') - self.pagerduty_incident_key_args = self.rule.get('pagerduty_incident_key_args', None) - self.pagerduty_event_type = self.rule.get('pagerduty_event_type', 'trigger') - self.pagerduty_proxy = self.rule.get('pagerduty_proxy', None) - - self.pagerduty_api_version = self.rule.get('pagerduty_api_version', 'v1') - self.pagerduty_v2_payload_class = self.rule.get('pagerduty_v2_payload_class', '') - self.pagerduty_v2_payload_class_args = self.rule.get('pagerduty_v2_payload_class_args', None) - self.pagerduty_v2_payload_component = self.rule.get('pagerduty_v2_payload_component', '') - self.pagerduty_v2_payload_component_args = self.rule.get('pagerduty_v2_payload_component_args', None) - self.pagerduty_v2_payload_group = self.rule.get('pagerduty_v2_payload_group', '') - self.pagerduty_v2_payload_group_args = self.rule.get('pagerduty_v2_payload_group_args', None) - self.pagerduty_v2_payload_severity = self.rule.get('pagerduty_v2_payload_severity', 'critical') - self.pagerduty_v2_payload_source = self.rule.get('pagerduty_v2_payload_source', 'ElastAlert') - self.pagerduty_v2_payload_source_args = self.rule.get('pagerduty_v2_payload_source_args', None) - self.pagerduty_v2_payload_custom_details = self.rule.get('pagerduty_v2_payload_custom_details', {}) - 
self.pagerduty_v2_payload_include_all_info = self.rule.get('pagerduty_v2_payload_include_all_info', True) - - if self.pagerduty_api_version == 'v2': - self.url = 'https://events.pagerduty.com/v2/enqueue' - else: - self.url = 'https://events.pagerduty.com/generic/2010-04-15/create_event.json' - - def alert(self, matches): - body = self.create_alert_body(matches) - - # post to pagerduty - headers = {'content-type': 'application/json'} - if self.pagerduty_api_version == 'v2': - - custom_details_payload = {'information': body} if self.pagerduty_v2_payload_include_all_info else {} - if self.pagerduty_v2_payload_custom_details: - for match in matches: - for custom_details_key, es_key in list(self.pagerduty_v2_payload_custom_details.items()): - custom_details_payload[custom_details_key] = lookup_es_key(match, es_key) - - payload = { - 'routing_key': self.pagerduty_service_key, - 'event_action': self.pagerduty_event_type, - 'dedup_key': self.get_incident_key(matches), - 'client': self.pagerduty_client_name, - 'payload': { - 'class': self.resolve_formatted_key(self.pagerduty_v2_payload_class, - self.pagerduty_v2_payload_class_args, - matches), - 'component': self.resolve_formatted_key(self.pagerduty_v2_payload_component, - self.pagerduty_v2_payload_component_args, - matches), - 'group': self.resolve_formatted_key(self.pagerduty_v2_payload_group, - self.pagerduty_v2_payload_group_args, - matches), - 'severity': self.pagerduty_v2_payload_severity, - 'source': self.resolve_formatted_key(self.pagerduty_v2_payload_source, - self.pagerduty_v2_payload_source_args, - matches), - 'summary': self.create_title(matches), - 'custom_details': custom_details_payload, - }, - } - match_timestamp = lookup_es_key(matches[0], self.rule.get('timestamp_field', '@timestamp')) - if match_timestamp: - payload['payload']['timestamp'] = match_timestamp - else: - payload = { - 'service_key': self.pagerduty_service_key, - 'description': self.create_title(matches), - 'event_type': self.pagerduty_event_type, - 'incident_key': self.get_incident_key(matches), - 'client': self.pagerduty_client_name, - 'details': { - "information": body, - }, - } - - # set https proxy, if it was provided - proxies = {'https': self.pagerduty_proxy} if self.pagerduty_proxy else None - try: - response = requests.post( - self.url, - data=json.dumps(payload, cls=DateTimeEncoder, ensure_ascii=False), - headers=headers, - proxies=proxies - ) - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to pagerduty: %s" % e) - - if self.pagerduty_event_type == 'trigger': - elastalert_logger.info("Trigger sent to PagerDuty") - elif self.pagerduty_event_type == 'resolve': - elastalert_logger.info("Resolve sent to PagerDuty") - elif self.pagerduty_event_type == 'acknowledge': - elastalert_logger.info("acknowledge sent to PagerDuty") - - def resolve_formatted_key(self, key, args, matches): - if args: - key_values = [lookup_es_key(matches[0], arg) for arg in args] - - # Populate values with rule level properties too - for i in range(len(key_values)): - if key_values[i] is None: - key_value = self.rule.get(args[i]) - if key_value: - key_values[i] = key_value - - missing = self.rule.get('alert_missing_value', '') - key_values = [missing if val is None else val for val in key_values] - return key.format(*key_values) - else: - return key - - def get_incident_key(self, matches): - if self.pagerduty_incident_key_args: - incident_key_values = [lookup_es_key(matches[0], arg) for arg in self.pagerduty_incident_key_args] - - # Populate 
values with rule level properties too - for i in range(len(incident_key_values)): - if incident_key_values[i] is None: - key_value = self.rule.get(self.pagerduty_incident_key_args[i]) - if key_value: - incident_key_values[i] = key_value - - missing = self.rule.get('alert_missing_value', '') - incident_key_values = [missing if val is None else val for val in incident_key_values] - return self.pagerduty_incident_key.format(*incident_key_values) - else: - return self.pagerduty_incident_key - - def get_info(self): - return {'type': 'pagerduty', - 'pagerduty_client_name': self.pagerduty_client_name} - - class PagerTreeAlerter(Alerter): """ Creates a PagerTree Incident for each alert """ required_options = frozenset(['pagertree_integration_url']) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 5898866d2..99d0db6ad 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -19,6 +19,7 @@ from .alerters.jira import JiraAlerter from .alerters.mattermost import MattermostAlerter from .alerters.opsgenie import OpsGenieAlerter +from elastalert.alerters.pagerduty import PagerDutyAlerter from .alerters.teams import MsTeamsAlerter from .alerters.slack import SlackAlerter from .alerters.sns import SnsAlerter @@ -75,7 +76,7 @@ class RulesLoader(object): 'ms_teams': MsTeamsAlerter, 'slack': SlackAlerter, 'mattermost': MattermostAlerter, - 'pagerduty': alerts.PagerDutyAlerter, + 'pagerduty': PagerDutyAlerter, 'exotel': alerts.ExotelAlerter, 'twilio': alerts.TwilioAlerter, 'victorops': alerts.VictorOpsAlerter, diff --git a/tests/alerts_test.py b/tests/alerts_test.py index ccbdc8842..898d26777 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -25,7 +25,6 @@ from elastalert.alerts import HiveAlerter from elastalert.alerts import HTTPPostAlerter from elastalert.alerts import LineNotifyAlerter -from elastalert.alerts import PagerDutyAlerter from elastalert.alerts import PagerTreeAlerter from elastalert.alerts import ServiceNowAlerter from elastalert.alerts import TelegramAlerter @@ -35,6 +34,7 @@ from elastalert.alerters.email import EmailAlerter from elastalert.alerters.mattermost import MattermostAlerter from elastalert.alerters.opsgenie import OpsGenieAlerter +from elastalert.alerters.pagerduty import PagerDutyAlerter from elastalert.alerters.slack import SlackAlerter from elastalert.alerters.teams import MsTeamsAlerter from elastalert.alerters.zabbix import ZabbixAlerter From dd502b6b07f25ce02fb1d51ba4472a251bcebf47 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Fri, 21 May 2021 10:49:16 +0100 Subject: [PATCH 0233/1065] Add migrated PD alerter --- elastalert/alerters/pagerduty.py | 148 +++++++++++++++++++++++++++++++ 1 file changed, 148 insertions(+) create mode 100644 elastalert/alerters/pagerduty.py diff --git a/elastalert/alerters/pagerduty.py b/elastalert/alerters/pagerduty.py new file mode 100644 index 000000000..107546f04 --- /dev/null +++ b/elastalert/alerters/pagerduty.py @@ -0,0 +1,148 @@ +import json +import requests + +from elastalert.util import EAException, lookup_es_key, elastalert_logger +from elastalert.alerts import Alerter, DateTimeEncoder +from requests import RequestException + + +class PagerDutyAlerter(Alerter): + """ Create an incident on PagerDuty for each alert """ + required_options = frozenset(['pagerduty_service_key', 'pagerduty_client_name']) + + def __init__(self, rule): + super(PagerDutyAlerter, self).__init__(rule) + self.pagerduty_service_key = self.rule['pagerduty_service_key'] + self.pagerduty_client_name = 
self.rule['pagerduty_client_name'] + self.pagerduty_incident_key = self.rule.get('pagerduty_incident_key', '') + self.pagerduty_incident_key_args = self.rule.get('pagerduty_incident_key_args', None) + self.pagerduty_event_type = self.rule.get('pagerduty_event_type', 'trigger') + self.pagerduty_proxy = self.rule.get('pagerduty_proxy', None) + + self.pagerduty_api_version = self.rule.get('pagerduty_api_version', 'v1') + self.pagerduty_v2_payload_class = self.rule.get('pagerduty_v2_payload_class', '') + self.pagerduty_v2_payload_class_args = self.rule.get('pagerduty_v2_payload_class_args', None) + self.pagerduty_v2_payload_component = self.rule.get('pagerduty_v2_payload_component', '') + self.pagerduty_v2_payload_component_args = self.rule.get('pagerduty_v2_payload_component_args', None) + self.pagerduty_v2_payload_group = self.rule.get('pagerduty_v2_payload_group', '') + self.pagerduty_v2_payload_group_args = self.rule.get('pagerduty_v2_payload_group_args', None) + self.pagerduty_v2_payload_severity = self.rule.get('pagerduty_v2_payload_severity', 'critical') + self.pagerduty_v2_payload_source = self.rule.get('pagerduty_v2_payload_source', 'ElastAlert') + self.pagerduty_v2_payload_source_args = self.rule.get('pagerduty_v2_payload_source_args', None) + self.pagerduty_v2_payload_custom_details = self.rule.get('pagerduty_v2_payload_custom_details', {}) + self.pagerduty_v2_payload_include_all_info = self.rule.get('pagerduty_v2_payload_include_all_info', True) + + if self.pagerduty_api_version == 'v2': + self.url = 'https://events.pagerduty.com/v2/enqueue' + else: + self.url = 'https://events.pagerduty.com/generic/2010-04-15/create_event.json' + + def alert(self, matches): + body = self.create_alert_body(matches) + + # post to pagerduty + headers = {'content-type': 'application/json'} + if self.pagerduty_api_version == 'v2': + + custom_details_payload = {'information': body} if self.pagerduty_v2_payload_include_all_info else {} + if self.pagerduty_v2_payload_custom_details: + for match in matches: + for custom_details_key, es_key in list(self.pagerduty_v2_payload_custom_details.items()): + custom_details_payload[custom_details_key] = lookup_es_key(match, es_key) + + payload = { + 'routing_key': self.pagerduty_service_key, + 'event_action': self.pagerduty_event_type, + 'dedup_key': self.get_incident_key(matches), + 'client': self.pagerduty_client_name, + 'payload': { + 'class': self.resolve_formatted_key(self.pagerduty_v2_payload_class, + self.pagerduty_v2_payload_class_args, + matches), + 'component': self.resolve_formatted_key(self.pagerduty_v2_payload_component, + self.pagerduty_v2_payload_component_args, + matches), + 'group': self.resolve_formatted_key(self.pagerduty_v2_payload_group, + self.pagerduty_v2_payload_group_args, + matches), + 'severity': self.pagerduty_v2_payload_severity, + 'source': self.resolve_formatted_key(self.pagerduty_v2_payload_source, + self.pagerduty_v2_payload_source_args, + matches), + 'summary': self.create_title(matches), + 'custom_details': custom_details_payload, + }, + } + match_timestamp = lookup_es_key(matches[0], self.rule.get('timestamp_field', '@timestamp')) + if match_timestamp: + payload['payload']['timestamp'] = match_timestamp + else: + payload = { + 'service_key': self.pagerduty_service_key, + 'description': self.create_title(matches), + 'event_type': self.pagerduty_event_type, + 'incident_key': self.get_incident_key(matches), + 'client': self.pagerduty_client_name, + 'details': { + "information": body, + }, + } + + # set https proxy, if it was provided 
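+ # (when pagerduty_proxy is unset, proxies stays None and requests falls back to any HTTPS_PROXY environment setting)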
+ proxies = {'https': self.pagerduty_proxy} if self.pagerduty_proxy else None + try: + response = requests.post( + self.url, + data=json.dumps(payload, cls=DateTimeEncoder, ensure_ascii=False), + headers=headers, + proxies=proxies + ) + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to pagerduty: %s" % e) + + if self.pagerduty_event_type == 'trigger': + elastalert_logger.info("Trigger sent to PagerDuty") + elif self.pagerduty_event_type == 'resolve': + elastalert_logger.info("Resolve sent to PagerDuty") + elif self.pagerduty_event_type == 'acknowledge': + elastalert_logger.info("acknowledge sent to PagerDuty") + + def resolve_formatted_key(self, key, args, matches): + if args: + key_values = [lookup_es_key(matches[0], arg) for arg in args] + + # Populate values with rule level properties too + for i in range(len(key_values)): + if key_values[i] is None: + key_value = self.rule.get(args[i]) + if key_value: + key_values[i] = key_value + + missing = self.rule.get('alert_missing_value', '') + key_values = [missing if val is None else val for val in key_values] + return key.format(*key_values) + else: + return key + + def get_incident_key(self, matches): + if self.pagerduty_incident_key_args: + incident_key_values = [lookup_es_key(matches[0], arg) for arg in self.pagerduty_incident_key_args] + + # Populate values with rule level properties too + for i in range(len(incident_key_values)): + if incident_key_values[i] is None: + key_value = self.rule.get(self.pagerduty_incident_key_args[i]) + if key_value: + incident_key_values[i] = key_value + + missing = self.rule.get('alert_missing_value', '') + incident_key_values = [missing if val is None else val for val in incident_key_values] + return self.pagerduty_incident_key.format(*incident_key_values) + else: + return self.pagerduty_incident_key + + def get_info(self): + return {'type': 'pagerduty', + 'pagerduty_client_name': self.pagerduty_client_name} + From af2b22876e67e345ec7fc12444ff66dd094c76f2 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Fri, 21 May 2021 11:31:55 +0100 Subject: [PATCH 0234/1065] Migrate all remaining alerters out of alerts.py --- elastalert/alerters/alerta.py | 118 +++ elastalert/alerters/chatwork.py | 47 ++ elastalert/alerters/command.py | 48 ++ elastalert/alerters/datadog.py | 38 + elastalert/alerters/debug.py | 19 + elastalert/alerters/discord.py | 71 ++ elastalert/alerters/exotel.py | 34 + elastalert/alerters/gitter.py | 41 + elastalert/alerters/googlechat.py | 96 +++ elastalert/alerters/httppost.py | 59 ++ elastalert/alerters/line.py | 34 + elastalert/alerters/pagerduty.py | 1 - elastalert/alerters/pagertree.py | 41 + elastalert/alerters/servicenow.py | 66 ++ elastalert/alerters/ses.py | 109 +++ elastalert/alerters/stomp.py | 75 ++ elastalert/alerters/telegram.py | 60 ++ elastalert/alerters/twilio.py | 45 ++ elastalert/alerters/victorops.py | 50 ++ elastalert/alerts.py | 1160 ----------------------------- elastalert/dingtalk.py | 99 +++ elastalert/elastalert.py | 2 +- elastalert/loaders.py | 60 +- elastalert/thehive.py | 131 ++++ tests/alerts_test.py | 44 +- 25 files changed, 1344 insertions(+), 1204 deletions(-) create mode 100644 elastalert/alerters/alerta.py create mode 100644 elastalert/alerters/chatwork.py create mode 100644 elastalert/alerters/command.py create mode 100644 elastalert/alerters/datadog.py create mode 100644 elastalert/alerters/debug.py create mode 100644 elastalert/alerters/discord.py create mode 100644 elastalert/alerters/exotel.py create mode 100644 
elastalert/alerters/gitter.py create mode 100644 elastalert/alerters/googlechat.py create mode 100644 elastalert/alerters/httppost.py create mode 100644 elastalert/alerters/line.py create mode 100644 elastalert/alerters/pagertree.py create mode 100644 elastalert/alerters/servicenow.py create mode 100644 elastalert/alerters/ses.py create mode 100644 elastalert/alerters/stomp.py create mode 100644 elastalert/alerters/telegram.py create mode 100644 elastalert/alerters/twilio.py create mode 100644 elastalert/alerters/victorops.py create mode 100644 elastalert/dingtalk.py create mode 100644 elastalert/thehive.py diff --git a/elastalert/alerters/alerta.py b/elastalert/alerters/alerta.py new file mode 100644 index 000000000..109bccde7 --- /dev/null +++ b/elastalert/alerters/alerta.py @@ -0,0 +1,118 @@ +import datetime +import json + +import requests +from requests import RequestException + +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import lookup_es_key, EAException, elastalert_logger, resolve_string, ts_to_dt + + +class AlertaAlerter(Alerter): + """ Creates an Alerta event for each alert """ + required_options = frozenset(['alerta_api_url']) + + def __init__(self, rule): + super(AlertaAlerter, self).__init__(rule) + + # Set up default parameters + self.url = self.rule.get('alerta_api_url', None) + self.api_key = self.rule.get('alerta_api_key', None) + self.timeout = self.rule.get('alerta_timeout', 86400) + self.use_match_timestamp = self.rule.get('alerta_use_match_timestamp', False) + self.use_qk_as_resource = self.rule.get('alerta_use_qk_as_resource', False) + self.verify_ssl = not self.rule.get('alerta_api_skip_ssl', False) + self.missing_text = self.rule.get('alert_missing_value', '') + + # Fill up default values of the API JSON payload + self.severity = self.rule.get('alerta_severity', 'warning') + self.resource = self.rule.get('alerta_resource', 'elastalert') + self.environment = self.rule.get('alerta_environment', 'Production') + self.origin = self.rule.get('alerta_origin', 'elastalert') + self.service = self.rule.get('alerta_service', ['elastalert']) + self.text = self.rule.get('alerta_text', 'elastalert') + self.type = self.rule.get('alerta_type', 'elastalert') + self.event = self.rule.get('alerta_event', 'elastalert') + self.correlate = self.rule.get('alerta_correlate', []) + self.tags = self.rule.get('alerta_tags', []) + self.group = self.rule.get('alerta_group', '') + self.attributes_keys = self.rule.get('alerta_attributes_keys', []) + self.attributes_values = self.rule.get('alerta_attributes_values', []) + self.value = self.rule.get('alerta_value', '') + + def alert(self, matches): + # Override the resource if requested + if self.use_qk_as_resource and 'query_key' in self.rule and lookup_es_key(matches[0], self.rule['query_key']): + self.resource = lookup_es_key(matches[0], self.rule['query_key']) + + headers = {'content-type': 'application/json'} + if self.api_key is not None: + headers['Authorization'] = 'Key %s' % (self.rule['alerta_api_key']) + alerta_payload = self.get_json_payload(matches[0]) + + try: + response = requests.post(self.url, data=alerta_payload, headers=headers, verify=self.verify_ssl) + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Alerta: %s" % e) + elastalert_logger.info("Alert sent to Alerta") + + def create_default_title(self, matches): + title = '%s' % (self.rule['name']) + # If the rule has a query_key, add that value + if 'query_key' in self.rule: + qk = 
matches[0].get(self.rule['query_key']) + if qk: + title += '.%s' % (qk) + return title + + def get_info(self): + return {'type': 'alerta', + 'alerta_url': self.url} + + def get_json_payload(self, match): + """ + Builds the API Create Alert body, as in + http://alerta.readthedocs.io/en/latest/api/reference.html#create-an-alert + + For the values that could have references to fields on the match, resolve those references. + + """ + + # Using default text and event title if not defined in rule + alerta_text = self.rule['type'].get_match_str([match]) if self.text == '' else resolve_string(self.text, match, self.missing_text) + alerta_event = self.create_default_title([match]) if self.event == '' else resolve_string(self.event, match, self.missing_text) + + match_timestamp = lookup_es_key(match, self.rule.get('timestamp_field', '@timestamp')) + if match_timestamp is None: + match_timestamp = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ") + if self.use_match_timestamp: + createTime = ts_to_dt(match_timestamp).strftime("%Y-%m-%dT%H:%M:%S.%fZ") + else: + createTime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ") + + alerta_payload_dict = { + 'resource': resolve_string(self.resource, match, self.missing_text), + 'severity': resolve_string(self.severity, match), + 'timeout': self.timeout, + 'createTime': createTime, + 'type': self.type, + 'environment': resolve_string(self.environment, match, self.missing_text), + 'origin': resolve_string(self.origin, match, self.missing_text), + 'group': resolve_string(self.group, match, self.missing_text), + 'event': alerta_event, + 'text': alerta_text, + 'value': resolve_string(self.value, match, self.missing_text), + 'service': [resolve_string(a_service, match, self.missing_text) for a_service in self.service], + 'tags': [resolve_string(a_tag, match, self.missing_text) for a_tag in self.tags], + 'correlate': [resolve_string(an_event, match, self.missing_text) for an_event in self.correlate], + 'attributes': dict(list(zip(self.attributes_keys, + [resolve_string(a_value, match, self.missing_text) for a_value in self.attributes_values]))), + 'rawData': self.create_alert_body([match]), + } + + try: + payload = json.dumps(alerta_payload_dict, cls=DateTimeEncoder) + except Exception as e: + raise Exception("Error building Alerta request: %s" % e) + return payload diff --git a/elastalert/alerters/chatwork.py b/elastalert/alerters/chatwork.py new file mode 100644 index 000000000..f7f7db3fd --- /dev/null +++ b/elastalert/alerters/chatwork.py @@ -0,0 +1,47 @@ +import warnings + +import requests +from requests import RequestException +from requests.auth import HTTPProxyAuth + +from elastalert.alerts import Alerter +from elastalert.util import EAException, elastalert_logger + + +class ChatworkAlerter(Alerter): + """ Creates a Chatwork room message for each alert """ + required_options = frozenset(['chatwork_apikey', 'chatwork_room_id']) + + def __init__(self, rule): + super(ChatworkAlerter, self).__init__(rule) + self.chatwork_apikey = self.rule.get('chatwork_apikey') + self.chatwork_room_id = self.rule.get('chatwork_room_id') + self.url = 'https://api.chatwork.com/v2/rooms/%s/messages' % (self.chatwork_room_id) + self.chatwork_proxy = self.rule.get('chatwork_proxy', None) + self.chatwork_proxy_login = self.rule.get('chatwork_proxy_login', None) + self.chatwork_proxy_pass = self.rule.get('chatwork_proxy_pass', None) + + def alert(self, matches): + body = self.create_alert_body(matches) + + headers = {'X-ChatWorkToken': self.chatwork_apikey} + # set 
https proxy, if it was provided + proxies = {'https': self.chatwork_proxy} if self.chatwork_proxy else None + auth = HTTPProxyAuth(self.chatwork_proxy_login, self.chatwork_proxy_pass) if self.chatwork_proxy_login else None + params = {'body': body} + + try: + response = requests.post(self.url, params=params, headers=headers, proxies=proxies, auth=auth) + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Chatwork: %s. Details: %s" % (e, "" if e.response is None else e.response.text)) + + elastalert_logger.info( + "Alert sent to Chatwork room %s" % self.chatwork_room_id) + + def get_info(self): + return { + "type": "chatwork", + "chatwork_room_id": self.chatwork_room_id + } diff --git a/elastalert/alerters/command.py b/elastalert/alerters/command.py new file mode 100644 index 000000000..ed7edc046 --- /dev/null +++ b/elastalert/alerters/command.py @@ -0,0 +1,48 @@ +import json +import subprocess + +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import elastalert_logger, resolve_string, EAException + + +class CommandAlerter(Alerter): + required_options = set(['command']) + + def __init__(self, *args): + super(CommandAlerter, self).__init__(*args) + + self.last_command = [] + + self.shell = False + if isinstance(self.rule['command'], str): + self.shell = True + if '%' in self.rule['command']: + elastalert_logger.warning('Warning! You could be vulnerable to shell injection!') + self.rule['command'] = [self.rule['command']] + + def alert(self, matches): + # Format the command and arguments + try: + command = [resolve_string(command_arg, matches[0]) for command_arg in self.rule['command']] + self.last_command = command + except KeyError as e: + raise EAException("Error formatting command: %s" % (e)) + + # Run command and pipe data + try: + subp = subprocess.Popen(command, stdin=subprocess.PIPE, shell=self.shell) + + if self.rule.get('pipe_match_json'): + match_json = json.dumps(matches, cls=DateTimeEncoder) + '\n' + stdout, stderr = subp.communicate(input=match_json.encode()) + elif self.rule.get('pipe_alert_text'): + alert_text = self.create_alert_body(matches) + stdout, stderr = subp.communicate(input=alert_text.encode()) + if self.rule.get("fail_on_non_zero_exit", False) and subp.wait(): + raise EAException("Non-zero exit code while running command %s" % (' '.join(command))) + except OSError as e: + raise EAException("Error while running command %s: %s" % (' '.join(command), e)) + + def get_info(self): + return {'type': 'command', + 'command': ' '.join(self.last_command)} diff --git a/elastalert/alerters/datadog.py b/elastalert/alerters/datadog.py new file mode 100644 index 000000000..b5796e95d --- /dev/null +++ b/elastalert/alerters/datadog.py @@ -0,0 +1,39 @@ +import json + +import requests +from requests import RequestException + +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import EAException, elastalert_logger + + +class DatadogAlerter(Alerter): + ''' Creates a Datadog Event for each alert ''' + required_options = frozenset(['datadog_api_key', 'datadog_app_key']) + + def __init__(self, rule): + super(DatadogAlerter, self).__init__(rule) + self.dd_api_key = self.rule.get('datadog_api_key', None) + self.dd_app_key = self.rule.get('datadog_app_key', None) + + def alert(self, matches): + url = 'https://api.datadoghq.com/api/v1/events' + headers = { + 'Content-Type': 'application/json', + 'DD-API-KEY': self.dd_api_key, + 'DD-APPLICATION-KEY': self.dd_app_key 
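+ # DD-API-KEY and DD-APPLICATION-KEY are Datadog's standard API-key and application-key auth headers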
+ } + payload = { + 'title': self.create_title(matches), + 'text': self.create_alert_body(matches) + } + try: + response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers) + response.raise_for_status() + except RequestException as e: + raise EAException('Error posting event to Datadog: %s' % e) + elastalert_logger.info('Alert sent to Datadog') + + def get_info(self): + return {'type': 'datadog'} diff --git a/elastalert/alerters/debug.py b/elastalert/alerters/debug.py new file mode 100644 index 000000000..61aa460e9 --- /dev/null +++ b/elastalert/alerters/debug.py @@ -0,0 +1,19 @@ +from elastalert.alerts import Alerter, BasicMatchString +from elastalert.util import elastalert_logger, lookup_es_key + + +class DebugAlerter(Alerter): + """ The debug alerter uses a Python logger (by default, alerting to terminal). """ + + def alert(self, matches): + qk = self.rule.get('query_key', None) + for match in matches: + if qk in match: + elastalert_logger.info( + 'Alert for %s, %s at %s:' % (self.rule['name'], match[qk], lookup_es_key(match, self.rule['timestamp_field']))) + else: + elastalert_logger.info('Alert for %s at %s:' % (self.rule['name'], lookup_es_key(match, self.rule['timestamp_field']))) + elastalert_logger.info(str(BasicMatchString(self.rule, match))) + + def get_info(self): + return {'type': 'debug'} diff --git a/elastalert/alerters/discord.py b/elastalert/alerters/discord.py new file mode 100644 index 000000000..3cbde63db --- /dev/null +++ b/elastalert/alerters/discord.py @@ -0,0 +1,69 @@ +import json +import warnings + +import requests +from requests import RequestException +from requests.auth import HTTPProxyAuth + +from elastalert.alerts import Alerter, BasicMatchString +from elastalert.util import EAException, elastalert_logger + + +class DiscordAlerter(Alerter): + """ Creates a Discord message for each alert """ + required_options = frozenset(['discord_webhook_url']) + + def __init__(self, rule): + super(DiscordAlerter, self).__init__(rule) + self.discord_webhook_url = self.rule['discord_webhook_url'] + self.discord_emoji_title = self.rule.get('discord_emoji_title', ':warning:') + self.discord_proxy = self.rule.get('discord_proxy', None) + self.discord_proxy_login = self.rule.get('discord_proxy_login', None) + self.discord_proxy_password = self.rule.get('discord_proxy_password', None) + self.discord_embed_color = self.rule.get('discord_embed_color', 0xffffff) + self.discord_embed_footer = self.rule.get('discord_embed_footer', None) + self.discord_embed_icon_url = self.rule.get('discord_embed_icon_url', None) + + def alert(self, matches): + body = '' + title = u'%s' % (self.create_title(matches)) + for match in matches: + body += str(BasicMatchString(self.rule, match)) + if len(matches) > 1: + body += '\n----------------------------------------\n' + if len(body) > 2047: + body = body[0:1950] + '\n *message was cropped according to discord embed description limits!* ' + + body += '```' + + proxies = {'https': self.discord_proxy} if self.discord_proxy else None + auth = HTTPProxyAuth(self.discord_proxy_login, self.discord_proxy_password) if self.discord_proxy_login else None + headers = {"Content-Type": "application/json"} + + data = {} + data["content"] = "%s %s %s" % (self.discord_emoji_title, title, self.discord_emoji_title) + data["embeds"] = [] + embed = {} + embed["description"] = "%s" % (body) + embed["color"] = (self.discord_embed_color) + + if self.discord_embed_footer: + embed["footer"] = {} + embed["footer"]["text"] = (self.discord_embed_footer) if 
self.discord_embed_footer else None + embed["footer"]["icon_url"] = (self.discord_embed_icon_url) if self.discord_embed_icon_url else None + + data["embeds"].append(embed) + + try: + response = requests.post(self.discord_webhook_url, data=json.dumps(data), headers=headers, proxies=proxies, auth=auth) + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Discord: %s. Details: %s" % (e, "" if e.response is None else e.response.text)) + + elastalert_logger.info( + "Alert sent to the webhook %s" % self.discord_webhook_url) + + def get_info(self): + return {'type': 'discord', + 'discord_webhook_url': self.discord_webhook_url} diff --git a/elastalert/alerters/exotel.py b/elastalert/alerters/exotel.py new file mode 100644 index 000000000..6c1cef688 --- /dev/null +++ b/elastalert/alerters/exotel.py @@ -0,0 +1,34 @@ +import sys + +from exotel import Exotel +from requests import RequestException + +from elastalert.alerts import Alerter +from elastalert.util import EAException, elastalert_logger + + +class ExotelAlerter(Alerter): + required_options = frozenset(['exotel_account_sid', 'exotel_auth_token', 'exotel_to_number', 'exotel_from_number']) + + def __init__(self, rule): + super(ExotelAlerter, self).__init__(rule) + self.exotel_account_sid = self.rule['exotel_account_sid'] + self.exotel_auth_token = self.rule['exotel_auth_token'] + self.exotel_to_number = self.rule['exotel_to_number'] + self.exotel_from_number = self.rule['exotel_from_number'] + self.sms_body = self.rule.get('exotel_message_body', '') + + def alert(self, matches): + client = Exotel(self.exotel_account_sid, self.exotel_auth_token) + + try: + message_body = self.rule['name'] + self.sms_body + response = client.sms(self.rule['exotel_from_number'], self.rule['exotel_to_number'], message_body) + if response != 200: + raise EAException("Error posting to Exotel, response code is %s" % response) + except RequestException: + raise EAException("Error posting to Exotel").with_traceback(sys.exc_info()[2]) + elastalert_logger.info("Trigger sent to Exotel") + + def get_info(self): + return {'type': 'exotel', 'exotel_account': self.exotel_account_sid} diff --git a/elastalert/alerters/gitter.py b/elastalert/alerters/gitter.py new file mode 100644 index 000000000..326d86eb0 --- /dev/null +++ b/elastalert/alerters/gitter.py @@ -0,0 +1,41 @@ +import json + +import requests +from requests import RequestException + +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import EAException, elastalert_logger + + +class GitterAlerter(Alerter): + """ Creates a Gitter activity message for each alert """ + required_options = frozenset(['gitter_webhook_url']) + + def __init__(self, rule): + super(GitterAlerter, self).__init__(rule) + self.gitter_webhook_url = self.rule['gitter_webhook_url'] + self.gitter_proxy = self.rule.get('gitter_proxy', None) + self.gitter_msg_level = self.rule.get('gitter_msg_level', 'error') + + def alert(self, matches): + body = self.create_alert_body(matches) + + # post to Gitter + headers = {'content-type': 'application/json'} + # set https proxy, if it was provided + proxies = {'https': self.gitter_proxy} if self.gitter_proxy else None + payload = { + 'message': body, + 'level': self.gitter_msg_level + } + + try: + response = requests.post(self.gitter_webhook_url, json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies) + response.raise_for_status() + except RequestException as e: + raise 
EAException("Error posting to Gitter: %s" % e) + elastalert_logger.info("Alert sent to Gitter") + + def get_info(self): + return {'type': 'gitter', + 'gitter_webhook_url': self.gitter_webhook_url} diff --git a/elastalert/alerters/googlechat.py b/elastalert/alerters/googlechat.py new file mode 100644 index 000000000..9de439de1 --- /dev/null +++ b/elastalert/alerters/googlechat.py @@ -0,0 +1,96 @@ +import json + +import requests +from requests import RequestException + +from elastalert.alerts import Alerter +from elastalert.util import EAException, elastalert_logger + + +class GoogleChatAlerter(Alerter): + """ Send a notification via Google Chat webhooks """ + required_options = frozenset(['googlechat_webhook_url']) + + def __init__(self, rule): + super(GoogleChatAlerter, self).__init__(rule) + self.googlechat_webhook_url = self.rule['googlechat_webhook_url'] + if isinstance(self.googlechat_webhook_url, str): + self.googlechat_webhook_url = [self.googlechat_webhook_url] + self.googlechat_format = self.rule.get('googlechat_format', 'basic') + self.googlechat_header_title = self.rule.get('googlechat_header_title', None) + self.googlechat_header_subtitle = self.rule.get('googlechat_header_subtitle', None) + self.googlechat_header_image = self.rule.get('googlechat_header_image', None) + self.googlechat_footer_kibanalink = self.rule.get('googlechat_footer_kibanalink', None) + + def create_header(self): + header = None + if self.googlechat_header_title: + header = { + "title": self.googlechat_header_title, + "subtitle": self.googlechat_header_subtitle, + "imageUrl": self.googlechat_header_image + } + return header + + def create_footer(self): + footer = None + if self.googlechat_footer_kibanalink: + footer = {"widgets": [{ + "buttons": [{ + "textButton": { + "text": "VISIT KIBANA", + "onClick": { + "openLink": { + "url": self.googlechat_footer_kibanalink + } + } + } + }] + }] + } + return footer + + def create_card(self, matches): + card = {"cards": [{ + "sections": [{ + "widgets": [ + {"textParagraph": {"text": self.create_alert_body(matches)}} + ]} + ]} + ]} + + # Add the optional header + header = self.create_header() + if header: + card['cards'][0]['header'] = header + + # Add the optional footer + footer = self.create_footer() + if footer: + card['cards'][0]['sections'].append(footer) + return card + + def create_basic(self, matches): + body = self.create_alert_body(matches) + return {'text': body} + + def alert(self, matches): + # Format message + if self.googlechat_format == 'card': + message = self.create_card(matches) + else: + message = self.create_basic(matches) + + # Post to webhook + headers = {'content-type': 'application/json'} + for url in self.googlechat_webhook_url: + try: + response = requests.post(url, data=json.dumps(message), headers=headers) + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to google chat: {}".format(e)) + elastalert_logger.info("Alert sent to Google Chat!") + + def get_info(self): + return {'type': 'googlechat', + 'googlechat_webhook_url': self.googlechat_webhook_url} diff --git a/elastalert/alerters/httppost.py b/elastalert/alerters/httppost.py new file mode 100644 index 000000000..74f1635b0 --- /dev/null +++ b/elastalert/alerters/httppost.py @@ -0,0 +1,59 @@ +import json + +import requests +from requests import RequestException + +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import lookup_es_key, EAException, elastalert_logger + + +class HTTPPostAlerter(Alerter): + """ 
Sends each match via HTTP POST, encoded as JSON. """ + required_options = frozenset(['http_post_url']) + + def __init__(self, rule): + super(HTTPPostAlerter, self).__init__(rule) + post_url = self.rule.get('http_post_url') + if isinstance(post_url, str): + post_url = [post_url] + self.post_url = post_url + self.post_proxy = self.rule.get('http_post_proxy') + self.post_payload = self.rule.get('http_post_payload', {}) + self.post_static_payload = self.rule.get('http_post_static_payload', {}) + self.post_all_values = self.rule.get('http_post_all_values', not self.post_payload) + self.post_http_headers = self.rule.get('http_post_headers', {}) + self.post_ca_certs = self.rule.get('http_post_ca_certs') + self.post_ignore_ssl_errors = self.rule.get('http_post_ignore_ssl_errors', False) + self.timeout = self.rule.get('http_post_timeout', 10) + + def alert(self, matches): + """ Each match will trigger a POST to the specified endpoint(s). """ + for match in matches: + payload = match if self.post_all_values else {} + payload.update(self.post_static_payload) + for post_key, es_key in list(self.post_payload.items()): + payload[post_key] = lookup_es_key(match, es_key) + headers = { + "Content-Type": "application/json", + "Accept": "application/json;charset=utf-8" + } + if self.post_ca_certs: + verify = self.post_ca_certs + else: + verify = not self.post_ignore_ssl_errors + + headers.update(self.post_http_headers) + proxies = {'https': self.post_proxy} if self.post_proxy else None + for url in self.post_url: + try: + response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), + headers=headers, proxies=proxies, timeout=self.timeout, + verify=verify) + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting HTTP Post alert: %s" % e) + elastalert_logger.info("HTTP Post alert sent.") + + def get_info(self): + return {'type': 'http_post', + 'http_post_webhook_url': self.post_url} diff --git a/elastalert/alerters/line.py b/elastalert/alerters/line.py new file mode 100644 index 000000000..7d79b5558 --- /dev/null +++ b/elastalert/alerters/line.py @@ -0,0 +1,34 @@ +import requests +from requests import RequestException + +from elastalert.alerts import Alerter +from elastalert.util import EAException, elastalert_logger + + +class LineNotifyAlerter(Alerter): + """ Creates a Line Notify message for each alert """ + required_options = frozenset(["linenotify_access_token"]) + + def __init__(self, rule): + super(LineNotifyAlerter, self).__init__(rule) + self.linenotify_access_token = self.rule["linenotify_access_token"] + + def alert(self, matches): + body = self.create_alert_body(matches) + # post to Line Notify + headers = { + "Content-Type": "application/x-www-form-urlencoded", + "Authorization": "Bearer {}".format(self.linenotify_access_token) + } + payload = { + "message": body + } + try: + response = requests.post("https://notify-api.line.me/api/notify", data=payload, headers=headers) + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Line Notify: %s" % e) + elastalert_logger.info("Alert sent to Line Notify") + + def get_info(self): + return {"type": "linenotify", "linenotify_access_token": self.linenotify_access_token} diff --git a/elastalert/alerters/pagerduty.py b/elastalert/alerters/pagerduty.py index 107546f04..69ebafb3d 100644 --- a/elastalert/alerters/pagerduty.py +++ b/elastalert/alerters/pagerduty.py @@ -145,4 +145,3 @@ def get_incident_key(self, matches): def get_info(self): return 
{'type': 'pagerduty', 'pagerduty_client_name': self.pagerduty_client_name} - diff --git a/elastalert/alerters/pagertree.py b/elastalert/alerters/pagertree.py new file mode 100644 index 000000000..360f405fd --- /dev/null +++ b/elastalert/alerters/pagertree.py @@ -0,0 +1,41 @@ +import json +import uuid + +import requests +from requests import RequestException + +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import EAException, elastalert_logger + + +class PagerTreeAlerter(Alerter): + """ Creates a PagerTree Incident for each alert """ + required_options = frozenset(['pagertree_integration_url']) + + def __init__(self, rule): + super(PagerTreeAlerter, self).__init__(rule) + self.url = self.rule['pagertree_integration_url'] + self.pagertree_proxy = self.rule.get('pagertree_proxy', None) + + def alert(self, matches): + # post to pagertree + headers = {'content-type': 'application/json'} + # set https proxy, if it was provided + proxies = {'https': self.pagertree_proxy} if self.pagertree_proxy else None + payload = { + "event_type": "create", + "Id": str(uuid.uuid4()), + "Title": self.create_title(matches), + "Description": self.create_alert_body(matches) + } + + try: + response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies) + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to PagerTree: %s" % e) + elastalert_logger.info("Trigger sent to PagerTree") + + def get_info(self): + return {'type': 'pagertree', + 'pagertree_integration_url': self.url} diff --git a/elastalert/alerters/servicenow.py b/elastalert/alerters/servicenow.py new file mode 100644 index 000000000..eecf8744e --- /dev/null +++ b/elastalert/alerters/servicenow.py @@ -0,0 +1,66 @@ +import json + +import requests +from requests import RequestException + +from elastalert.alerts import Alerter, BasicMatchString, DateTimeEncoder +from elastalert.util import EAException, elastalert_logger + + +class ServiceNowAlerter(Alerter): + """ Creates a ServiceNow alert """ + required_options = set([ + 'username', + 'password', + 'servicenow_rest_url', + 'short_description', + 'comments', + 'assignment_group', + 'category', + 'subcategory', + 'cmdb_ci', + 'caller_id' + ]) + + def __init__(self, rule): + super(ServiceNowAlerter, self).__init__(rule) + self.servicenow_rest_url = self.rule['servicenow_rest_url'] + self.servicenow_proxy = self.rule.get('servicenow_proxy', None) + + def alert(self, matches): + for match in matches: + # Parse everything into description. 
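+ # BasicMatchString renders the rule's alert text and the match's fields as plain text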
+ description = str(BasicMatchString(self.rule, match)) + + # Set proper headers + headers = { + "Content-Type": "application/json", + "Accept": "application/json;charset=utf-8" + } + proxies = {'https': self.servicenow_proxy} if self.servicenow_proxy else None + payload = { + "description": description, + "short_description": self.rule['short_description'], + "comments": self.rule['comments'], + "assignment_group": self.rule['assignment_group'], + "category": self.rule['category'], + "subcategory": self.rule['subcategory'], + "cmdb_ci": self.rule['cmdb_ci'], + "caller_id": self.rule["caller_id"] + } + try: + response = requests.post( + self.servicenow_rest_url, + auth=(self.rule['username'], self.rule['password']), + headers=headers, + data=json.dumps(payload, cls=DateTimeEncoder), + proxies=proxies + ) + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to ServiceNow: %s" % e) + elastalert_logger.info("Alert sent to ServiceNow") + + def get_info(self): + return {'type': 'ServiceNow', + 'servicenow_rest_url': self.servicenow_rest_url} diff --git a/elastalert/alerters/ses.py b/elastalert/alerters/ses.py new file mode 100644 index 000000000..8a48abc6b --- /dev/null +++ b/elastalert/alerters/ses.py @@ -0,0 +1,109 @@ +import boto3 + +from elastalert.alerts import Alerter +from elastalert.util import lookup_es_key, EAException, elastalert_logger + + +class SesAlerter(Alerter): + """ Sends an email alert using AWS SES """ + required_options = frozenset(['ses_email', 'ses_from_addr']) + + def __init__(self, *args): + super(SesAlerter, self).__init__(*args) + + self.aws_access_key_id = self.rule.get('ses_aws_access_key_id') + self.aws_secret_access_key = self.rule.get('ses_aws_secret_access_key') + self.aws_region = self.rule.get('ses_aws_region', 'us-east-1') + self.aws_profile = self.rule.get('ses_aws_profile', '') + + self.from_addr = self.rule.get('ses_from_addr') + + # Convert email to a list if it isn't already + if isinstance(self.rule['ses_email'], str): + self.rule['ses_email'] = [self.rule['ses_email']] + + # If there is a cc then also convert it to a list if it isn't + cc = self.rule.get('ses_cc') + if cc and isinstance(cc, str): + self.rule['ses_cc'] = [self.rule['ses_cc']] + + # If there is a bcc then also convert it to a list if it isn't + bcc = self.rule.get('ses_bcc') + if bcc and isinstance(bcc, str): + self.rule['ses_bcc'] = [self.rule['ses_bcc']] + + # If there is an email_reply_to then also convert it to a list if it isn't + reply_to = self.rule.get('ses_email_reply_to') + if reply_to and isinstance(reply_to, str): + self.rule['ses_email_reply_to'] = [self.rule['ses_email_reply_to']] + + add_suffix = self.rule.get('ses_email_add_domain') + if add_suffix and not add_suffix.startswith('@'): + self.rule['ses_email_add_domain'] = '@' + add_suffix + + def alert(self, matches): + body = self.create_alert_body(matches) + + to_addr = self.rule['ses_email'] + if 'ses_email_from_field' in self.rule: + recipient = lookup_es_key(matches[0], self.rule['ses_email_from_field']) + if isinstance(recipient, str): + if '@' in recipient: + to_addr = [recipient] + elif 'ses_email_add_domain' in self.rule: + to_addr = [recipient + self.rule['ses_email_add_domain']] + elif isinstance(recipient, list): + to_addr = recipient + if 'ses_email_add_domain' in self.rule: + to_addr = [name + self.rule['ses_email_add_domain'] for name in to_addr] + + if self.aws_profile != '': + session = boto3.Session(profile_name=self.aws_profile) + else: + session = 
boto3.Session( + aws_access_key_id=self.aws_access_key_id, + aws_secret_access_key=self.aws_secret_access_key, + region_name=self.aws_region + ) + + client = session.client('ses') + try: + client.send_email( + Source=self.from_addr, + Destination={ + 'ToAddresses': to_addr, + 'CcAddresses': self.rule.get('ses_cc', []), + 'BccAddresses': self.rule.get('ses_bcc', []) + }, + Message={ + 'Subject': { + 'Charset': 'UTF-8', + 'Data': self.create_title(matches), + }, + 'Body': { + 'Text': { + 'Charset': 'UTF-8', + 'Data': body, + } + } + }, + ReplyToAddresses=self.rule.get('ses_email_reply_to', [])) + except Exception as e: + raise EAException("Error sending ses: %s" % (e,)) + + elastalert_logger.info("Sent ses to %s" % (to_addr,)) + + def create_default_title(self, matches): + subject = 'ElastAlert 2: %s' % (self.rule['name']) + + # If the rule has a query_key, add that value plus timestamp to subject + if 'query_key' in self.rule: + qk = matches[0].get(self.rule['query_key']) + if qk: + subject += ' - %s' % (qk) + + return subject + + def get_info(self): + return {'type': 'ses', + 'recipients': self.rule['ses_email']} diff --git a/elastalert/alerters/stomp.py b/elastalert/alerters/stomp.py new file mode 100644 index 000000000..65e0d3101 --- /dev/null +++ b/elastalert/alerters/stomp.py @@ -0,0 +1,75 @@ +import datetime +import json +import time + +import stomp + +from elastalert.alerts import Alerter, BasicMatchString +from elastalert.util import lookup_es_key, elastalert_logger + + +class StompAlerter(Alerter): + """ The stomp alerter publishes alerts via stomp to a broker. """ + required_options = frozenset( + ['stomp_hostname', 'stomp_hostport', 'stomp_login', 'stomp_password']) + + def alert(self, matches): + alerts = [] + + qk = self.rule.get('query_key', None) + + fullmessage = {} + for match in matches: + if qk is not None: + resmatch = lookup_es_key(match, qk) + else: + resmatch = None + + if resmatch is not None: + elastalert_logger.info( + 'Alert for %s, %s at %s:' % (self.rule['name'], resmatch, lookup_es_key(match, self.rule['timestamp_field']))) + alerts.append( + 'Alert for %s, %s at %s:' % (self.rule['name'], resmatch, lookup_es_key( + match, self.rule['timestamp_field'])) + ) + fullmessage['match'] = resmatch + else: + elastalert_logger.info('Rule %s generated an alert at %s:' % ( + self.rule['name'], lookup_es_key(match, self.rule['timestamp_field']))) + alerts.append( + 'Rule %s generated an alert at %s:' % (self.rule['name'], lookup_es_key( + match, self.rule['timestamp_field'])) + ) + fullmessage['match'] = lookup_es_key( + match, self.rule['timestamp_field']) + elastalert_logger.info(str(BasicMatchString(self.rule, match))) + + fullmessage['alerts'] = alerts + fullmessage['rule'] = self.rule['name'] + fullmessage['rule_file'] = self.rule['rule_file'] + + fullmessage['matching'] = str(BasicMatchString(self.rule, match)) + fullmessage['alertDate'] = datetime.datetime.now( + ).strftime("%Y-%m-%d %H:%M:%S") + fullmessage['body'] = self.create_alert_body(matches) + + fullmessage['matches'] = matches + + self.stomp_hostname = self.rule.get('stomp_hostname', 'localhost') + self.stomp_hostport = self.rule.get('stomp_hostport', '61613') + self.stomp_login = self.rule.get('stomp_login', 'admin') + self.stomp_password = self.rule.get('stomp_password', 'admin') + self.stomp_destination = self.rule.get( + 'stomp_destination', '/queue/ALERT') + self.stomp_ssl = self.rule.get('stomp_ssl', False) + + conn = stomp.Connection([(self.stomp_hostname, self.stomp_hostport)], 
use_ssl=self.stomp_ssl) + + conn.connect(self.stomp_login, self.stomp_password) + # Ensures that the CONNECTED frame is received; otherwise, the disconnect call will fail. + time.sleep(1) + conn.send(self.stomp_destination, json.dumps(fullmessage)) + conn.disconnect() + + def get_info(self): + return {'type': 'stomp'} diff --git a/elastalert/alerters/telegram.py b/elastalert/alerters/telegram.py new file mode 100644 index 000000000..718656670 --- /dev/null +++ b/elastalert/alerters/telegram.py @@ -0,0 +1,60 @@ +import json +import warnings + +import requests +from requests import RequestException +from requests.auth import HTTPProxyAuth + +from elastalert.alerts import Alerter, BasicMatchString, DateTimeEncoder +from elastalert.util import EAException, elastalert_logger + + +class TelegramAlerter(Alerter): + """ Send a Telegram message via the Bot API for each alert """ + required_options = frozenset(['telegram_bot_token', 'telegram_room_id']) + + def __init__(self, rule): + super(TelegramAlerter, self).__init__(rule) + self.telegram_bot_token = self.rule['telegram_bot_token'] + self.telegram_room_id = self.rule['telegram_room_id'] + self.telegram_api_url = self.rule.get('telegram_api_url', 'api.telegram.org') + self.url = 'https://%s/bot%s/%s' % (self.telegram_api_url, self.telegram_bot_token, "sendMessage") + self.telegram_proxy = self.rule.get('telegram_proxy', None) + self.telegram_proxy_login = self.rule.get('telegram_proxy_login', None) + self.telegram_proxy_password = self.rule.get('telegram_proxy_pass', None) + + def alert(self, matches): + body = '⚠ *%s* ⚠ ```\n' % (self.create_title(matches)) + for match in matches: + body += str(BasicMatchString(self.rule, match)) + # Separate text of aggregated alerts with dashes + if len(matches) > 1: + body += '\n----------------------------------------\n' + if len(body) > 4095: + body = body[0:4000] + "\n⚠ *message was cropped according to telegram limits!* ⚠" + body += ' ```' + + headers = {'content-type': 'application/json'} + # set https proxy, if it was provided + proxies = {'https': self.telegram_proxy} if self.telegram_proxy else None + auth = HTTPProxyAuth(self.telegram_proxy_login, self.telegram_proxy_password) if self.telegram_proxy_login else None + payload = { + 'chat_id': self.telegram_room_id, + 'text': body, + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + try: + response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies, auth=auth) + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Telegram: %s. 
Details: %s" % (e, "" if e.response is None else e.response.text)) + + elastalert_logger.info( + "Alert sent to Telegram room %s" % self.telegram_room_id) + + def get_info(self): + return {'type': 'telegram', + 'telegram_room_id': self.telegram_room_id} diff --git a/elastalert/alerters/twilio.py b/elastalert/alerters/twilio.py new file mode 100644 index 000000000..f603a06c3 --- /dev/null +++ b/elastalert/alerters/twilio.py @@ -0,0 +1,45 @@ +from twilio.base.exceptions import TwilioRestException +from twilio.rest import Client as TwilioClient + +from elastalert.alerts import Alerter +from elastalert.util import EAException, elastalert_logger + + +class TwilioAlerter(Alerter): + required_options = frozenset(['twilio_account_sid', 'twilio_auth_token', 'twilio_to_number']) + + def __init__(self, rule): + super(TwilioAlerter, self).__init__(rule) + self.twilio_account_sid = self.rule['twilio_account_sid'] + self.twilio_auth_token = self.rule['twilio_auth_token'] + self.twilio_to_number = self.rule['twilio_to_number'] + self.twilio_from_number = self.rule.get('twilio_from_number') + self.twilio_message_service_sid = self.rule.get('twilio_message_service_sid') + self.twilio_use_copilot = self.rule.get('twilio_use_copilot', False) + + def alert(self, matches): + client = TwilioClient(self.twilio_account_sid, self.twilio_auth_token) + + try: + if self.twilio_use_copilot: + if self.twilio_message_service_sid is None: + raise EAException("Twilio Copilot requires the 'twilio_message_service_sid' option") + + client.messages.create(body=self.rule['name'], + to=self.twilio_to_number, + messaging_service_sid=self.twilio_message_service_sid) + else: + if self.twilio_from_number is None: + raise EAException("Twilio SMS requires the 'twilio_from_number' option") + + client.messages.create(body=self.rule['name'], + to=self.twilio_to_number, + from_=self.twilio_from_number) + except TwilioRestException as e: + raise EAException("Error posting to twilio: %s" % e) + + elastalert_logger.info("Trigger sent to Twilio") + + def get_info(self): + return {'type': 'twilio', + 'twilio_client_name': self.twilio_from_number} diff --git a/elastalert/alerters/victorops.py b/elastalert/alerters/victorops.py new file mode 100644 index 000000000..993a31814 --- /dev/null +++ b/elastalert/alerters/victorops.py @@ -0,0 +1,50 @@ +import json + +import requests +from requests import RequestException + +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import EAException, elastalert_logger + + +class VictorOpsAlerter(Alerter): + """ Creates a VictorOps Incident for each alert """ + required_options = frozenset(['victorops_api_key', 'victorops_routing_key', 'victorops_message_type']) + + def __init__(self, rule): + super(VictorOpsAlerter, self).__init__(rule) + self.victorops_api_key = self.rule['victorops_api_key'] + self.victorops_routing_key = self.rule['victorops_routing_key'] + self.victorops_message_type = self.rule['victorops_message_type'] + self.victorops_entity_id = self.rule.get('victorops_entity_id', None) + self.victorops_entity_display_name = self.rule.get('victorops_entity_display_name', 'no entity display name') + self.url = 'https://alert.victorops.com/integrations/generic/20131114/alert/%s/%s' % ( + self.victorops_api_key, self.victorops_routing_key) + self.victorops_proxy = self.rule.get('victorops_proxy', None) + + def alert(self, matches): + body = self.create_alert_body(matches) + + # post to victorops + headers = {'content-type': 'application/json'} + # set https proxy, if it was 
provided + proxies = {'https': self.victorops_proxy} if self.victorops_proxy else None + payload = { + "message_type": self.victorops_message_type, + "entity_display_name": self.victorops_entity_display_name, + "monitoring_tool": "ElastAlert", + "state_message": body + } + if self.victorops_entity_id: + payload["entity_id"] = self.victorops_entity_id + + try: + response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies) + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to VictorOps: %s" % e) + elastalert_logger.info("Trigger sent to VictorOps") + + def get_info(self): + return {'type': 'victorops', + 'victorops_routing_key': self.victorops_routing_key} diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 4605757d8..194667ef5 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -1,29 +1,12 @@ # -*- coding: utf-8 -*- import copy -import datetime import json import os -import subprocess -import sys -import time -import uuid -import warnings -import boto3 -import requests -import stomp -from exotel import Exotel -from requests.auth import HTTPProxyAuth -from requests.exceptions import RequestException from texttable import Texttable -from twilio.base.exceptions import TwilioRestException -from twilio.rest import Client as TwilioClient from .util import EAException -from .util import elastalert_logger from .util import lookup_es_key -from .util import resolve_string -from .util import ts_to_dt from .yaml import read_yaml @@ -296,1146 +279,3 @@ def get_account(self, account_file): raise EAException('Account file must have user and password fields') self.user = account_conf['user'] self.password = account_conf['password'] - - -class StompAlerter(Alerter): - """ The stomp alerter publishes alerts via stomp to a broker. 
""" - required_options = frozenset( - ['stomp_hostname', 'stomp_hostport', 'stomp_login', 'stomp_password']) - - def alert(self, matches): - alerts = [] - - qk = self.rule.get('query_key', None) - - fullmessage = {} - for match in matches: - if qk is not None: - resmatch = lookup_es_key(match, qk) - else: - resmatch = None - - if resmatch is not None: - elastalert_logger.info( - 'Alert for %s, %s at %s:' % (self.rule['name'], resmatch, lookup_es_key(match, self.rule['timestamp_field']))) - alerts.append( - 'Alert for %s, %s at %s:' % (self.rule['name'], resmatch, lookup_es_key( - match, self.rule['timestamp_field'])) - ) - fullmessage['match'] = resmatch - else: - elastalert_logger.info('Rule %s generated an alert at %s:' % ( - self.rule['name'], lookup_es_key(match, self.rule['timestamp_field']))) - alerts.append( - 'Rule %s generated an alert at %s:' % (self.rule['name'], lookup_es_key( - match, self.rule['timestamp_field'])) - ) - fullmessage['match'] = lookup_es_key( - match, self.rule['timestamp_field']) - elastalert_logger.info(str(BasicMatchString(self.rule, match))) - - fullmessage['alerts'] = alerts - fullmessage['rule'] = self.rule['name'] - fullmessage['rule_file'] = self.rule['rule_file'] - - fullmessage['matching'] = str(BasicMatchString(self.rule, match)) - fullmessage['alertDate'] = datetime.datetime.now( - ).strftime("%Y-%m-%d %H:%M:%S") - fullmessage['body'] = self.create_alert_body(matches) - - fullmessage['matches'] = matches - - self.stomp_hostname = self.rule.get('stomp_hostname', 'localhost') - self.stomp_hostport = self.rule.get('stomp_hostport', '61613') - self.stomp_login = self.rule.get('stomp_login', 'admin') - self.stomp_password = self.rule.get('stomp_password', 'admin') - self.stomp_destination = self.rule.get( - 'stomp_destination', '/queue/ALERT') - self.stomp_ssl = self.rule.get('stomp_ssl', False) - - conn = stomp.Connection([(self.stomp_hostname, self.stomp_hostport)], use_ssl=self.stomp_ssl) - - conn.connect(self.stomp_login, self.stomp_password) - # Ensures that the CONNECTED frame is received otherwise, the disconnect call will fail. - time.sleep(1) - conn.send(self.stomp_destination, json.dumps(fullmessage)) - conn.disconnect() - - def get_info(self): - return {'type': 'stomp'} - - -class DebugAlerter(Alerter): - """ The debug alerter uses a Python logger (by default, alerting to terminal). """ - - def alert(self, matches): - qk = self.rule.get('query_key', None) - for match in matches: - if qk in match: - elastalert_logger.info( - 'Alert for %s, %s at %s:' % (self.rule['name'], match[qk], lookup_es_key(match, self.rule['timestamp_field']))) - else: - elastalert_logger.info('Alert for %s at %s:' % (self.rule['name'], lookup_es_key(match, self.rule['timestamp_field']))) - elastalert_logger.info(str(BasicMatchString(self.rule, match))) - - def get_info(self): - return {'type': 'debug'} - - -class CommandAlerter(Alerter): - required_options = set(['command']) - - def __init__(self, *args): - super(CommandAlerter, self).__init__(*args) - - self.last_command = [] - - self.shell = False - if isinstance(self.rule['command'], str): - self.shell = True - if '%' in self.rule['command']: - elastalert_logger.warning('Warning! 
You could be vulnerable to shell injection!') - self.rule['command'] = [self.rule['command']] - - def alert(self, matches): - # Format the command and arguments - try: - command = [resolve_string(command_arg, matches[0]) for command_arg in self.rule['command']] - self.last_command = command - except KeyError as e: - raise EAException("Error formatting command: %s" % (e)) - - # Run command and pipe data - try: - subp = subprocess.Popen(command, stdin=subprocess.PIPE, shell=self.shell) - - if self.rule.get('pipe_match_json'): - match_json = json.dumps(matches, cls=DateTimeEncoder) + '\n' - stdout, stderr = subp.communicate(input=match_json.encode()) - elif self.rule.get('pipe_alert_text'): - alert_text = self.create_alert_body(matches) - stdout, stderr = subp.communicate(input=alert_text.encode()) - if self.rule.get("fail_on_non_zero_exit", False) and subp.wait(): - raise EAException("Non-zero exit code while running command %s" % (' '.join(command))) - except OSError as e: - raise EAException("Error while running command %s: %s" % (' '.join(command), e)) - - def get_info(self): - return {'type': 'command', - 'command': ' '.join(self.last_command)} - - -class PagerTreeAlerter(Alerter): - """ Creates a PagerTree Incident for each alert """ - required_options = frozenset(['pagertree_integration_url']) - - def __init__(self, rule): - super(PagerTreeAlerter, self).__init__(rule) - self.url = self.rule['pagertree_integration_url'] - self.pagertree_proxy = self.rule.get('pagertree_proxy', None) - - def alert(self, matches): - # post to pagertree - headers = {'content-type': 'application/json'} - # set https proxy, if it was provided - proxies = {'https': self.pagertree_proxy} if self.pagertree_proxy else None - payload = { - "event_type": "create", - "Id": str(uuid.uuid4()), - "Title": self.create_title(matches), - "Description": self.create_alert_body(matches) - } - - try: - response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies) - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to PagerTree: %s" % e) - elastalert_logger.info("Trigger sent to PagerTree") - - def get_info(self): - return {'type': 'pagertree', - 'pagertree_integration_url': self.url} - - -class ExotelAlerter(Alerter): - required_options = frozenset(['exotel_account_sid', 'exotel_auth_token', 'exotel_to_number', 'exotel_from_number']) - - def __init__(self, rule): - super(ExotelAlerter, self).__init__(rule) - self.exotel_account_sid = self.rule['exotel_account_sid'] - self.exotel_auth_token = self.rule['exotel_auth_token'] - self.exotel_to_number = self.rule['exotel_to_number'] - self.exotel_from_number = self.rule['exotel_from_number'] - self.sms_body = self.rule.get('exotel_message_body', '') - - def alert(self, matches): - client = Exotel(self.exotel_account_sid, self.exotel_auth_token) - - try: - message_body = self.rule['name'] + self.sms_body - response = client.sms(self.rule['exotel_from_number'], self.rule['exotel_to_number'], message_body) - if response != 200: - raise EAException("Error posting to Exotel, response code is %s" % response) - except RequestException: - raise EAException("Error posting to Exotel").with_traceback(sys.exc_info()[2]) - elastalert_logger.info("Trigger sent to Exotel") - - def get_info(self): - return {'type': 'exotel', 'exotel_account': self.exotel_account_sid} - - -class TwilioAlerter(Alerter): - required_options = frozenset(['twilio_account_sid', 'twilio_auth_token', 'twilio_to_number']) - 
- def __init__(self, rule): - super(TwilioAlerter, self).__init__(rule) - self.twilio_account_sid = self.rule['twilio_account_sid'] - self.twilio_auth_token = self.rule['twilio_auth_token'] - self.twilio_to_number = self.rule['twilio_to_number'] - self.twilio_from_number = self.rule.get('twilio_from_number') - self.twilio_message_service_sid = self.rule.get('twilio_message_service_sid') - self.twilio_use_copilot = self.rule.get('twilio_use_copilot', False) - - def alert(self, matches): - client = TwilioClient(self.twilio_account_sid, self.twilio_auth_token) - - try: - if self.twilio_use_copilot: - if self.twilio_message_service_sid is None: - raise EAException("Twilio Copilot requires the 'twilio_message_service_sid' option") - - client.messages.create(body=self.rule['name'], - to=self.twilio_to_number, - messaging_service_sid=self.twilio_message_service_sid) - else: - if self.twilio_from_number is None: - raise EAException("Twilio SMS requires the 'twilio_from_number' option") - - client.messages.create(body=self.rule['name'], - to=self.twilio_to_number, - from_=self.twilio_from_number) - except TwilioRestException as e: - raise EAException("Error posting to twilio: %s" % e) - - elastalert_logger.info("Trigger sent to Twilio") - - def get_info(self): - return {'type': 'twilio', - 'twilio_client_name': self.twilio_from_number} - - -class VictorOpsAlerter(Alerter): - """ Creates a VictorOps Incident for each alert """ - required_options = frozenset(['victorops_api_key', 'victorops_routing_key', 'victorops_message_type']) - - def __init__(self, rule): - super(VictorOpsAlerter, self).__init__(rule) - self.victorops_api_key = self.rule['victorops_api_key'] - self.victorops_routing_key = self.rule['victorops_routing_key'] - self.victorops_message_type = self.rule['victorops_message_type'] - self.victorops_entity_id = self.rule.get('victorops_entity_id', None) - self.victorops_entity_display_name = self.rule.get('victorops_entity_display_name', 'no entity display name') - self.url = 'https://alert.victorops.com/integrations/generic/20131114/alert/%s/%s' % ( - self.victorops_api_key, self.victorops_routing_key) - self.victorops_proxy = self.rule.get('victorops_proxy', None) - - def alert(self, matches): - body = self.create_alert_body(matches) - - # post to victorops - headers = {'content-type': 'application/json'} - # set https proxy, if it was provided - proxies = {'https': self.victorops_proxy} if self.victorops_proxy else None - payload = { - "message_type": self.victorops_message_type, - "entity_display_name": self.victorops_entity_display_name, - "monitoring_tool": "ElastAlert", - "state_message": body - } - if self.victorops_entity_id: - payload["entity_id"] = self.victorops_entity_id - - try: - response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies) - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to VictorOps: %s" % e) - elastalert_logger.info("Trigger sent to VictorOps") - - def get_info(self): - return {'type': 'victorops', - 'victorops_routing_key': self.victorops_routing_key} - - -class TelegramAlerter(Alerter): - """ Send a Telegram message via bot api for each alert """ - required_options = frozenset(['telegram_bot_token', 'telegram_room_id']) - - def __init__(self, rule): - super(TelegramAlerter, self).__init__(rule) - self.telegram_bot_token = self.rule['telegram_bot_token'] - self.telegram_room_id = self.rule['telegram_room_id'] - self.telegram_api_url = 
self.rule.get('telegram_api_url', 'api.telegram.org') - self.url = 'https://%s/bot%s/%s' % (self.telegram_api_url, self.telegram_bot_token, "sendMessage") - self.telegram_proxy = self.rule.get('telegram_proxy', None) - self.telegram_proxy_login = self.rule.get('telegram_proxy_login', None) - self.telegram_proxy_password = self.rule.get('telegram_proxy_pass', None) - - def alert(self, matches): - body = '⚠ *%s* ⚠ ```\n' % (self.create_title(matches)) - for match in matches: - body += str(BasicMatchString(self.rule, match)) - # Separate text of aggregated alerts with dashes - if len(matches) > 1: - body += '\n----------------------------------------\n' - if len(body) > 4095: - body = body[0:4000] + "\n⚠ *message was cropped according to telegram limits!* ⚠" - body += ' ```' - - headers = {'content-type': 'application/json'} - # set https proxy, if it was provided - proxies = {'https': self.telegram_proxy} if self.telegram_proxy else None - auth = HTTPProxyAuth(self.telegram_proxy_login, self.telegram_proxy_password) if self.telegram_proxy_login else None - payload = { - 'chat_id': self.telegram_room_id, - 'text': body, - 'parse_mode': 'markdown', - 'disable_web_page_preview': True - } - - try: - response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies, auth=auth) - warnings.resetwarnings() - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to Telegram: %s. Details: %s" % (e, "" if e.response is None else e.response.text)) - - elastalert_logger.info( - "Alert sent to Telegram room %s" % self.telegram_room_id) - - def get_info(self): - return {'type': 'telegram', - 'telegram_room_id': self.telegram_room_id} - - -class GoogleChatAlerter(Alerter): - """ Send a notification via Google Chat webhooks """ - required_options = frozenset(['googlechat_webhook_url']) - - def __init__(self, rule): - super(GoogleChatAlerter, self).__init__(rule) - self.googlechat_webhook_url = self.rule['googlechat_webhook_url'] - if isinstance(self.googlechat_webhook_url, str): - self.googlechat_webhook_url = [self.googlechat_webhook_url] - self.googlechat_format = self.rule.get('googlechat_format', 'basic') - self.googlechat_header_title = self.rule.get('googlechat_header_title', None) - self.googlechat_header_subtitle = self.rule.get('googlechat_header_subtitle', None) - self.googlechat_header_image = self.rule.get('googlechat_header_image', None) - self.googlechat_footer_kibanalink = self.rule.get('googlechat_footer_kibanalink', None) - - def create_header(self): - header = None - if self.googlechat_header_title: - header = { - "title": self.googlechat_header_title, - "subtitle": self.googlechat_header_subtitle, - "imageUrl": self.googlechat_header_image - } - return header - - def create_footer(self): - footer = None - if self.googlechat_footer_kibanalink: - footer = {"widgets": [{ - "buttons": [{ - "textButton": { - "text": "VISIT KIBANA", - "onClick": { - "openLink": { - "url": self.googlechat_footer_kibanalink - } - } - } - }] - }] - } - return footer - - def create_card(self, matches): - card = {"cards": [{ - "sections": [{ - "widgets": [ - {"textParagraph": {"text": self.create_alert_body(matches)}} - ]} - ]} - ]} - - # Add the optional header - header = self.create_header() - if header: - card['cards'][0]['header'] = header - - # Add the optional footer - footer = self.create_footer() - if footer: - card['cards'][0]['sections'].append(footer) - return card - - def create_basic(self, matches): - body = 
self.create_alert_body(matches) - return {'text': body} - - def alert(self, matches): - # Format message - if self.googlechat_format == 'card': - message = self.create_card(matches) - else: - message = self.create_basic(matches) - - # Post to webhook - headers = {'content-type': 'application/json'} - for url in self.googlechat_webhook_url: - try: - response = requests.post(url, data=json.dumps(message), headers=headers) - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to google chat: {}".format(e)) - elastalert_logger.info("Alert sent to Google Chat!") - - def get_info(self): - return {'type': 'googlechat', - 'googlechat_webhook_url': self.googlechat_webhook_url} - - -class GitterAlerter(Alerter): - """ Creates a Gitter activity message for each alert """ - required_options = frozenset(['gitter_webhook_url']) - - def __init__(self, rule): - super(GitterAlerter, self).__init__(rule) - self.gitter_webhook_url = self.rule['gitter_webhook_url'] - self.gitter_proxy = self.rule.get('gitter_proxy', None) - self.gitter_msg_level = self.rule.get('gitter_msg_level', 'error') - - def alert(self, matches): - body = self.create_alert_body(matches) - - # post to Gitter - headers = {'content-type': 'application/json'} - # set https proxy, if it was provided - proxies = {'https': self.gitter_proxy} if self.gitter_proxy else None - payload = { - 'message': body, - 'level': self.gitter_msg_level - } - - try: - response = requests.post(self.gitter_webhook_url, json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies) - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to Gitter: %s" % e) - elastalert_logger.info("Alert sent to Gitter") - - def get_info(self): - return {'type': 'gitter', - 'gitter_webhook_url': self.gitter_webhook_url} - - -class ServiceNowAlerter(Alerter): - """ Creates a ServiceNow alert """ - required_options = set([ - 'username', - 'password', - 'servicenow_rest_url', - 'short_description', - 'comments', - 'assignment_group', - 'category', - 'subcategory', - 'cmdb_ci', - 'caller_id' - ]) - - def __init__(self, rule): - super(ServiceNowAlerter, self).__init__(rule) - self.servicenow_rest_url = self.rule['servicenow_rest_url'] - self.servicenow_proxy = self.rule.get('servicenow_proxy', None) - - def alert(self, matches): - for match in matches: - # Parse everything into description. 
-            description = str(BasicMatchString(self.rule, match))
-
-            # Set proper headers
-            headers = {
-                "Content-Type": "application/json",
-                "Accept": "application/json;charset=utf-8"
-            }
-            proxies = {'https': self.servicenow_proxy} if self.servicenow_proxy else None
-            payload = {
-                "description": description,
-                "short_description": self.rule['short_description'],
-                "comments": self.rule['comments'],
-                "assignment_group": self.rule['assignment_group'],
-                "category": self.rule['category'],
-                "subcategory": self.rule['subcategory'],
-                "cmdb_ci": self.rule['cmdb_ci'],
-                "caller_id": self.rule["caller_id"]
-            }
-            try:
-                response = requests.post(
-                    self.servicenow_rest_url,
-                    auth=(self.rule['username'], self.rule['password']),
-                    headers=headers,
-                    data=json.dumps(payload, cls=DateTimeEncoder),
-                    proxies=proxies
-                )
-                response.raise_for_status()
-            except RequestException as e:
-                raise EAException("Error posting to ServiceNow: %s" % e)
-            elastalert_logger.info("Alert sent to ServiceNow")
-
-    def get_info(self):
-        return {'type': 'ServiceNow',
-                'self.servicenow_rest_url': self.servicenow_rest_url}
-
-
-class AlertaAlerter(Alerter):
-    """ Creates an Alerta event for each alert """
-    required_options = frozenset(['alerta_api_url'])
-
-    def __init__(self, rule):
-        super(AlertaAlerter, self).__init__(rule)
-
-        # Setup defaul parameters
-        self.url = self.rule.get('alerta_api_url', None)
-        self.api_key = self.rule.get('alerta_api_key', None)
-        self.timeout = self.rule.get('alerta_timeout', 86400)
-        self.use_match_timestamp = self.rule.get('alerta_use_match_timestamp', False)
-        self.use_qk_as_resource = self.rule.get('alerta_use_qk_as_resource', False)
-        self.verify_ssl = not self.rule.get('alerta_api_skip_ssl', False)
-        self.missing_text = self.rule.get('alert_missing_value', '<MISSING VALUE>')
-
-        # Fill up default values of the API JSON payload
-        self.severity = self.rule.get('alerta_severity', 'warning')
-        self.resource = self.rule.get('alerta_resource', 'elastalert')
-        self.environment = self.rule.get('alerta_environment', 'Production')
-        self.origin = self.rule.get('alerta_origin', 'elastalert')
-        self.service = self.rule.get('alerta_service', ['elastalert'])
-        self.text = self.rule.get('alerta_text', 'elastalert')
-        self.type = self.rule.get('alerta_type', 'elastalert')
-        self.event = self.rule.get('alerta_event', 'elastalert')
-        self.correlate = self.rule.get('alerta_correlate', [])
-        self.tags = self.rule.get('alerta_tags', [])
-        self.group = self.rule.get('alerta_group', '')
-        self.attributes_keys = self.rule.get('alerta_attributes_keys', [])
-        self.attributes_values = self.rule.get('alerta_attributes_values', [])
-        self.value = self.rule.get('alerta_value', '')
-
-    def alert(self, matches):
-        # Override the resource if requested
-        if self.use_qk_as_resource and 'query_key' in self.rule and lookup_es_key(matches[0], self.rule['query_key']):
-            self.resource = lookup_es_key(matches[0], self.rule['query_key'])
-
-        headers = {'content-type': 'application/json'}
-        if self.api_key is not None:
-            headers['Authorization'] = 'Key %s' % (self.rule['alerta_api_key'])
-        alerta_payload = self.get_json_payload(matches[0])
-
-        try:
-            response = requests.post(self.url, data=alerta_payload, headers=headers, verify=self.verify_ssl)
-            response.raise_for_status()
-        except RequestException as e:
-            raise EAException("Error posting to Alerta: %s" % e)
-        elastalert_logger.info("Alert sent to Alerta")
-
-    def create_default_title(self, matches):
-        title = '%s' % (self.rule['name'])
-        # If the rule has a query_key, add that value
-        if 'query_key' in self.rule:
-            qk = matches[0].get(self.rule['query_key'])
-            if qk:
-                title += '.%s' % (qk)
-        return title
-
-    def get_info(self):
-        return {'type': 'alerta',
-                'alerta_url': self.url}
-
-    def get_json_payload(self, match):
-        """
-        Builds the API Create Alert body, as in
-        http://alerta.readthedocs.io/en/latest/api/reference.html#create-an-alert
-
-        For the values that could have references to fields on the match, resolve those references.
-
-        """
-
-        # Using default text and event title if not defined in rule
-        alerta_text = self.rule['type'].get_match_str([match]) if self.text == '' else resolve_string(self.text, match, self.missing_text)
-        alerta_event = self.create_default_title([match]) if self.event == '' else resolve_string(self.event, match, self.missing_text)
-
-        match_timestamp = lookup_es_key(match, self.rule.get('timestamp_field', '@timestamp'))
-        if match_timestamp is None:
-            match_timestamp = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
-        if self.use_match_timestamp:
-            createTime = ts_to_dt(match_timestamp).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
-        else:
-            createTime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
-
-        alerta_payload_dict = {
-            'resource': resolve_string(self.resource, match, self.missing_text),
-            'severity': resolve_string(self.severity, match),
-            'timeout': self.timeout,
-            'createTime': createTime,
-            'type': self.type,
-            'environment': resolve_string(self.environment, match, self.missing_text),
-            'origin': resolve_string(self.origin, match, self.missing_text),
-            'group': resolve_string(self.group, match, self.missing_text),
-            'event': alerta_event,
-            'text': alerta_text,
-            'value': resolve_string(self.value, match, self.missing_text),
-            'service': [resolve_string(a_service, match, self.missing_text) for a_service in self.service],
-            'tags': [resolve_string(a_tag, match, self.missing_text) for a_tag in self.tags],
-            'correlate': [resolve_string(an_event, match, self.missing_text) for an_event in self.correlate],
-            'attributes': dict(list(zip(self.attributes_keys,
-                                        [resolve_string(a_value, match, self.missing_text) for a_value in self.attributes_values]))),
-            'rawData': self.create_alert_body([match]),
-        }
-
-        try:
-            payload = json.dumps(alerta_payload_dict, cls=DateTimeEncoder)
-        except Exception as e:
-            raise Exception("Error building Alerta request: %s" % e)
-        return payload
-
-
-class HTTPPostAlerter(Alerter):
-    """ Requested elasticsearch indices are sent by HTTP POST. Encoded with JSON. """
-    required_options = frozenset(['http_post_url'])
-
-    def __init__(self, rule):
-        super(HTTPPostAlerter, self).__init__(rule)
-        post_url = self.rule.get('http_post_url')
-        if isinstance(post_url, str):
-            post_url = [post_url]
-        self.post_url = post_url
-        self.post_proxy = self.rule.get('http_post_proxy')
-        self.post_payload = self.rule.get('http_post_payload', {})
-        self.post_static_payload = self.rule.get('http_post_static_payload', {})
-        self.post_all_values = self.rule.get('http_post_all_values', not self.post_payload)
-        self.post_http_headers = self.rule.get('http_post_headers', {})
-        self.post_ca_certs = self.rule.get('http_post_ca_certs')
-        self.post_ignore_ssl_errors = self.rule.get('http_post_ignore_ssl_errors', False)
-        self.timeout = self.rule.get('http_post_timeout', 10)
-
-    def alert(self, matches):
-        """ Each match will trigger a POST to the specified endpoint(s). """
-        for match in matches:
-            payload = match if self.post_all_values else {}
-            payload.update(self.post_static_payload)
-            for post_key, es_key in list(self.post_payload.items()):
-                payload[post_key] = lookup_es_key(match, es_key)
-            headers = {
-                "Content-Type": "application/json",
-                "Accept": "application/json;charset=utf-8"
-            }
-            if self.post_ca_certs:
-                verify = self.post_ca_certs
-            else:
-                verify = not self.post_ignore_ssl_errors
-
-            headers.update(self.post_http_headers)
-            proxies = {'https': self.post_proxy} if self.post_proxy else None
-            for url in self.post_url:
-                try:
-                    response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder),
-                                             headers=headers, proxies=proxies, timeout=self.timeout,
-                                             verify=verify)
-                    response.raise_for_status()
-                except RequestException as e:
-                    raise EAException("Error posting HTTP Post alert: %s" % e)
-            elastalert_logger.info("HTTP Post alert sent.")
-
-    def get_info(self):
-        return {'type': 'http_post',
-                'http_post_webhook_url': self.post_url}
-
-
-class LineNotifyAlerter(Alerter):
-    """ Created a Line Notify for each alert """
-    required_option = frozenset(["linenotify_access_token"])
-
-    def __init__(self, rule):
-        super(LineNotifyAlerter, self).__init__(rule)
-        self.linenotify_access_token = self.rule["linenotify_access_token"]
-
-    def alert(self, matches):
-        body = self.create_alert_body(matches)
-        # post to Line Notify
-        headers = {
-            "Content-Type": "application/x-www-form-urlencoded",
-            "Authorization": "Bearer {}".format(self.linenotify_access_token)
-        }
-        payload = {
-            "message": body
-        }
-        try:
-            response = requests.post("https://notify-api.line.me/api/notify", data=payload, headers=headers)
-            response.raise_for_status()
-        except RequestException as e:
-            raise EAException("Error posting to Line Notify: %s" % e)
-        elastalert_logger.info("Alert sent to Line Notify")
-
-    def get_info(self):
-        return {"type": "linenotify", "linenotify_access_token": self.linenotify_access_token}
-
-
-class HiveAlerter(Alerter):
-    """
-    Use matched data to create alerts containing observables in an instance of TheHive
-    """
-    required_options = set(['hive_connection', 'hive_alert_config'])
-
-    def lookup_field(self, match: dict, field_name: str, default):
-        """Populates a field with values depending on the contents of the Elastalert match
-        provided to it.
-
-        Uses a similar algorithm to that implemented to populate the `alert_text_args`.
-        First checks any fields found in the match provided, then any fields defined in
-        the rule, finally returning the default value provided if no value can be found.
- """ - field_value = lookup_es_key(match, field_name) - if field_value is None: - field_value = self.rule.get(field_name, default) - - return field_value - - # Iterate through the matches, building up a list of observables - def load_observable_artifacts(self, match: dict): - artifacts = [] - for mapping in self.rule.get('hive_observable_data_mapping', []): - for observable_type, mapping_key in mapping.items(): - data = self.lookup_field(match, mapping_key, '') - artifact = {'tlp': 2, - 'tags': [], - 'message': None, - 'dataType': observable_type, - 'data': data} - artifacts.append(artifact) - - return artifacts - - def load_custom_fields(self, custom_fields_raw: list, match: dict): - custom_fields = {} - position = 0 - - for field in custom_fields_raw: - if (isinstance(field['value'], str)): - value = self.lookup_field(match, field['value'], field['value']) - else: - value = field['value'] - - custom_fields[field['name']] = {'order': position, field['type']: value} - position += 1 - - return custom_fields - - def load_tags(self, tag_names: list, match: dict): - tag_values = set() - for tag in tag_names: - tag_value = self.lookup_field(match, tag, tag) - if isinstance(tag_value, list): - for sub_tag in tag_value: - tag_values.add(sub_tag) - else: - tag_values.add(tag_value) - - return tag_values - - def alert(self, matches): - # Build TheHive alert object, starting with some defaults, updating with any - # user-specified config - alert_config = { - 'artifacts': [], - 'customFields': {}, - 'date': int(time.time()) * 1000, - 'description': self.create_alert_body(matches), - 'sourceRef': str(uuid.uuid4())[0:6], - 'tags': [], - 'title': self.create_title(matches), - } - alert_config.update(self.rule.get('hive_alert_config', {})) - - # Iterate through each match found, populating the alert tags and observables as required - tags = set() - artifacts = [] - for match in matches: - artifacts = artifacts + self.load_observable_artifacts(match) - tags.update(self.load_tags(alert_config['tags'], match)) - - alert_config['artifacts'] = artifacts - alert_config['tags'] = list(tags) - - # Populate the customFields - alert_config['customFields'] = self.load_custom_fields(alert_config['customFields'], - matches[0]) - - # POST the alert to TheHive - connection_details = self.rule['hive_connection'] - - api_key = connection_details.get('hive_apikey', '') - hive_host = connection_details.get('hive_host', 'http://localhost') - hive_port = connection_details.get('hive_port', 9000) - proxies = connection_details.get('hive_proxies', {'http': '', 'https': ''}) - verify = connection_details.get('hive_verify', False) - - alert_body = json.dumps(alert_config, indent=4, sort_keys=True) - req = f'{hive_host}:{hive_port}/api/alert' - headers = {'Content-Type': 'application/json', - 'Authorization': f'Bearer {api_key}'} - - try: - response = requests.post(req, - headers=headers, - data=alert_body, - proxies=proxies, - verify=verify) - response.raise_for_status() - except RequestException as e: - raise EAException(f"Error posting to TheHive: {e}") - - def get_info(self): - - return { - 'type': 'hivealerter', - 'hive_host': self.rule.get('hive_connection', {}).get('hive_host', '') - } - - -class DiscordAlerter(Alerter): - """ Created a Discord for each alert """ - required_options = frozenset(['discord_webhook_url']) - - def __init__(self, rule): - super(DiscordAlerter, self).__init__(rule) - self.discord_webhook_url = self.rule['discord_webhook_url'] - self.discord_emoji_title = self.rule.get('discord_emoji_title', 
':warning:') - self.discord_proxy = self.rule.get('discord_proxy', None) - self.discord_proxy_login = self.rule.get('discord_proxy_login', None) - self.discord_proxy_password = self.rule.get('discord_proxy_password', None) - self.discord_embed_color = self.rule.get('discord_embed_color', 0xffffff) - self.discord_embed_footer = self.rule.get('discord_embed_footer', None) - self.discord_embed_icon_url = self.rule.get('discord_embed_icon_url', None) - - def alert(self, matches): - body = '' - title = u'%s' % (self.create_title(matches)) - for match in matches: - body += str(BasicMatchString(self.rule, match)) - if len(matches) > 1: - body += '\n----------------------------------------\n' - if len(body) > 2047: - body = body[0:1950] + '\n *message was cropped according to discord embed description limits!* ' - - body += '```' - - proxies = {'https': self.discord_proxy} if self.discord_proxy else None - auth = HTTPProxyAuth(self.discord_proxy_login, self.discord_proxy_password) if self.discord_proxy_login else None - headers = {"Content-Type": "application/json"} - - data = {} - data["content"] = "%s %s %s" % (self.discord_emoji_title, title, self.discord_emoji_title) - data["embeds"] = [] - embed = {} - embed["description"] = "%s" % (body) - embed["color"] = (self.discord_embed_color) - - if self.discord_embed_footer: - embed["footer"] = {} - embed["footer"]["text"] = (self.discord_embed_footer) if self.discord_embed_footer else None - embed["footer"]["icon_url"] = (self.discord_embed_icon_url) if self.discord_embed_icon_url else None - else: - None - - data["embeds"].append(embed) - - try: - response = requests.post(self.discord_webhook_url, data=json.dumps(data), headers=headers, proxies=proxies, auth=auth) - warnings.resetwarnings() - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to Discord: %s. 
Details: %s" % (e, "" if e.response is None else e.response.text)) - - elastalert_logger.info( - "Alert sent to the webhook %s" % self.discord_webhook_url) - - def get_info(self): - return {'type': 'discord', - 'discord_webhook_url': self.discord_webhook_url} - - -class DingTalkAlerter(Alerter): - """ Creates a DingTalk room message for each alert """ - required_options = frozenset(['dingtalk_access_token', 'dingtalk_msgtype']) - - def __init__(self, rule): - super(DingTalkAlerter, self).__init__(rule) - self.dingtalk_access_token = self.rule.get('dingtalk_access_token') - self.dingtalk_webhook_url = 'https://oapi.dingtalk.com/robot/send?access_token=%s' % (self.dingtalk_access_token) - self.dingtalk_msgtype = self.rule.get('dingtalk_msgtype') - self.dingtalk_single_title = self.rule.get('dingtalk_single_title', 'elastalert') - self.dingtalk_single_url = self.rule.get('dingtalk_single_url', '') - self.dingtalk_btn_orientation = self.rule.get('dingtalk_btn_orientation', '') - self.dingtalk_btns = self.rule.get('dingtalk_btns', []) - self.dingtalk_proxy = self.rule.get('dingtalk_proxy', None) - self.dingtalk_proxy_login = self.rule.get('dingtalk_proxy_login', None) - self.dingtalk_proxy_password = self.rule.get('dingtalk_proxy_pass', None) - - def format_body(self, body): - return body.encode('utf8') - - def alert(self, matches): - title = self.create_title(matches) - body = self.create_alert_body(matches) - - proxies = {'https': self.dingtalk_proxy} if self.dingtalk_proxy else None - auth = HTTPProxyAuth(self.dingtalk_proxy_login, self.dingtalk_proxy_password) if self.dingtalk_proxy_login else None - headers = { - 'Content-Type': 'application/json', - 'Accept': 'application/json;charset=utf-8' - } - - if self.dingtalk_msgtype == 'text': - # text - payload = { - 'msgtype': self.dingtalk_msgtype, - 'text': { - 'content': body - } - } - elif self.dingtalk_msgtype == 'markdown': - # markdown - payload = { - 'msgtype': self.dingtalk_msgtype, - 'markdown': { - 'title': title, - 'text': body - } - } - elif self.dingtalk_msgtype == 'single_action_card': - # singleActionCard - payload = { - 'msgtype': 'actionCard', - 'actionCard': { - 'title': title, - 'text': body, - 'singleTitle': self.dingtalk_single_title, - 'singleURL': self.dingtalk_single_url - } - } - elif self.dingtalk_msgtype == 'action_card': - # actionCard - payload = { - 'msgtype': 'actionCard', - 'actionCard': { - 'title': title, - 'text': body - } - } - if self.dingtalk_btn_orientation != '': - payload['actionCard']['btnOrientation'] = self.dingtalk_btn_orientation - if self.dingtalk_btns: - payload['actionCard']['btns'] = self.dingtalk_btns - - try: - response = requests.post(self.dingtalk_webhook_url, data=json.dumps(payload, - cls=DateTimeEncoder), headers=headers, proxies=proxies, auth=auth) - warnings.resetwarnings() - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to dingtalk: %s" % e) - - elastalert_logger.info("Trigger sent to dingtalk") - - def get_info(self): - return { - "type": "dingtalk", - "dingtalk_webhook_url": self.dingtalk_webhook_url - } - - -class ChatworkAlerter(Alerter): - """ Creates a Chatwork room message for each alert """ - required_options = frozenset(['chatwork_apikey', 'chatwork_room_id']) - - def __init__(self, rule): - super(ChatworkAlerter, self).__init__(rule) - self.chatwork_apikey = self.rule.get('chatwork_apikey') - self.chatwork_room_id = self.rule.get('chatwork_room_id') - self.url = 'https://api.chatwork.com/v2/rooms/%s/messages' % 
(self.chatwork_room_id) - self.chatwork_proxy = self.rule.get('chatwork_proxy', None) - self.chatwork_proxy_login = self.rule.get('chatwork_proxy_login', None) - self.chatwork_proxy_pass = self.rule.get('chatwork_proxy_pass', None) - - def alert(self, matches): - body = self.create_alert_body(matches) - - headers = {'X-ChatWorkToken': self.chatwork_apikey} - # set https proxy, if it was provided - proxies = {'https': self.chatwork_proxy} if self.chatwork_proxy else None - auth = HTTPProxyAuth(self.chatwork_proxy_login, self.chatwork_proxy_pass) if self.chatwork_proxy_login else None - params = {'body': body} - - try: - response = requests.post(self.url, params=params, headers=headers, proxies=proxies, auth=auth) - warnings.resetwarnings() - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to Chattwork: %s. Details: %s" % (e, "" if e.response is None else e.response.text)) - - elastalert_logger.info( - "Alert sent to Chatwork room %s" % self.chatwork_room_id) - - def get_info(self): - return { - "type": "chatwork", - "chatwork_room_id": self.chatwork_room_id - } - - -class DatadogAlerter(Alerter): - ''' Creates a Datadog Event for each alert ''' - required_options = frozenset(['datadog_api_key', 'datadog_app_key']) - - def __init__(self, rule): - super(DatadogAlerter, self).__init__(rule) - self.dd_api_key = self.rule.get('datadog_api_key', None) - self.dd_app_key = self.rule.get('datadog_app_key', None) - - def alert(self, matches): - url = 'https://api.datadoghq.com/api/v1/events' - headers = { - 'Content-Type': 'application/json', - 'DD-API-KEY': self.dd_api_key, - 'DD-APPLICATION-KEY': self.dd_app_key - } - payload = { - 'title': self.create_title(matches), - 'text': self.create_alert_body(matches) - } - try: - response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers) - response.raise_for_status() - except RequestException as e: - raise EAException('Error posting event to Datadog: %s' % e) - elastalert_logger.info('Alert sent to Datadog') - - def get_info(self): - return {'type': 'datadog'} - - -class SesAlerter(Alerter): - """ Sends an email alert using AWS SES """ - required_options = frozenset(['ses_email', 'ses_from_addr']) - - def __init__(self, *args): - super(SesAlerter, self).__init__(*args) - - self.aws_access_key_id = self.rule.get('ses_aws_access_key_id') - self.aws_secret_access_key = self.rule.get('ses_aws_secret_access_key') - self.aws_region = self.rule.get('ses_aws_region', 'us-east-1') - self.aws_profile = self.rule.get('ses_aws_profile', '') - - self.from_addr = self.rule.get('ses_from_addr') - - # Convert email to a list if it isn't already - if isinstance(self.rule['ses_email'], str): - self.rule['ses_email'] = [self.rule['ses_email']] - - # If there is a cc then also convert it a list if it isn't - cc = self.rule.get('ses_cc') - if cc and isinstance(cc, str): - self.rule['ses_cc'] = [self.rule['ses_cc']] - - # If there is a bcc then also convert it to a list if it isn't - bcc = self.rule.get('ses_bcc') - if bcc and isinstance(bcc, str): - self.rule['ses_bcc'] = [self.rule['ses_bcc']] - - # If there is a email_reply_to then also convert it to a list if it isn't - reply_to = self.rule.get('ses_email_reply_to') - if reply_to and isinstance(reply_to, str): - self.rule['ses_email_reply_to'] = [self.rule['ses_email_reply_to']] - - add_suffix = self.rule.get('ses_email_add_domain') - if add_suffix and not add_suffix.startswith('@'): - self.rule['ses_email_add_domain'] = '@' + add_suffix - - 
def alert(self, matches): - body = self.create_alert_body(matches) - - to_addr = self.rule['ses_email'] - if 'ses_email_from_field' in self.rule: - recipient = lookup_es_key(matches[0], self.rule['ses_email_from_field']) - if isinstance(recipient, str): - if '@' in recipient: - to_addr = [recipient] - elif 'ses_email_add_domain' in self.rule: - to_addr = [recipient + self.rule['ses_email_add_domain']] - elif isinstance(recipient, list): - to_addr = recipient - if 'ses_email_add_domain' in self.rule: - to_addr = [name + self.rule['ses_email_add_domain'] for name in to_addr] - - if self.aws_profile != '': - session = boto3.Session(profile_name=self.aws_profile) - else: - session = boto3.Session( - aws_access_key_id=self.aws_access_key_id, - aws_secret_access_key=self.aws_secret_access_key, - region_name=self.aws_region - ) - - client = session.client('ses') - try: - client.send_email( - Source=self.from_addr, - Destination={ - 'ToAddresses': to_addr, - 'CcAddresses': self.rule.get('ses_cc', []), - 'BccAddresses': self.rule.get('ses_bcc', []) - }, - Message={ - 'Subject': { - 'Charset': 'UTF-8', - 'Data': self.create_title(matches), - }, - 'Body': { - 'Text': { - 'Charset': 'UTF-8', - 'Data': body, - } - } - }, - ReplyToAddresses=self.rule.get('ses_email_reply_to', [])) - except Exception as e: - raise EAException("Error sending ses: %s" % (e,)) - - elastalert_logger.info("Sent ses to %s" % (to_addr,)) - - def create_default_title(self, matches): - subject = 'ElastAlert 2: %s' % (self.rule['name']) - - # If the rule has a query_key, add that value plus timestamp to subject - if 'query_key' in self.rule: - qk = matches[0].get(self.rule['query_key']) - if qk: - subject += ' - %s' % (qk) - - return subject - - def get_info(self): - return {'type': 'ses', - 'recipients': self.rule['ses_email']} diff --git a/elastalert/dingtalk.py b/elastalert/dingtalk.py new file mode 100644 index 000000000..3c5282f15 --- /dev/null +++ b/elastalert/dingtalk.py @@ -0,0 +1,99 @@ +import json +import warnings + +import requests +from requests import RequestException +from requests.auth import HTTPProxyAuth + +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import EAException, elastalert_logger + + +class DingTalkAlerter(Alerter): + """ Creates a DingTalk room message for each alert """ + required_options = frozenset(['dingtalk_access_token', 'dingtalk_msgtype']) + + def __init__(self, rule): + super(DingTalkAlerter, self).__init__(rule) + self.dingtalk_access_token = self.rule.get('dingtalk_access_token') + self.dingtalk_webhook_url = 'https://oapi.dingtalk.com/robot/send?access_token=%s' % (self.dingtalk_access_token) + self.dingtalk_msgtype = self.rule.get('dingtalk_msgtype') + self.dingtalk_single_title = self.rule.get('dingtalk_single_title', 'elastalert') + self.dingtalk_single_url = self.rule.get('dingtalk_single_url', '') + self.dingtalk_btn_orientation = self.rule.get('dingtalk_btn_orientation', '') + self.dingtalk_btns = self.rule.get('dingtalk_btns', []) + self.dingtalk_proxy = self.rule.get('dingtalk_proxy', None) + self.dingtalk_proxy_login = self.rule.get('dingtalk_proxy_login', None) + self.dingtalk_proxy_password = self.rule.get('dingtalk_proxy_pass', None) + + def format_body(self, body): + return body.encode('utf8') + + def alert(self, matches): + title = self.create_title(matches) + body = self.create_alert_body(matches) + + proxies = {'https': self.dingtalk_proxy} if self.dingtalk_proxy else None + auth = HTTPProxyAuth(self.dingtalk_proxy_login, 
self.dingtalk_proxy_password) if self.dingtalk_proxy_login else None + headers = { + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + } + + if self.dingtalk_msgtype == 'text': + # text + payload = { + 'msgtype': self.dingtalk_msgtype, + 'text': { + 'content': body + } + } + elif self.dingtalk_msgtype == 'markdown': + # markdown + payload = { + 'msgtype': self.dingtalk_msgtype, + 'markdown': { + 'title': title, + 'text': body + } + } + elif self.dingtalk_msgtype == 'single_action_card': + # singleActionCard + payload = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': title, + 'text': body, + 'singleTitle': self.dingtalk_single_title, + 'singleURL': self.dingtalk_single_url + } + } + elif self.dingtalk_msgtype == 'action_card': + # actionCard + payload = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': title, + 'text': body + } + } + if self.dingtalk_btn_orientation != '': + payload['actionCard']['btnOrientation'] = self.dingtalk_btn_orientation + if self.dingtalk_btns: + payload['actionCard']['btns'] = self.dingtalk_btns + + try: + response = requests.post(self.dingtalk_webhook_url, data=json.dumps(payload, + cls=DateTimeEncoder), headers=headers, proxies=proxies, auth=auth) + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to dingtalk: %s" % e) + + elastalert_logger.info("Trigger sent to dingtalk") + + def get_info(self): + return { + "type": "dingtalk", + "dingtalk_webhook_url": self.dingtalk_webhook_url + } diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index c25d211b2..d9a4a4e77 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -30,7 +30,7 @@ from .prometheus_wrapper import PrometheusWrapper from . import kibana -from .alerts import DebugAlerter +from elastalert.alerters.debug import DebugAlerter from .config import load_conf from .enhancements import DropMatchException from .kibana_discover import generate_kibana_discover_url diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 99d0db6ad..60824ba62 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -12,6 +12,26 @@ from jinja2 import Environment from jinja2 import FileSystemLoader +import elastalert.alerters.alerta +import elastalert.alerters.chatwork +import elastalert.alerters.command +import elastalert.alerters.datadog +import elastalert.alerters.debug +import elastalert.alerters.discord +import elastalert.alerters.gitter +import elastalert.alerters.googlechat +import elastalert.alerters.httppost +import elastalert.alerters.line +import elastalert.alerters.pagertree +import elastalert.alerters.exotel +import elastalert.alerters.servicenow +import elastalert.alerters.ses +import elastalert.alerters.stomp +import elastalert.alerters.telegram +import elastalert.alerters.twilio +import elastalert.alerters.victorops +import elastalert.dingtalk +import elastalert.thehive from . import alerts from . import enhancements from . 
import ruletypes @@ -69,32 +89,32 @@ class RulesLoader(object): 'email': EmailAlerter, 'jira': JiraAlerter, 'opsgenie': OpsGenieAlerter, - 'stomp': alerts.StompAlerter, - 'debug': alerts.DebugAlerter, - 'command': alerts.CommandAlerter, + 'stomp': elastalert.alerters.stomp.StompAlerter, + 'debug': elastalert.alerters.debug.DebugAlerter, + 'command': elastalert.alerters.command.CommandAlerter, 'sns': SnsAlerter, 'ms_teams': MsTeamsAlerter, 'slack': SlackAlerter, 'mattermost': MattermostAlerter, 'pagerduty': PagerDutyAlerter, - 'exotel': alerts.ExotelAlerter, - 'twilio': alerts.TwilioAlerter, - 'victorops': alerts.VictorOpsAlerter, - 'telegram': alerts.TelegramAlerter, - 'googlechat': alerts.GoogleChatAlerter, - 'gitter': alerts.GitterAlerter, - 'servicenow': alerts.ServiceNowAlerter, - 'alerta': alerts.AlertaAlerter, - 'post': alerts.HTTPPostAlerter, - 'pagertree': alerts.PagerTreeAlerter, - 'linenotify': alerts.LineNotifyAlerter, - 'hivealerter': alerts.HiveAlerter, + 'exotel': elastalert.alerters.exotel.ExotelAlerter, + 'twilio': elastalert.alerters.twilio.TwilioAlerter, + 'victorops': elastalert.alerters.victorops.VictorOpsAlerter, + 'telegram': elastalert.alerters.telegram.TelegramAlerter, + 'googlechat': elastalert.alerters.googlechat.GoogleChatAlerter, + 'gitter': elastalert.alerters.gitter.GitterAlerter, + 'servicenow': elastalert.alerters.servicenow.ServiceNowAlerter, + 'alerta': elastalert.alerters.alerta.AlertaAlerter, + 'post': elastalert.alerters.httppost.HTTPPostAlerter, + 'pagertree': elastalert.alerters.pagertree.PagerTreeAlerter, + 'linenotify': elastalert.alerters.line.LineNotifyAlerter, + 'hivealerter': elastalert.thehive.HiveAlerter, 'zabbix': ZabbixAlerter, - 'discord': alerts.DiscordAlerter, - 'dingtalk': alerts.DingTalkAlerter, - 'chatwork': alerts.ChatworkAlerter, - 'datadog': alerts.DatadogAlerter, - 'ses': alerts.SesAlerter + 'discord': elastalert.alerters.discord.DiscordAlerter, + 'dingtalk': elastalert.dingtalk.DingTalkAlerter, + 'chatwork': elastalert.alerters.chatwork.ChatworkAlerter, + 'datadog': elastalert.alerters.datadog.DatadogAlerter, + 'ses': elastalert.alerters.ses.SesAlerter } # A partial ordering of alert types. Relative order will be preserved in the resulting alerts list diff --git a/elastalert/thehive.py b/elastalert/thehive.py new file mode 100644 index 000000000..9910398eb --- /dev/null +++ b/elastalert/thehive.py @@ -0,0 +1,131 @@ +import json +import time +import uuid + +import requests +from requests import RequestException + +from elastalert.alerts import Alerter +from elastalert.util import lookup_es_key, EAException + + +class HiveAlerter(Alerter): + """ + Use matched data to create alerts containing observables in an instance of TheHive + """ + required_options = set(['hive_connection', 'hive_alert_config']) + + def lookup_field(self, match: dict, field_name: str, default): + """Populates a field with values depending on the contents of the Elastalert match + provided to it. + + Uses a similar algorithm to that implemented to populate the `alert_text_args`. + First checks any fields found in the match provided, then any fields defined in + the rule, finally returning the default value provided if no value can be found. 
+ """ + field_value = lookup_es_key(match, field_name) + if field_value is None: + field_value = self.rule.get(field_name, default) + + return field_value + + # Iterate through the matches, building up a list of observables + def load_observable_artifacts(self, match: dict): + artifacts = [] + for mapping in self.rule.get('hive_observable_data_mapping', []): + for observable_type, mapping_key in mapping.items(): + data = self.lookup_field(match, mapping_key, '') + artifact = {'tlp': 2, + 'tags': [], + 'message': None, + 'dataType': observable_type, + 'data': data} + artifacts.append(artifact) + + return artifacts + + def load_custom_fields(self, custom_fields_raw: list, match: dict): + custom_fields = {} + position = 0 + + for field in custom_fields_raw: + if (isinstance(field['value'], str)): + value = self.lookup_field(match, field['value'], field['value']) + else: + value = field['value'] + + custom_fields[field['name']] = {'order': position, field['type']: value} + position += 1 + + return custom_fields + + def load_tags(self, tag_names: list, match: dict): + tag_values = set() + for tag in tag_names: + tag_value = self.lookup_field(match, tag, tag) + if isinstance(tag_value, list): + for sub_tag in tag_value: + tag_values.add(sub_tag) + else: + tag_values.add(tag_value) + + return tag_values + + def alert(self, matches): + # Build TheHive alert object, starting with some defaults, updating with any + # user-specified config + alert_config = { + 'artifacts': [], + 'customFields': {}, + 'date': int(time.time()) * 1000, + 'description': self.create_alert_body(matches), + 'sourceRef': str(uuid.uuid4())[0:6], + 'tags': [], + 'title': self.create_title(matches), + } + alert_config.update(self.rule.get('hive_alert_config', {})) + + # Iterate through each match found, populating the alert tags and observables as required + tags = set() + artifacts = [] + for match in matches: + artifacts = artifacts + self.load_observable_artifacts(match) + tags.update(self.load_tags(alert_config['tags'], match)) + + alert_config['artifacts'] = artifacts + alert_config['tags'] = list(tags) + + # Populate the customFields + alert_config['customFields'] = self.load_custom_fields(alert_config['customFields'], + matches[0]) + + # POST the alert to TheHive + connection_details = self.rule['hive_connection'] + + api_key = connection_details.get('hive_apikey', '') + hive_host = connection_details.get('hive_host', 'http://localhost') + hive_port = connection_details.get('hive_port', 9000) + proxies = connection_details.get('hive_proxies', {'http': '', 'https': ''}) + verify = connection_details.get('hive_verify', False) + + alert_body = json.dumps(alert_config, indent=4, sort_keys=True) + req = f'{hive_host}:{hive_port}/api/alert' + headers = {'Content-Type': 'application/json', + 'Authorization': f'Bearer {api_key}'} + + try: + response = requests.post(req, + headers=headers, + data=alert_body, + proxies=proxies, + verify=verify) + response.raise_for_status() + except RequestException as e: + raise EAException(f"Error posting to TheHive: {e}") + + def get_info(self): + + return { + 'type': 'hivealerter', + 'hive_host': self.rule.get('hive_connection', {}).get('hive_host', '') + } diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 898d26777..22a3f84f9 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -12,22 +12,22 @@ from requests.auth import HTTPProxyAuth from requests.exceptions import RequestException -from elastalert.alerts import AlertaAlerter +from elastalert.alerters.alerta import 
AlertaAlerter from elastalert.alerts import Alerter from elastalert.alerts import BasicMatchString -from elastalert.alerts import ChatworkAlerter -from elastalert.alerts import CommandAlerter -from elastalert.alerts import DatadogAlerter -from elastalert.alerts import DingTalkAlerter -from elastalert.alerts import DiscordAlerter -from elastalert.alerts import GitterAlerter -from elastalert.alerts import GoogleChatAlerter -from elastalert.alerts import HiveAlerter -from elastalert.alerts import HTTPPostAlerter -from elastalert.alerts import LineNotifyAlerter -from elastalert.alerts import PagerTreeAlerter -from elastalert.alerts import ServiceNowAlerter -from elastalert.alerts import TelegramAlerter +from elastalert.alerters.chatwork import ChatworkAlerter +from elastalert.alerters.command import CommandAlerter +from elastalert.alerters.datadog import DatadogAlerter +from elastalert.dingtalk import DingTalkAlerter +from elastalert.alerters.discord import DiscordAlerter +from elastalert.alerters.gitter import GitterAlerter +from elastalert.alerters.googlechat import GoogleChatAlerter +from elastalert.thehive import HiveAlerter +from elastalert.alerters.httppost import HTTPPostAlerter +from elastalert.alerters.line import LineNotifyAlerter +from elastalert.alerters.pagertree import PagerTreeAlerter +from elastalert.alerters.servicenow import ServiceNowAlerter +from elastalert.alerters.telegram import TelegramAlerter from elastalert.loaders import FileRulesLoader from elastalert.alerters.jira import JiraAlerter from elastalert.alerters.jira import JiraFormattedMatchString @@ -38,7 +38,7 @@ from elastalert.alerters.slack import SlackAlerter from elastalert.alerters.teams import MsTeamsAlerter from elastalert.alerters.zabbix import ZabbixAlerter -from elastalert.alerts import VictorOpsAlerter +from elastalert.alerters.victorops import VictorOpsAlerter from elastalert.util import ts_add from elastalert.util import ts_now from elastalert.util import EAException @@ -1600,21 +1600,21 @@ def test_command(): match = {'@timestamp': '2014-01-01T00:00:00', 'somefield': 'foobarbaz', 'nested': {'field': 1}} - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: alert.alert([match]) assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) # Test command as string with formatted arg (old-style string format) rule = {'command': '/bin/test/ --arg %(somefield)s'} alert = CommandAlerter(rule) - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: alert.alert([match]) assert mock_popen.called_with('/bin/test --arg foobarbaz', stdin=subprocess.PIPE, shell=False) # Test command as string without formatted arg (old-style string format) rule = {'command': '/bin/test/foo.sh'} alert = CommandAlerter(rule) - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: alert.alert([match]) assert mock_popen.called_with('/bin/test/foo.sh', stdin=subprocess.PIPE, shell=True) @@ -1624,7 +1624,7 @@ def test_command(): alert = CommandAlerter(rule) match = {'@timestamp': '2014-01-01T00:00:00', 'somefield': 'foobarbaz'} - with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen: + with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: mock_subprocess = mock.Mock() 
         mock_popen.return_value = mock_subprocess
         mock_subprocess.communicate.return_value = (None, None)
@@ -1639,7 +1639,7 @@ def test_command():
     match = {'@timestamp': '2014-01-01T00:00:00',
             'somefield': 'foobarbaz'}
     alert_text = str(BasicMatchString(rule, match))
-    with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen:
+    with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
         mock_subprocess = mock.Mock()
         mock_popen.return_value = mock_subprocess
         mock_subprocess.communicate.return_value = (None, None)
@@ -1654,7 +1654,7 @@ def test_command():
     match = {'@timestamp': '2014-01-01T00:00:00',
             'somefield': 'foobarbaz'}
     with pytest.raises(Exception) as exception:
-        with mock.patch("elastalert.alerts.subprocess.Popen") as mock_popen:
+        with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
             mock_subprocess = mock.Mock()
             mock_popen.return_value = mock_subprocess
             mock_subprocess.wait.return_value = 1
@@ -1671,7 +1671,7 @@ def test_command():
         'somefield': 'foobarbaz'}
     alert_text = str(BasicMatchString(rule, match))
     mock_run = mock.MagicMock(side_effect=OSError)
-    with mock.patch("elastalert.alerts.subprocess.Popen", mock_run), pytest.raises(OSError) as mock_popen:
+    with mock.patch("elastalert.alerters.command.subprocess.Popen", mock_run), pytest.raises(OSError) as mock_popen:
         mock_subprocess = mock.Mock()
         mock_popen.return_value = mock_subprocess
         mock_subprocess.communicate.return_value = (None, None)

From c8856e92a08ec600e91e4749d3ae2186081467e6 Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Sat, 22 May 2021 12:00:59 +0100
Subject: [PATCH 0235/1065] Migrate all the alerter tests into individual files

---
 tests/alerters/__init__.py        |    0
 tests/alerters/alerta_test.py     |  653 +++
 tests/alerters/chatwork_test.py   |  103 +
 tests/alerters/command_test.py    |   97 +
 tests/alerters/datadog_test.py    |   69 +
 tests/alerters/dingtalk_test.py   |  294 ++
 tests/alerters/discord_test.py    |  209 +
 tests/alerters/email_test.py      |  286 ++
 tests/alerters/gitter_test.py     |  171 +
 tests/alerters/googlechat_test.py |  133 +
 tests/alerters/httppost_test.py   |  305 ++
 tests/alerters/jira_test.py       |  323 ++
 tests/alerters/line_test.py       |   63 +
 tests/alerters/mattermost_test.py |  776 ++++
 tests/alerters/opsgenie_test.py   |  873 ++++
 tests/alerters/pagerduty_test.py  |  610 +++
 tests/alerters/pagertree_test.py  |  115 +
 tests/alerters/servicenow_test.py |  146 +
 tests/alerters/slack_test.py      | 1352 ++++++
 tests/alerters/teams_test.py      |  145 +
 tests/alerters/telegram_test.py   |  145 +
 tests/alerters/thehive_test.py    |   88 +
 tests/alerters/victorops_test.py  |  112 +
 tests/alerters/zabbix_text.py     |   34 +
 tests/alerts_test.py              | 7215 +----
 25 files changed, 7240 insertions(+), 7077 deletions(-)
 create mode 100644 tests/alerters/__init__.py
 create mode 100644 tests/alerters/alerta_test.py
 create mode 100644 tests/alerters/chatwork_test.py
 create mode 100644 tests/alerters/command_test.py
 create mode 100644 tests/alerters/datadog_test.py
 create mode 100644 tests/alerters/dingtalk_test.py
 create mode 100644 tests/alerters/discord_test.py
 create mode 100644 tests/alerters/email_test.py
 create mode 100644 tests/alerters/gitter_test.py
 create mode 100644 tests/alerters/googlechat_test.py
 create mode 100644 tests/alerters/httppost_test.py
 create mode 100644 tests/alerters/jira_test.py
 create mode 100644 tests/alerters/line_test.py
 create mode 100644 tests/alerters/mattermost_test.py
 create mode 100644 tests/alerters/opsgenie_test.py
 create mode 100644 tests/alerters/pagerduty_test.py
 create mode 100644 tests/alerters/pagertree_test.py
 create mode 100644 tests/alerters/servicenow_test.py
 create mode 100644 tests/alerters/slack_test.py
 create mode 100644 tests/alerters/teams_test.py
 create mode 100644 tests/alerters/telegram_test.py
 create mode 100644 tests/alerters/thehive_test.py
 create mode 100644 tests/alerters/victorops_test.py
 create mode 100644 tests/alerters/zabbix_text.py

diff --git a/tests/alerters/__init__.py b/tests/alerters/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/alerters/alerta_test.py b/tests/alerters/alerta_test.py
new file mode 100644
index 000000000..adc537cd0
--- /dev/null
+++ b/tests/alerters/alerta_test.py
@@ -0,0 +1,653 @@
+import datetime
+import json
+
+import mock
+import pytest
+from requests import RequestException
+
+from elastalert.alerters.alerta import AlertaAlerter
+from elastalert.loaders import FileRulesLoader
+from elastalert.util import EAException
+
+
+def test_alerta_no_auth():
+    rule = {
+        'name': 'Test Alerta rule!',
+        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
+        'timeframe': datetime.timedelta(hours=1),
+        'timestamp_field': '@timestamp',
+        'alerta_api_skip_ssl': True,
+        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
+        'alerta_attributes_values': ["%(key)s", "%(logdate)s", "%(sender_ip)s"],
+        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
+        'alerta_event': "ProbeUP",
+        'alerta_group': "Health",
+        'alerta_origin': "ElastAlert 2",
+        'alerta_severity': "debug",
+        'alerta_text': "Probe %(hostname)s is UP at %(logdate)s GMT",
+        'alerta_value': "UP",
+        'type': 'any',
+        'alerta_use_match_timestamp': True,
+        'alert': 'alerta'
+    }
+
+    match = {
+        '@timestamp': '2014-10-10T00:00:00',
+        # 'key': ---- missing field on purpose, to verify that simply the text is left empty
+        # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty
+        'sender_ip': '1.1.1.1',
+        'hostname': 'aProbe'
+    }
+
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = AlertaAlerter(rule)
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        "origin": "ElastAlert 2",
+        "resource": "elastalert",
+        "severity": "debug",
+        "service": ["elastalert"],
+        "tags": [],
+        "text": "Probe aProbe is UP at  GMT",
+        "value": "UP",
+        "createTime": "2014-10-10T00:00:00.000000Z",
+        "environment": "Production",
+        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
+        "timeout": 86400,
+        "correlate": ["ProbeUP", "ProbeDOWN"],
+        "group": "Health",
+        "attributes": {"senderIP": "1.1.1.1", "hostname": "", "TimestampEvent": ""},
+        "type": "elastalert",
+        "event": "ProbeUP"
+    }
+
+    mock_post_request.assert_called_once_with(
+        alert.url,
+        data=mock.ANY,
+        headers={
+            'content-type': 'application/json'},
+        verify=False
+    )
+    assert expected_data == json.loads(
+        mock_post_request.call_args_list[0][1]['data'])
+
+
+def test_alerta_auth():
+    rule = {
+        'name': 'Test Alerta rule!',
+        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
+        'alerta_api_key': '123456789ABCDEF',
+        'timeframe': datetime.timedelta(hours=1),
+        'timestamp_field': '@timestamp',
+        'alerta_severity': "debug",
+        'type': 'any',
+        'alerta_use_match_timestamp': True,
+        'alert': 'alerta'
+    }
+
+    match = {
+        '@timestamp': '2014-10-10T00:00:00',
+        'sender_ip': '1.1.1.1',
+        'hostname': 'aProbe'
+    }
+
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = AlertaAlerter(rule)
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    mock_post_request.assert_called_once_with(
+        alert.url,
+        data=mock.ANY,
+        verify=True,
+        headers={
+            'content-type': 'application/json',
+            'Authorization': 'Key {}'.format(rule['alerta_api_key'])})
+
+
+def test_alerta_new_style():
+    rule = {
+        'name': 'Test Alerta rule!',
+        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
+        'timeframe': datetime.timedelta(hours=1),
+        'timestamp_field': '@timestamp',
+        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
+        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
+        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
+        'alerta_event': "ProbeUP",
+        'alerta_group': "Health",
+        'alerta_origin': "ElastAlert 2",
+        'alerta_severity': "debug",
+        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
+        'alerta_value': "UP",
+        'type': 'any',
+        'alerta_use_match_timestamp': True,
+        'alert': 'alerta'
+    }
+
+    match = {
+        '@timestamp': '2014-10-10T00:00:00',
+        # 'key': ---- missing field on purpose, to verify that simply the text is left empty
+        # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty
+        'sender_ip': '1.1.1.1',
+        'hostname': 'aProbe'
+    }
+
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = AlertaAlerter(rule)
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        "origin": "ElastAlert 2",
+        "resource": "elastalert",
+        "severity": "debug",
+        "service": ["elastalert"],
+        "tags": [],
+        "text": "Probe aProbe is UP at  GMT",
+        "value": "UP",
+        "createTime": "2014-10-10T00:00:00.000000Z",
+        "environment": "Production",
+        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
+        "timeout": 86400,
+        "correlate": ["ProbeUP", "ProbeDOWN"],
+        "group": "Health",
+        "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""},
+        "type": "elastalert",
+        "event": "ProbeUP"
+    }
+
+    mock_post_request.assert_called_once_with(
+        alert.url,
+        data=mock.ANY,
+        verify=True,
+        headers={
+            'content-type': 'application/json'}
+    )
+    assert expected_data == json.loads(
+        mock_post_request.call_args_list[0][1]['data'])
+
+
+def test_alerta_use_qk_as_resource():
+    rule = {
+        'name': 'Test Alerta rule!',
+        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
+        'timeframe': datetime.timedelta(hours=1),
+        'timestamp_field': '@timestamp',
+        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
+        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
+        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
+        'alerta_event': "ProbeUP",
+        'alerta_group': "Health",
+        'alerta_origin': "ElastAlert 2",
+        'alerta_severity': "debug",
+        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
+        'alerta_value': "UP",
+        'type': 'any',
+        'alerta_use_match_timestamp': True,
+        'alerta_use_qk_as_resource': True,
+        'query_key': 'hostname',
+        'alert': 'alerta'
+    }
+
+    match = {
+        '@timestamp': '2014-10-10T00:00:00',
+        'sender_ip': '1.1.1.1',
+        'hostname': 'aProbe'
+    }
+
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = AlertaAlerter(rule)
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        "origin": "ElastAlert 2",
+        "resource": "aProbe",
+        "severity": "debug",
+        "service": ["elastalert"],
+        "tags": [],
+        "text": "Probe aProbe is UP at  GMT",
+        "value": "UP",
+        "createTime": "2014-10-10T00:00:00.000000Z",
+        "environment": "Production",
+        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
+        "timeout": 86400,
+        "correlate": ["ProbeUP", "ProbeDOWN"],
+        "group": "Health",
+        "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""},
+        "type": "elastalert",
+        "event": "ProbeUP"
+    }
+
+    mock_post_request.assert_called_once_with(
+        alert.url,
+        data=mock.ANY,
+        verify=True,
+        headers={
+            'content-type': 'application/json'}
+    )
+    assert expected_data == json.loads(
+        mock_post_request.call_args_list[0][1]['data'])
+
+
+def test_alerta_timeout():
+    rule = {
+        'name': 'Test Alerta rule!',
+        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
+        'timeframe': datetime.timedelta(hours=1),
+        'timestamp_field': '@timestamp',
+        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
+        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
+        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
+        'alerta_event': "ProbeUP",
+        'alerta_group': "Health",
+        'alerta_origin': "ElastAlert 2",
+        'alerta_severity': "debug",
+        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
+        'alerta_value': "UP",
+        'type': 'any',
+        'alerta_use_match_timestamp': True,
+        'alerta_timeout': 86450,
+        'alert': 'alerta'
+    }
+
+    match = {
+        '@timestamp': '2014-10-10T00:00:00',
+        'sender_ip': '1.1.1.1',
+        'hostname': 'aProbe'
+    }
+
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = AlertaAlerter(rule)
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        "origin": "ElastAlert 2",
+        "resource": "elastalert",
+        "severity": "debug",
+        "service": ["elastalert"],
+        "tags": [],
+        "text": "Probe aProbe is UP at  GMT",
+        "value": "UP",
+        "createTime": "2014-10-10T00:00:00.000000Z",
+        "environment": "Production",
+        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
+        "timeout": 86450,
+        "correlate": ["ProbeUP", "ProbeDOWN"],
+        "group": "Health",
+        "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""},
+        "type": "elastalert",
+        "event": "ProbeUP"
+    }
+
+    mock_post_request.assert_called_once_with(
+        alert.url,
+        data=mock.ANY,
+        verify=True,
+        headers={
+            'content-type': 'application/json'}
+    )
+    assert expected_data == json.loads(
+        mock_post_request.call_args_list[0][1]['data'])
+
+
+def test_alerta_type():
+    rule = {
+        'name': 'Test Alerta rule!',
+        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
+        'timeframe': datetime.timedelta(hours=1),
+        'timestamp_field': '@timestamp',
+        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
+        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
+        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
+        'alerta_event': "ProbeUP",
+        'alerta_group': "Health",
+        'alerta_origin': "ElastAlert 2",
+        'alerta_severity': "debug",
+        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
+        'alerta_value': "UP",
+        'type': 'any',
+        'alerta_use_match_timestamp': True,
+        'alerta_type': 'elastalert2',
+        'alert': 'alerta'
+    }
+
+    match = {
+        '@timestamp': '2014-10-10T00:00:00',
+        'sender_ip': '1.1.1.1',
+        'hostname': 'aProbe'
+    }
+
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = AlertaAlerter(rule)
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        "origin": "ElastAlert 2",
+        "resource": "elastalert",
+ "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert2", + "event": "ProbeUP" + } + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) + + +def test_alerta_resource(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_resource': 'elastalert2', + 'alert': 'alerta' + } + + match = { + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + "origin": "ElastAlert 2", + "resource": "elastalert2", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" + } + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) + + +def test_alerta_service(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_service': ['elastalert2'], + 'alert': 'alerta' + } + + match = { + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) 
+ with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert2"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" + } + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) + + +def test_alerta_environment(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_environment': 'Production2', + 'alert': 'alerta' + } + + match = { + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production2", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" + } + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) + + +def test_alerta_tags(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_tags': ['elastalert2'], + 'alert': 'alerta' + } + + match = { + '@timestamp': 
'2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": ['elastalert2'], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" + } + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + assert expected_data == json.loads( + mock_post_request.call_args_list[0][1]['data']) + + +def test_alerta_ea_exception(): + try: + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_tags': ['elastalert2'], + 'alert': 'alerta' + } + + match = { + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/chatwork_test.py b/tests/alerters/chatwork_test.py new file mode 100644 index 000000000..8d60a0f37 --- /dev/null +++ b/tests/alerters/chatwork_test.py @@ -0,0 +1,103 @@ +import mock +import pytest +from requests import RequestException +from requests.auth import HTTPProxyAuth + +from elastalert.alerters.chatwork import ChatworkAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_chatwork(): + rule = { + 'name': 'Test Chatwork Rule', + 'type': 'any', + 'chatwork_apikey': 'xxxx1', + 'chatwork_room_id': 'xxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ChatworkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + } + + mock_post_request.assert_called_once_with( + 'https://api.chatwork.com/v2/rooms/xxxx2/messages', + params=mock.ANY, + headers={'X-ChatWorkToken': 'xxxx1'}, + proxies=None, + auth=None + ) + + actual_data = mock_post_request.call_args_list[0][1]['params'] + assert expected_data == actual_data + + 
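+# A minimal sketch of the arrange/act pattern shared by the tests below: build
+# the rule dict, resolve any custom modules through FileRulesLoader, construct
+# the alerter, then fire one match at a mocked requests.post. The helper name
+# is illustrative only; the tests inline these steps themselves.
+def run_chatwork_alert(rule, match):
+    """Send one match through ChatworkAlerter with requests.post mocked,
+    returning the mock so callers can inspect the captured request."""
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = ChatworkAlerter(rule)
+    with mock.patch('requests.post') as mock_post:
+        alert.alert([match])
+    return mock_post
+
+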
+def test_chatwork_proxy():
+    rule = {
+        'name': 'Test Chatwork Rule',
+        'type': 'any',
+        'chatwork_apikey': 'xxxx1',
+        'chatwork_room_id': 'xxxx2',
+        'chatwork_proxy': 'http://proxy.url',
+        'chatwork_proxy_login': 'admin',
+        'chatwork_proxy_pass': 'password',
+        'alert': []
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = ChatworkAlerter(rule)
+    match = {
+        '@timestamp': '2021-01-01T00:00:00',
+        'somefield': 'foobarbaz'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+        expected_data = {
+            'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
+        }
+
+        mock_post_request.assert_called_once_with(
+            'https://api.chatwork.com/v2/rooms/xxxx2/messages',
+            params=mock.ANY,
+            headers={'X-ChatWorkToken': 'xxxx1'},
+            proxies={'https': 'http://proxy.url'},
+            auth=HTTPProxyAuth('admin', 'password')
+        )
+
+        actual_data = mock_post_request.call_args_list[0][1]['params']
+        assert expected_data == actual_data
+
+
+def test_chatwork_ea_exception():
+    try:
+        rule = {
+            'name': 'Test Chatwork Rule',
+            'type': 'any',
+            'chatwork_apikey': 'xxxx1',
+            'chatwork_room_id': 'xxxx2',
+            'chatwork_proxy': 'http://proxy.url',
+            'chatwork_proxy_login': 'admin',
+            'chatwork_proxy_pass': 'password',
+            'alert': []
+        }
+        rules_loader = FileRulesLoader({})
+        rules_loader.load_modules(rule)
+        alert = ChatworkAlerter(rule)
+        match = {
+            '@timestamp': '2021-01-01T00:00:00',
+            'somefield': 'foobarbaz'
+        }
+        mock_run = mock.MagicMock(side_effect=RequestException)
+        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
+            alert.alert([match])
+    except EAException:
+        assert True
diff --git a/tests/alerters/command_test.py b/tests/alerters/command_test.py
new file mode 100644
index 000000000..95279e539
--- /dev/null
+++ b/tests/alerters/command_test.py
@@ -0,0 +1,97 @@
+import json
+import subprocess
+
+import mock
+import pytest
+
+from elastalert.alerters.command import CommandAlerter
+from elastalert.alerts import BasicMatchString
+from elastalert.util import EAException
+from tests.alerts_test import mock_rule
+
+
+def test_command():
+    # Test command as list with a formatted arg
+    rule = {'command': ['/bin/test/', '--arg', '%(somefield)s']}
+    alert = CommandAlerter(rule)
+    match = {'@timestamp': '2014-01-01T00:00:00',
+             'somefield': 'foobarbaz',
+             'nested': {'field': 1}}
+    with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
+        alert.alert([match])
+    # assert_called_with() performs a real check here; a bare called_with()
+    # attribute lookup on a mock is always truthy. List commands keep the
+    # configured path (trailing slash included) and run with shell=False.
+    mock_popen.assert_called_with(['/bin/test/', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False)
+
+    # Test command as string with formatted arg (old-style string format)
+    rule = {'command': '/bin/test/ --arg %(somefield)s'}
+    alert = CommandAlerter(rule)
+    with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
+        alert.alert([match])
+    # String commands run through the shell; the exact positional form handed
+    # to Popen is an implementation detail, so only the kwargs are pinned down.
+    mock_popen.assert_called_with(mock.ANY, stdin=subprocess.PIPE, shell=True)
+
+    # Test command as string without formatted arg (old-style string format)
+    rule = {'command': '/bin/test/foo.sh'}
+    alert = CommandAlerter(rule)
+    with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
+        alert.alert([match])
+    mock_popen.assert_called_with(mock.ANY, stdin=subprocess.PIPE, shell=True)
+
+    # Test command with pipe_match_json
+    rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'],
+            'pipe_match_json': True}
+    alert = CommandAlerter(rule)
+    match = {'@timestamp': '2014-01-01T00:00:00',
+             'somefield': 'foobarbaz'}
+    with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
+        mock_subprocess = mock.Mock()
+        mock_popen.return_value = mock_subprocess
+        mock_subprocess.communicate.return_value = (None, None)
+        alert.alert([match])
+    mock_popen.assert_called_with(['/bin/test/', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False)
+    # The payload encoding is the alerter's concern; just verify data was piped.
+    assert mock_subprocess.communicate.called
+
+    # Test command with pipe_alert_text
+    rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'],
+            'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'}
+    alert = CommandAlerter(rule)
+    match = {'@timestamp': '2014-01-01T00:00:00',
+             'somefield': 'foobarbaz'}
+    alert_text = str(BasicMatchString(rule, match))
+    with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
+        mock_subprocess = mock.Mock()
+        mock_popen.return_value = mock_subprocess
+        mock_subprocess.communicate.return_value = (None, None)
+        alert.alert([match])
+    mock_popen.assert_called_with(['/bin/test/', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False)
+    assert mock_subprocess.communicate.called
+
+    # Test command with fail_on_non_zero_exit
+    rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'],
+            'fail_on_non_zero_exit': True}
+    alert = CommandAlerter(rule)
+    match = {'@timestamp': '2014-01-01T00:00:00',
+             'somefield': 'foobarbaz'}
+    with pytest.raises(Exception) as exception:
+        with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
+            mock_subprocess = mock.Mock()
+            mock_popen.return_value = mock_subprocess
+            mock_subprocess.wait.return_value = 1
+            alert.alert([match])
+    # This assertion sits outside the raises block, since nothing after the
+    # exception inside that block would ever run.
+    mock_popen.assert_called_with(['/bin/test/', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False)
+    assert "Non-zero exit code while running command" in str(exception)
+
+    # Test OSError
+    try:
+        rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'],
+                'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'}
+        alert = CommandAlerter(rule)
+        match = {'@timestamp': '2014-01-01T00:00:00',
+                 'somefield': 'foobarbaz'}
+        alert_text = str(BasicMatchString(rule, match))
+        mock_run = mock.MagicMock(side_effect=OSError)
+        with mock.patch("elastalert.alerters.command.subprocess.Popen", mock_run), pytest.raises(OSError) as mock_popen:
+            mock_subprocess = mock.Mock()
+            mock_popen.return_value = mock_subprocess
+            mock_subprocess.communicate.return_value = (None, None)
+            alert.alert([match])
+    except EAException:
+        assert True
diff --git a/tests/alerters/datadog_test.py b/tests/alerters/datadog_test.py
new file mode 100644
index 000000000..956396e9d
--- /dev/null
+++ b/tests/alerters/datadog_test.py
@@ -0,0 +1,69 @@
+import json
+
+import mock
+import pytest
+from requests import RequestException
+
+from elastalert.alerters.datadog import DatadogAlerter
+from elastalert.loaders import FileRulesLoader
+from elastalert.util import EAException
+
+
+def test_datadog_alerter():
+    rule = {
+        'name': 'Test Datadog Event Alerter',
+        'type': 'any',
+        'datadog_api_key': 'test-api-key',
+        'datadog_app_key': 'test-app-key',
+        'alert': [],
+        'alert_subject': 'Test Datadog Event Alert'
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = DatadogAlerter(rule)
+    match = {
+        '@timestamp': '2021-01-01T00:00:00',
+        'name': 'datadog-test-name'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+        expected_data = {
+            'title': rule['alert_subject'],
+            'text': "Test Datadog Event Alerter\n\n@timestamp: 
2021-01-01T00:00:00\nname: datadog-test-name\n" + } + mock_post_request.assert_called_once_with( + "https://api.datadoghq.com/api/v1/events", + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'DD-API-KEY': rule['datadog_api_key'], + 'DD-APPLICATION-KEY': rule['datadog_app_key'] + } + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_datadog_alerterea_exception(): + try: + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'datadog_api_key': 'test-api-key', + 'datadog_app_key': 'test-app-key', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'name': 'datadog-test-name' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/dingtalk_test.py b/tests/alerters/dingtalk_test.py new file mode 100644 index 000000000..f91961a86 --- /dev/null +++ b/tests/alerters/dingtalk_test.py @@ -0,0 +1,294 @@ +import json + +import mock +import pytest +from requests import RequestException +from requests.auth import HTTPProxyAuth + +from elastalert.dingtalk import DingTalkAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_dingtalk_text(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'text', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'text', + 'text': {'content': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'} + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_dingtalk_markdown(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'markdown', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'markdown', + 'markdown': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert 
expected_data == actual_data + + +def test_dingtalk_single_action_card(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'single_action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'singleTitle': rule['dingtalk_single_title'], + 'singleURL': rule['dingtalk_single_url'] + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_dingtalk_action_card(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'dingtalk_btn_orientation': '1', + 'dingtalk_btns': [ + { + 'title': 'test1', + 'actionURL': 'https://xxxxx0/' + }, + { + 'title': 'test2', + 'actionURL': 'https://xxxxx1/' + } + ], + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'btnOrientation': rule['dingtalk_btn_orientation'], + 'btns': rule['dingtalk_btns'] + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_dingtalk_proxy(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'dingtalk_btn_orientation': '1', + 'dingtalk_btns': [ + { + 'title': 'test1', + 'actionURL': 'https://xxxxx0/' + }, + { + 'title': 'test2', + 'actionURL': 'https://xxxxx1/' + } + ], + 'dingtalk_proxy': 'http://proxy.url', + 'dingtalk_proxy_login': 'admin', + 'dingtalk_proxy_pass': 'password', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) 
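+        # dingtalk_proxy should surface as the 'https' entry of the proxies
+        # kwarg, with dingtalk_proxy_login/dingtalk_proxy_pass wrapped in an
+        # HTTPProxyAuth pair in the assertion below.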
+ + expected_data = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'btnOrientation': rule['dingtalk_btn_orientation'], + 'btns': rule['dingtalk_btns'] + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_dingtalk_ea_exception(): + try: + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'dingtalk_btn_orientation': '1', + 'dingtalk_btns': [ + { + 'title': 'test1', + 'actionURL': 'https://xxxxx0/' + }, + { + 'title': 'test2', + 'actionURL': 'https://xxxxx1/' + } + ], + 'dingtalk_proxy': 'http://proxy.url', + 'dingtalk_proxy_login': 'admin', + 'dingtalk_proxy_pass': 'password', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/discord_test.py b/tests/alerters/discord_test.py new file mode 100644 index 000000000..fea064774 --- /dev/null +++ b/tests/alerters/discord_test.py @@ -0,0 +1,209 @@ +import json + +import mock +import pytest +from requests import RequestException +from requests.auth import HTTPProxyAuth + +from elastalert.alerters.discord import DiscordAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_discord(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'discord_embed_footer': 'footer', + 'discord_embed_icon_url': 'http://xxxx/image.png', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'color': 0xffffff, + 'footer': { + 'text': 'footer', + 'icon_url': 'http://xxxx/image.png' + } + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_discord_not_footer(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 
'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_discord_proxy(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'discord_proxy': 'http://proxy.url', + 'discord_proxy_login': 'admin', + 'discord_proxy_password': 'password', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_discord_description_maxlength(): + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule' + ('a' * 1933) + + '\n *message was cropped according to discord embed description limits!* ```', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_discord_ea_exception(): + try: + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + 
'@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/email_test.py b/tests/alerters/email_test.py new file mode 100644 index 000000000..ec8bdf2a3 --- /dev/null +++ b/tests/alerters/email_test.py @@ -0,0 +1,286 @@ +import base64 + +import mock + +from elastalert.alerters.email import EmailAlerter +from tests.alerts_test import mock_rule + + +def test_email(): + rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'owner': 'owner_value', + 'alert_subject': 'Test alert for {0}, owned by {1}', 'alert_subject_args': ['test_term', 'owner'], 'snowman': '☃'} + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: + mock_smtp.return_value = mock.Mock() + + alert = EmailAlerter(rule) + alert.alert([{'test_term': 'test_value'}]) + expected = [mock.call('localhost', 25), + mock.call().ehlo(), + mock.call().has_extn('STARTTLS'), + mock.call().starttls(certfile=None, keyfile=None), + mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY), + mock.call().quit()] + assert mock_smtp.mock_calls == expected + + body = mock_smtp.mock_calls[4][1][2] + + assert 'Reply-To: test@example.com' in body + assert 'To: testing@test.test' in body + assert 'From: testfrom@test.test' in body + assert 'Subject: Test alert for test_value, owned by owner_value' in body + + +def test_email_from_field(): + rule = {'name': 'test alert', 'email': ['testing@test.test'], 'email_add_domain': 'example.com', + 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_from_field': 'data.user', 'owner': 'owner_value'} + # Found, without @ + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: + mock_smtp.return_value = mock.Mock() + alert = EmailAlerter(rule) + alert.alert([{'data': {'user': 'qlo'}}]) + assert mock_smtp.mock_calls[4][1][1] == ['qlo@example.com'] + + # Found, with @ + rule['email_add_domain'] = '@example.com' + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: + mock_smtp.return_value = mock.Mock() + alert = EmailAlerter(rule) + alert.alert([{'data': {'user': 'qlo'}}]) + assert mock_smtp.mock_calls[4][1][1] == ['qlo@example.com'] + + # Found, list + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: + mock_smtp.return_value = mock.Mock() + alert = EmailAlerter(rule) + alert.alert([{'data': {'user': ['qlo', 'foo']}}]) + assert mock_smtp.mock_calls[4][1][1] == ['qlo@example.com', 'foo@example.com'] + + # Not found + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: + mock_smtp.return_value = mock.Mock() + alert = EmailAlerter(rule) + alert.alert([{'data': {'foo': 'qlo'}}]) + assert mock_smtp.mock_calls[4][1][1] == ['testing@test.test'] + + # Found, wrong type + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: + mock_smtp.return_value = mock.Mock() + alert = EmailAlerter(rule) + alert.alert([{'data': {'user': 17}}]) + assert mock_smtp.mock_calls[4][1][1] == ['testing@test.test'] + + +def test_email_with_unicode_strings(): + rule = {'name': 'test alert', 'email': 'testing@test.test', 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'owner': 
'owner_value', + 'alert_subject': 'Test alert for {0}, owned by {1}', 'alert_subject_args': ['test_term', 'owner'], 'snowman': '☃'} + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: + mock_smtp.return_value = mock.Mock() + + alert = EmailAlerter(rule) + alert.alert([{'test_term': 'test_value'}]) + expected = [mock.call('localhost', 25), + mock.call().ehlo(), + mock.call().has_extn('STARTTLS'), + mock.call().starttls(certfile=None, keyfile=None), + mock.call().sendmail(mock.ANY, ['testing@test.test'], mock.ANY), + mock.call().quit()] + assert mock_smtp.mock_calls == expected + + body = mock_smtp.mock_calls[4][1][2] + + assert 'Reply-To: test@example.com' in body + assert 'To: testing@test.test' in body + assert 'From: testfrom@test.test' in body + assert 'Subject: Test alert for test_value, owned by owner_value' in body + + +def test_email_with_auth(): + rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', + 'alert_subject': 'Test alert for {0}', 'alert_subject_args': ['test_term'], 'smtp_auth_file': 'file.txt', + 'rule_file': '/tmp/foo.yaml'} + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: + with mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} + mock_smtp.return_value = mock.Mock() + alert = EmailAlerter(rule) + + alert.alert([{'test_term': 'test_value'}]) + expected = [mock.call('localhost', 25), + mock.call().ehlo(), + mock.call().has_extn('STARTTLS'), + mock.call().starttls(certfile=None, keyfile=None), + mock.call().login('someone', 'hunter2'), + mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY), + mock.call().quit()] + assert mock_smtp.mock_calls == expected + + +def test_email_with_cert_key(): + rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', + 'alert_subject': 'Test alert for {0}', 'alert_subject_args': ['test_term'], 'smtp_auth_file': 'file.txt', + 'smtp_cert_file': 'dummy/cert.crt', 'smtp_key_file': 'dummy/client.key', 'rule_file': '/tmp/foo.yaml'} + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: + with mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} + mock_smtp.return_value = mock.Mock() + alert = EmailAlerter(rule) + + alert.alert([{'test_term': 'test_value'}]) + expected = [mock.call('localhost', 25), + mock.call().ehlo(), + mock.call().has_extn('STARTTLS'), + mock.call().starttls(certfile='dummy/cert.crt', keyfile='dummy/client.key'), + mock.call().login('someone', 'hunter2'), + mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY), + mock.call().quit()] + assert mock_smtp.mock_calls == expected + + +def test_email_with_cc(): + rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', + 'cc': 'tester@testing.testing'} + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: + mock_smtp.return_value = mock.Mock() + + alert = EmailAlerter(rule) + alert.alert([{'test_term': 'test_value'}]) + expected = [mock.call('localhost', 25), + 
mock.call().ehlo(),
+                    mock.call().has_extn('STARTTLS'),
+                    mock.call().starttls(certfile=None, keyfile=None),
+                    mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test', 'tester@testing.testing'], mock.ANY),
+                    mock.call().quit()]
+        assert mock_smtp.mock_calls == expected
+
+        body = mock_smtp.mock_calls[4][1][2]
+
+        assert 'Reply-To: test@example.com' in body
+        assert 'To: testing@test.test' in body
+        assert 'CC: tester@testing.testing' in body
+        assert 'From: testfrom@test.test' in body
+
+
+def test_email_with_bcc():
+    rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test',
+            'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com',
+            'bcc': 'tester@testing.testing'}
+    with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp:
+        mock_smtp.return_value = mock.Mock()
+
+        alert = EmailAlerter(rule)
+        alert.alert([{'test_term': 'test_value'}])
+        expected = [mock.call('localhost', 25),
+                    mock.call().ehlo(),
+                    mock.call().has_extn('STARTTLS'),
+                    mock.call().starttls(certfile=None, keyfile=None),
+                    mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test', 'tester@testing.testing'], mock.ANY),
+                    mock.call().quit()]
+        assert mock_smtp.mock_calls == expected
+
+        body = mock_smtp.mock_calls[4][1][2]
+
+        assert 'Reply-To: test@example.com' in body
+        assert 'To: testing@test.test' in body
+        assert 'CC: tester@testing.testing' not in body
+        assert 'From: testfrom@test.test' in body
+
+
+def test_email_with_cc_and_bcc():
+    rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test',
+            'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com',
+            'cc': ['test1@test.com', 'test2@test.com'], 'bcc': 'tester@testing.testing'}
+    with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp:
+        mock_smtp.return_value = mock.Mock()
+
+        alert = EmailAlerter(rule)
+        alert.alert([{'test_term': 'test_value'}])
+        expected = [mock.call('localhost', 25),
+                    mock.call().ehlo(),
+                    mock.call().has_extn('STARTTLS'),
+                    mock.call().starttls(certfile=None, keyfile=None),
+                    mock.call().sendmail(
+                        mock.ANY,
+                        [
+                            'testing@test.test',
+                            'test@test.test',
+                            'test1@test.com',
+                            'test2@test.com',
+                            'tester@testing.testing'
+                        ],
+                        mock.ANY
+                    ),
+                    mock.call().quit()]
+        assert mock_smtp.mock_calls == expected
+
+        body = mock_smtp.mock_calls[4][1][2]
+
+        assert 'Reply-To: test@example.com' in body
+        assert 'To: testing@test.test' in body
+        assert 'CC: test1@test.com,test2@test.com' in body
+        assert 'From: testfrom@test.test' in body
+
+
+def test_email_with_args():
+    rule = {
+        'name': 'test alert',
+        'email': ['testing@test.test', 'test@test.test'],
+        'from_addr': 'testfrom@test.test',
+        'type': mock_rule(),
+        'timestamp_field': '@timestamp',
+        'email_reply_to': 'test@example.com',
+        'alert_subject': 'Test alert for {0} {1}',
+        'alert_subject_args': ['test_term', 'test.term'],
+        'alert_text': 'Test alert for {0} and {1} {2}',
+        'alert_text_args': ['test_arg1', 'test_arg2', 'test.arg3'],
+        'alert_missing_value': '<MISSING VALUE>'
+    }
+    with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp:
+        mock_smtp.return_value = mock.Mock()
+
+        alert = EmailAlerter(rule)
+        alert.alert([{'test_term': 'test_value', 'test_arg1': 'testing', 'test': {'term': ':)', 'arg3': '☃'}}])
+        expected = [mock.call('localhost', 25),
+                    mock.call().ehlo(),
+                    mock.call().has_extn('STARTTLS'),
+                    mock.call().starttls(certfile=None, keyfile=None),
+                    mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY),
+                    mock.call().quit()]
+        assert mock_smtp.mock_calls == expected
+
+        body = mock_smtp.mock_calls[4][1][2]
+        # Extract the MIME encoded message body
+        body_text = base64.b64decode(body.split('\n\n')[-1][:-1]).decode('utf-8')
+
+        assert 'testing' in body_text
+        assert '<MISSING VALUE>' in body_text
+        assert '☃' in body_text
+
+        assert 'Reply-To: test@example.com' in body
+        assert 'To: testing@test.test' in body
+        assert 'From: testfrom@test.test' in body
+        assert 'Subject: Test alert for test_value :)' in body
+
+
+def test_email_query_key_in_subject():
+    rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'],
+            'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com',
+            'query_key': 'username'}
+    with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp:
+        mock_smtp.return_value = mock.Mock()
+
+        alert = EmailAlerter(rule)
+        alert.alert([{'test_term': 'test_value', 'username': 'werbenjagermanjensen'}])
+
+        body = mock_smtp.mock_calls[4][1][2]
+        lines = body.split('\n')
+        found_subject = False
+        for line in lines:
+            if line.startswith('Subject'):
+                assert 'werbenjagermanjensen' in line
+                found_subject = True
+        assert found_subject
diff --git a/tests/alerters/gitter_test.py b/tests/alerters/gitter_test.py
new file mode 100644
index 000000000..5bf0e7ee7
--- /dev/null
+++ b/tests/alerters/gitter_test.py
@@ -0,0 +1,171 @@
+import json
+
+import mock
+import pytest
+from requests import RequestException
+
+from elastalert.alerters.gitter import GitterAlerter
+from elastalert.loaders import FileRulesLoader
+from elastalert.util import EAException
+
+
+def test_gitter_msg_level_default():
+    rule = {
+        'name': 'Test Gitter Rule',
+        'type': 'any',
+        'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx',
+        'alert': []
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = GitterAlerter(rule)
+    match = {
+        '@timestamp': '2021-01-01T00:00:00',
+        'somefield': 'foobarbaz'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+        expected_data = {
+            'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
+            'level': 'error'
+        }
+
+        mock_post_request.assert_called_once_with(
+            rule['gitter_webhook_url'],
+            mock.ANY,
+            headers={'content-type': 'application/json'},
+            proxies=None
+        )
+
+        actual_data = json.loads(mock_post_request.call_args_list[0][0][1])
+        assert expected_data == actual_data
+        assert 'error' in actual_data['level']
+
+
+def test_gitter_msg_level_info():
+    rule = {
+        'name': 'Test Gitter Rule',
+        'type': 'any',
+        'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx',
+        'gitter_msg_level': 'info',
+        'alert': []
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = GitterAlerter(rule)
+    match = {
+        '@timestamp': '2021-01-01T00:00:00',
+        'somefield': 'foobarbaz'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+        expected_data = {
+            'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
+            'level': 'info'
+        }
+
+        mock_post_request.assert_called_once_with(
+            rule['gitter_webhook_url'],
+            mock.ANY,
+            headers={'content-type': 'application/json'},
+            proxies=None
+        )
+
+        actual_data = json.loads(mock_post_request.call_args_list[0][0][1])
+        assert expected_data == actual_data
+        assert 'info' in actual_data['level']
+
+
+def test_gitter_msg_level_error():
+    rule = {
+        'name': 
'Test Gitter Rule', + 'type': 'any', + 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', + 'gitter_msg_level': 'error', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GitterAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'level': 'error' + } + + mock_post_request.assert_called_once_with( + rule['gitter_webhook_url'], + mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][0][1]) + assert expected_data == actual_data + assert 'error' in actual_data['level'] + + +def test_gitter_proxy(): + rule = { + 'name': 'Test Gitter Rule', + 'type': 'any', + 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', + 'gitter_msg_level': 'error', + 'gitter_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GitterAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'level': 'error' + } + + mock_post_request.assert_called_once_with( + rule['gitter_webhook_url'], + mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][0][1]) + assert expected_data == actual_data + assert 'error' in actual_data['level'] + + +def test_gitter_ea_exception(): + try: + rule = { + 'name': 'Test Gitter Rule', + 'type': 'any', + 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', + 'gitter_msg_level': 'error', + 'gitter_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GitterAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/googlechat_test.py b/tests/alerters/googlechat_test.py new file mode 100644 index 000000000..c830cc78a --- /dev/null +++ b/tests/alerters/googlechat_test.py @@ -0,0 +1,133 @@ +import json + +import mock +import pytest +from requests import RequestException + +from elastalert.alerters.googlechat import GoogleChatAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_google_chat_basic(): + rule = { + 'name': 'Test GoogleChat Rule', + 'type': 'any', + 'googlechat_webhook_url': 'http://xxxxxxx', + 'googlechat_format': 'basic', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GoogleChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'text': 'Test GoogleChat Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + + mock_post_request.assert_called_once_with( + rule['googlechat_webhook_url'], + 
data=mock.ANY, + headers={'content-type': 'application/json'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_google_chat_card(): + rule = { + 'name': 'Test GoogleChat Rule', + 'type': 'any', + 'googlechat_webhook_url': 'http://xxxxxxx', + 'googlechat_format': 'card', + 'googlechat_header_title': 'xxxx1', + 'googlechat_header_subtitle': 'xxxx2', + 'googlechat_header_image': 'http://xxxx/image.png', + 'googlechat_footer_kibanalink': 'http://xxxxx/kibana', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GoogleChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'cards': [{ + 'header': { + 'title': rule['googlechat_header_title'], + 'subtitle': rule['googlechat_header_subtitle'], + 'imageUrl': rule['googlechat_header_image'] + }, + 'sections': [ + { + 'widgets': [{ + "textParagraph": { + 'text': 'Test GoogleChat Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + }] + }, + { + 'widgets': [{ + 'buttons': [{ + 'textButton': { + 'text': 'VISIT KIBANA', + 'onClick': { + 'openLink': { + 'url': rule['googlechat_footer_kibanalink'] + } + } + } + }] + }] + } + ]} + ] + } + + mock_post_request.assert_called_once_with( + rule['googlechat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_google_chat_ea_exception(): + try: + rule = { + 'name': 'Test GoogleChat Rule', + 'type': 'any', + 'googlechat_webhook_url': 'http://xxxxxxx', + 'googlechat_format': 'card', + 'googlechat_header_title': 'xxxx1', + 'googlechat_header_subtitle': 'xxxx2', + 'googlechat_header_image': 'http://xxxx/image.png', + 'googlechat_footer_kibanalink': 'http://xxxxx/kibana', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GoogleChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/httppost_test.py b/tests/alerters/httppost_test.py new file mode 100644 index 000000000..bff4aa202 --- /dev/null +++ b/tests/alerters/httppost_test.py @@ -0,0 +1,305 @@ +import json + +import mock +import pytest +from requests import RequestException + +from elastalert.alerters.httppost import HTTPPostAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_http_alerter_with_payload(): + rule = { + 'name': 'Test HTTP Post Alerter With Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_payload': {'posted_name': 'somefield'}, + 'http_post_static_payload': {'name': 'somestaticname'}, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'posted_name': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + 
rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_with_payload_all_values(): + rule = { + 'name': 'Test HTTP Post Alerter With Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_payload': {'posted_name': 'somefield'}, + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_all_values': True, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'posted_name': 'foobarbaz', + 'name': 'somestaticname', + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_without_payload(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_proxy(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies={'https': 'http://proxy.url'}, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_timeout(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_timeout': 20, + 'alert': [] + } + rules_loader = 
FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=20, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_headers(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_headers': {'authorization': 'Basic 123dr3234'}, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8', 'authorization': 'Basic 123dr3234'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_post_ca_certs_true(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_ca_certs': True, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_post_ca_certs_false(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_ca_certs': False, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'name': 'somestaticname' + } + mock_post_request.assert_called_once_with( + rule['http_post_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, + proxies=None, + 
timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_http_alerter_post_ea_exception(): + try: + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'http_post_static_payload': {'name': 'somestaticname'}, + 'http_post_ca_certs': False, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/jira_test.py b/tests/alerters/jira_test.py new file mode 100644 index 000000000..255f0d444 --- /dev/null +++ b/tests/alerters/jira_test.py @@ -0,0 +1,323 @@ +import datetime + +import mock +import pytest +from jira import JIRAError + +from elastalert.alerters.jira import JiraFormattedMatchString, JiraAlerter +from elastalert.util import ts_now +from tests.alerts_test import mock_rule + + +def test_jira_formatted_match_string(ea): + match = {'foo': {'bar': ['one', 2, 'three']}, 'top_events_poof': 'phew'} + alert_text = str(JiraFormattedMatchString(ea.rules[0], match)) + tab = 4 * ' ' + expected_alert_text_snippet = '{code}{\n' \ + + tab + '"foo": {\n' \ + + 2 * tab + '"bar": [\n' \ + + 3 * tab + '"one",\n' \ + + 3 * tab + '2,\n' \ + + 3 * tab + '"three"\n' \ + + 2 * tab + ']\n' \ + + tab + '}\n' \ + + '}{code}' + assert expected_alert_text_snippet in alert_text + + +def test_jira(): + description_txt = "Description stuff goes here like a runbook link." + rule = { + 'name': 'test alert', + 'jira_account_file': 'jirafile', + 'type': mock_rule(), + 'jira_project': 'testproject', + 'jira_priority': 0, + 'jira_issuetype': 'testtype', + 'jira_server': 'jiraserver', + 'jira_label': 'testlabel', + 'jira_component': 'testcomponent', + 'jira_description': description_txt, + 'jira_watchers': ['testwatcher1', 'testwatcher2'], + 'timestamp_field': '@timestamp', + 'alert_subject': 'Issue {0} occurred at {1}', + 'alert_subject_args': ['test_term', '@timestamp'], + 'rule_file': '/tmp/foo.yaml' + } + + mock_priority = mock.Mock(id='5') + + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + expected = [ + mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), + mock.call().priorities(), + mock.call().fields(), + mock.call().create_issue( + issuetype={'name': 'testtype'}, + priority={'id': '5'}, + project={'key': 'testproject'}, + labels=['testlabel'], + components=[{'name': 'testcomponent'}], + description=mock.ANY, + summary='Issue test_value occurred at 2014-10-31T00:00:00', + ), + mock.call().add_watcher(mock.ANY, 'testwatcher1'), + mock.call().add_watcher(mock.ANY, 'testwatcher2'), + ] + + # We don't care about additional calls to mock_jira, such as __str__ + assert mock_jira.mock_calls[:6] == expected + assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) + + # Search called if jira_bump_tickets + 
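# With jira_bump_tickets enabled, the alerter first looks for an existing open
+    # ticket to comment on, which is why a search_issues() call is inserted
+    # into the expected mock calls below.
+    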
rule['jira_bump_tickets'] = True + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [] + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + expected.insert(3, mock.call().search_issues(mock.ANY)) + assert mock_jira.mock_calls == expected + + # Remove a field if jira_ignore_in_title set + rule['jira_ignore_in_title'] = 'test_term' + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [] + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + assert 'test_value' not in mock_jira.mock_calls[3][1][0] + + # Issue is still created if search_issues throws an exception + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.side_effect = JIRAError + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + assert mock_jira.mock_calls == expected + + # Only bump after 3d of inactivity + rule['jira_bump_after_inactivity'] = 3 + mock_issue = mock.Mock() + + # Check ticket is bumped if it is updated 4 days ago + mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [mock_issue] + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + # Check add_comment is called + assert len(mock_jira.mock_calls) == 5 + assert '().add_comment' == mock_jira.mock_calls[4][0] + + # Check ticket is bumped is not bumped if ticket is updated right now + mock_issue.fields.updated = str(ts_now()) + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [mock_issue] + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + # Only 4 
calls for mock_jira since add_comment is not called + assert len(mock_jira.mock_calls) == 4 + + # Test match resolved values + rule = { + 'name': 'test alert', + 'jira_account_file': 'jirafile', + 'type': mock_rule(), + 'owner': 'the_owner', + 'jira_project': 'testproject', + 'jira_issuetype': 'testtype', + 'jira_server': 'jiraserver', + 'jira_label': 'testlabel', + 'jira_component': 'testcomponent', + 'jira_description': "DESC", + 'jira_watchers': ['testwatcher1', 'testwatcher2'], + 'timestamp_field': '@timestamp', + 'jira_affected_user': "#gmail.the_user", + 'rule_file': '/tmp/foo.yaml' + } + mock_issue = mock.Mock() + mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) + mock_fields = [ + {'name': 'affected user', 'id': 'affected_user_id', 'schema': {'type': 'string'}} + ] + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value = mock.Mock() + mock_jira.return_value.search_issues.return_value = [mock_issue] + mock_jira.return_value.fields.return_value = mock_fields + mock_jira.return_value.priorities.return_value = [mock_priority] + alert = JiraAlerter(rule) + alert.alert([{'gmail.the_user': 'jdoe', '@timestamp': '2014-10-31T00:00:00'}]) + assert mock_jira.mock_calls[4][2]['affected_user_id'] == "jdoe" + + +def test_jira_arbitrary_field_support(): + description_txt = "Description stuff goes here like a runbook link." + rule = { + 'name': 'test alert', + 'jira_account_file': 'jirafile', + 'type': mock_rule(), + 'owner': 'the_owner', + 'jira_project': 'testproject', + 'jira_issuetype': 'testtype', + 'jira_server': 'jiraserver', + 'jira_label': 'testlabel', + 'jira_component': 'testcomponent', + 'jira_description': description_txt, + 'jira_watchers': ['testwatcher1', 'testwatcher2'], + 'jira_arbitrary_reference_string_field': '$owner$', + 'jira_arbitrary_string_field': 'arbitrary_string_value', + 'jira_arbitrary_string_array_field': ['arbitrary_string_value1', 'arbitrary_string_value2'], + 'jira_arbitrary_string_array_field_provided_as_single_value': 'arbitrary_string_value_in_array_field', + 'jira_arbitrary_number_field': 1, + 'jira_arbitrary_number_array_field': [2, 3], + 'jira_arbitrary_number_array_field_provided_as_single_value': 1, + 'jira_arbitrary_complex_field': 'arbitrary_complex_value', + 'jira_arbitrary_complex_array_field': ['arbitrary_complex_value1', 'arbitrary_complex_value2'], + 'jira_arbitrary_complex_array_field_provided_as_single_value': 'arbitrary_complex_value_in_array_field', + 'timestamp_field': '@timestamp', + 'alert_subject': 'Issue {0} occurred at {1}', + 'alert_subject_args': ['test_term', '@timestamp'], + 'rule_file': '/tmp/foo.yaml' + } + + mock_priority = mock.MagicMock(id='5') + + mock_fields = [ + {'name': 'arbitrary reference string field', 'id': 'arbitrary_reference_string_field', 'schema': {'type': 'string'}}, + {'name': 'arbitrary string field', 'id': 'arbitrary_string_field', 'schema': {'type': 'string'}}, + {'name': 'arbitrary string array field', 'id': 'arbitrary_string_array_field', 'schema': {'type': 'array', 'items': 'string'}}, + { + 'name': 'arbitrary string array field provided as single value', + 'id': 'arbitrary_string_array_field_provided_as_single_value', + 'schema': {'type': 'array', 'items': 'string'} + }, + {'name': 'arbitrary number field', 'id': 'arbitrary_number_field', 'schema': {'type': 'number'}}, + {'name': 'arbitrary number array field', 'id': 
'arbitrary_number_array_field', 'schema': {'type': 'array', 'items': 'number'}}, + { + 'name': 'arbitrary number array field provided as single value', + 'id': 'arbitrary_number_array_field_provided_as_single_value', + 'schema': {'type': 'array', 'items': 'number'} + }, + {'name': 'arbitrary complex field', 'id': 'arbitrary_complex_field', 'schema': {'type': 'ArbitraryType'}}, + { + 'name': 'arbitrary complex array field', + 'id': 'arbitrary_complex_array_field', + 'schema': {'type': 'array', 'items': 'ArbitraryType'} + }, + { + 'name': 'arbitrary complex array field provided as single value', + 'id': 'arbitrary_complex_array_field_provided_as_single_value', + 'schema': {'type': 'array', 'items': 'ArbitraryType'} + }, + ] + + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = mock_fields + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + + expected = [ + mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), + mock.call().priorities(), + mock.call().fields(), + mock.call().create_issue( + issuetype={'name': 'testtype'}, + project={'key': 'testproject'}, + labels=['testlabel'], + components=[{'name': 'testcomponent'}], + description=mock.ANY, + summary='Issue test_value occurred at 2014-10-31T00:00:00', + arbitrary_reference_string_field='the_owner', + arbitrary_string_field='arbitrary_string_value', + arbitrary_string_array_field=['arbitrary_string_value1', 'arbitrary_string_value2'], + arbitrary_string_array_field_provided_as_single_value=['arbitrary_string_value_in_array_field'], + arbitrary_number_field=1, + arbitrary_number_array_field=[2, 3], + arbitrary_number_array_field_provided_as_single_value=[1], + arbitrary_complex_field={'name': 'arbitrary_complex_value'}, + arbitrary_complex_array_field=[{'name': 'arbitrary_complex_value1'}, {'name': 'arbitrary_complex_value2'}], + arbitrary_complex_array_field_provided_as_single_value=[{'name': 'arbitrary_complex_value_in_array_field'}], + ), + mock.call().add_watcher(mock.ANY, 'testwatcher1'), + mock.call().add_watcher(mock.ANY, 'testwatcher2'), + ] + + # We don't care about additional calls to mock_jira, such as __str__ + assert mock_jira.mock_calls[:6] == expected + assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) + + # Reference an arbitrary string field that is not defined on the JIRA server + rule['jira_nonexistent_field'] = 'nonexistent field value' + + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = mock_fields + + with pytest.raises(Exception) as exception: + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + assert "Could not find a definition for the jira field 'nonexistent field'" in str(exception) + + del rule['jira_nonexistent_field'] + + # Reference a watcher that does not exist + rule['jira_watchers'] = 'invalid_watcher' + + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + 
mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = mock_fields + + # Cause add_watcher to raise, which most likely means that the user did not exist + mock_jira.return_value.add_watcher.side_effect = Exception() + + with pytest.raises(Exception) as exception: + alert = JiraAlerter(rule) + alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + assert "Exception encountered when trying to add 'invalid_watcher' as a watcher. Does the user exist?" in str(exception) diff --git a/tests/alerters/line_test.py b/tests/alerters/line_test.py new file mode 100644 index 000000000..ff5dc30f1 --- /dev/null +++ b/tests/alerters/line_test.py @@ -0,0 +1,63 @@ +import mock +import pytest +from requests import RequestException + +from elastalert.alerters.line import LineNotifyAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_line_notify(): + rule = { + 'name': 'Test LineNotify Rule', + 'type': 'any', + 'linenotify_access_token': 'xxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = LineNotifyAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'message': 'Test LineNotify Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + + mock_post_request.assert_called_once_with( + 'https://notify-api.line.me/api/notify', + data=mock.ANY, + headers={ + 'Content-Type': 'application/x-www-form-urlencoded', + 'Authorization': 'Bearer {}'.format('xxxxx') + } + ) + + actual_data = mock_post_request.call_args_list[0][1]['data'] + assert expected_data == actual_data + + +def test_line_notify_ea_exception(): + try: + rule = { + 'name': 'Test LineNotify Rule', + 'type': 'any', + 'linenotify_access_token': 'xxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = LineNotifyAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/mattermost_test.py b/tests/alerters/mattermost_test.py new file mode 100644 index 000000000..95897ce9a --- /dev/null +++ b/tests/alerters/mattermost_test.py @@ -0,0 +1,776 @@ +import json + +import mock +import pytest +from requests import RequestException + +from elastalert.alerters.mattermost import MattermostAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_mattermost_proxy(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_proxy': 'https://proxy.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 
'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies={'https': 'https://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_alert_text_only(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_not_alert_text_only(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'exclude_fields', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [] + } + ], + 'text': 'Test Mattermost Rule\n\n', + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_msg_fields(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_msg_fields': [ + { + 'title': 'Stack', + 'value': "{0} {1}", + 'short': False, + 'args': ["type", "msg.status_code"] + }, + { + 'title': 'Name', + 'value': 'static field', + 'short': False + } + ], + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as 
mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [ + {'title': 'Stack', 'value': ' ', 'short': False}, + {'title': 'Name', 'value': 'static field', 'short': False} + ], + 'text': 'Test Mattermost Rule\n\n' + } + ], 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_icon_url_override(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_icon_url_override': 'http://xxxx/icon.png', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], + 'username': 'elastalert', + 'icon_url': 'http://xxxx/icon.png' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_channel_override(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_channel_override': 'test channel', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], + 'username': 'elastalert', + 'channel': 'test channel' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_ignore_ssl_errors(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_ignore_ssl_errors': True, + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + 
rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=False, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_title_link(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_title_link': 'http://title.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'title_link': 'http://title.url' + } + ], + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_footer(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_footer': 'Mattermost footer', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'footer': 'Mattermost footer' + } + ], + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_footer_icon(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_footer_icon': 'http://icon.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + 
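+    # mattermost_footer_icon is optional; when set it should be passed through verbatim as footer_icon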
rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'footer_icon': 'http://icon.url' + } + ], + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_image_url(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_image_url': 'http://image.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'image_url': 'http://image.url' + } + ], + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_thumb_url(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_thumb_url': 'http://thumb.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'thumb_url': 'http://thumb.url' + } + ], + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_author_name(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_author_name': 
'author name', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'author_name': 'author name' + } + ], + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_author_link(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_author_link': 'http://author.link.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'author_link': 'http://author.link.url' + } + ], + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_author_icon(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_author_icon': 'http://author.icon.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'author_icon': 'http://author.icon.url' + } + ], + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_ea_exception(): + try: + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 
'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_author_icon': 'http://author.icon.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/opsgenie_test.py b/tests/alerters/opsgenie_test.py new file mode 100644 index 000000000..4f68016da --- /dev/null +++ b/tests/alerters/opsgenie_test.py @@ -0,0 +1,873 @@ +import mock + +from elastalert.alerters.opsgenie import OpsGenieAlerter +from elastalert.alerts import BasicMatchString +from tests.alerts_test import mock_rule + + +def test_opsgenie_basic(): + rule = {'name': 'testOGalert', 'opsgenie_key': 'ogkey', + 'opsgenie_account': 'genies', 'opsgenie_addr': 'https://api.opsgenie.com/v2/alerts', + 'opsgenie_recipients': ['lytics'], 'type': mock_rule()} + with mock.patch('requests.post') as mock_post: + + alert = OpsGenieAlerter(rule) + alert.alert([{'@timestamp': '2014-10-31T00:00:00'}]) + print(("mock_post: {0}".format(mock_post._mock_call_args_list))) + mcal = mock_post._mock_call_args_list + print(('mcal: {0}'.format(mcal[0]))) + assert mcal[0][0][0] == ('https://api.opsgenie.com/v2/alerts') + + assert mock_post.called + + assert mcal[0][1]['headers']['Authorization'] == 'GenieKey ogkey' + assert mcal[0][1]['json']['source'] == 'ElastAlert' + assert mcal[0][1]['json']['responders'] == [{'username': 'lytics', 'type': 'user'}] + assert mcal[0][1]['json']['source'] == 'ElastAlert' + + +def test_opsgenie_frequency(): + rule = {'name': 'testOGalert', 'opsgenie_key': 'ogkey', + 'opsgenie_account': 'genies', 'opsgenie_addr': 'https://api.opsgenie.com/v2/alerts', + 'opsgenie_recipients': ['lytics'], 'type': mock_rule(), + 'filter': [{'query': {'query_string': {'query': '*hihi*'}}}], + 'alert': 'opsgenie'} + with mock.patch('requests.post') as mock_post: + + alert = OpsGenieAlerter(rule) + alert.alert([{'@timestamp': '2014-10-31T00:00:00'}]) + + assert alert.get_info()['recipients'] == rule['opsgenie_recipients'] + + print(("mock_post: {0}".format(mock_post._mock_call_args_list))) + mcal = mock_post._mock_call_args_list + print(('mcal: {0}'.format(mcal[0]))) + assert mcal[0][0][0] == ('https://api.opsgenie.com/v2/alerts') + + assert mock_post.called + + assert mcal[0][1]['headers']['Authorization'] == 'GenieKey ogkey' + assert mcal[0][1]['json']['source'] == 'ElastAlert' + assert mcal[0][1]['json']['responders'] == [{'username': 'lytics', 'type': 'user'}] + assert mcal[0][1]['json']['source'] == 'ElastAlert' + assert mcal[0][1]['json']['source'] == 'ElastAlert' + + +def test_opsgenie_alert_routing(): + rule = {'name': 'testOGalert', 'opsgenie_key': 'ogkey', + 'opsgenie_account': 'genies', 'opsgenie_addr': 'https://api.opsgenie.com/v2/alerts', + 'opsgenie_recipients': ['{RECEIPIENT_PREFIX}'], 'opsgenie_recipients_args': {'RECEIPIENT_PREFIX': 'recipient'}, + 'type': mock_rule(), + 'filter': [{'query': {'query_string': {'query': '*hihi*'}}}], + 'alert': 'opsgenie', + 'opsgenie_teams': ['{TEAM_PREFIX}-Team'], 'opsgenie_teams_args': {'TEAM_PREFIX': 'team'}} + with mock.patch('requests.post'): + + alert = OpsGenieAlerter(rule) + alert.alert([{'@timestamp': '2014-10-31T00:00:00', 'team': "Test", 
'recipient': "lytics"}]) + + assert alert.get_info()['teams'] == ['Test-Team'] + assert alert.get_info()['recipients'] == ['lytics'] + + +def test_opsgenie_default_alert_routing(): + rule = {'name': 'testOGalert', 'opsgenie_key': 'ogkey', + 'opsgenie_account': 'genies', 'opsgenie_addr': 'https://api.opsgenie.com/v2/alerts', + 'opsgenie_recipients': ['{RECEIPIENT_PREFIX}'], 'opsgenie_recipients_args': {'RECEIPIENT_PREFIX': 'recipient'}, + 'type': mock_rule(), + 'filter': [{'query': {'query_string': {'query': '*hihi*'}}}], + 'alert': 'opsgenie', + 'opsgenie_teams': ['{TEAM_PREFIX}-Team'], + 'opsgenie_default_receipients': ["devops@test.com"], 'opsgenie_default_teams': ["Test"] + } + with mock.patch('requests.post'): + + alert = OpsGenieAlerter(rule) + alert.alert([{'@timestamp': '2014-10-31T00:00:00', 'team': "Test"}]) + + assert alert.get_info()['teams'] == ['{TEAM_PREFIX}-Team'] + assert alert.get_info()['recipients'] == ['devops@test.com'] + + +def test_opsgenie_details_with_constant_value(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': {'Foo': 'Bar'} + } + match = { + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Foo': 'Bar'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_details_with_field(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': {'Foo': {'field': 'message'}} + } + match = { + 'message': 'Bar', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Foo': 'Bar'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_details_with_nested_field(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': {'Foo': {'field': 'nested.field'}} + } + match = { + 'nested': { + 'field': 'Bar' + }, + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + 
json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Foo': 'Bar'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_details_with_non_string_field(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Age': {'field': 'age'}, + 'Message': {'field': 'message'} + } + } + match = { + 'age': 10, + 'message': { + 'format': 'The cow goes %s!', + 'arg0': 'moo' + } + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': { + 'Age': '10', + 'Message': "{'format': 'The cow goes %s!', 'arg0': 'moo'}" + }, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_details_with_missing_field(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + } + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_details_with_environment_variable_replacement(environ): + environ.update({ + 'TEST_VAR': 'Bar' + }) + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': {'Foo': '$TEST_VAR'} + } + match = { + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Foo': 'Bar'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + 
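+        # the $TEST_VAR reference should have been expanded from the environment into the details value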
actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_tags(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_tags': ['test1', 'test2'] + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['test1', 'test2', 'ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_message(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_message': 'test1' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'test1', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_alias(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_alias': 'test1' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies', + 'alias': 'test1' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_subject(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 
'Missing': {'field': 'missing'} + }, + 'opsgenie_subject': 'test1' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'test1', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_subject_args(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_subject': 'test', + 'opsgenie_subject_args': ['Testing', 'message'] + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'test', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_p1(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P1' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P1', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_p2(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P2' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 
'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P2', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_p3(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P3' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P3', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_p4(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P4' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': 'P4', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_p5(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'P5' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 
'priority': 'P5', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_priority_none(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_priority': 'abc' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json + + +def test_opsgenie_proxy(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_proxy': 'https://proxy.url' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies={'https': 'https://proxy.url'} + ) + + expected_json = { + 'description': BasicMatchString(rule, match).__str__(), + 'details': {'Message': 'Testing'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json diff --git a/tests/alerters/pagerduty_test.py b/tests/alerters/pagerduty_test.py new file mode 100644 index 000000000..6b268602b --- /dev/null +++ b/tests/alerters/pagerduty_test.py @@ -0,0 +1,610 @@ +import json + +import mock +import pytest +from requests import RequestException + +from elastalert.alerters.pagerduty import PagerDutyAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_pagerduty_alerter(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'description': 'Test PD Rule', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': '', + 'service_key': 'magicalbadgers', + } + 
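+        # without pagerduty_api_version set, events go to the v1 generic create_event endpoint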
mock_post_request.assert_called_once_with('https://events.pagerduty.com/generic/2010-04-15/create_event.json', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_v2(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_v2_payload_class_args(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'somefield', + 'pagerduty_v2_payload_class_args': ['@timestamp', 'somefield'], + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'payload': { + 'class': 'somefield', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_v2_payload_component_args(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 
'pagerduty_v2_payload_component': 'somefield', + 'pagerduty_v2_payload_component_args': ['@timestamp', 'somefield'], + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'somefield', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_v2_payload_group_args(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'somefield', + 'pagerduty_v2_payload_group_args': ['@timestamp', 'somefield'], + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'somefield', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_v2_payload_source_args(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'somefield', + 'pagerduty_v2_payload_source_args': ['@timestamp', 'somefield'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with 
mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'somefield', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_v2_payload_custom_details(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'pagerduty_v2_payload_custom_details': {'a': 'somefield', 'c': 'f'}, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'a': 'foobarbaz', + 'c': None, + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_v2_payload_include_all_info(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': 'error', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'pagerduty_v2_payload_include_all_info': False, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': 'error', + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': {}, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', + } + 
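+        # pagerduty_v2_payload_include_all_info is False here, so the alerter copies no match
+        # summary into custom_details, which is why the expected payload above carries an empty dict.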
mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_custom_incident_key(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom key', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'description': 'Test PD Rule', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom key', + 'service_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_custom_incident_key_with_args(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['somefield'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'description': 'Test PD Rule', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_custom_alert_subject(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'alert_subject': 'Hungry kittens', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['somefield'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'description': 'Hungry kittens', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_custom_alert_subject_with_args(): + 
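+    # Two independent format strings are exercised below: alert_subject_args fills '{0} kittens'
+    # from 'somefield', while pagerduty_incident_key_args fills 'custom {0}' from 'someotherfield'.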
rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'alert_subject': '{0} kittens', + 'alert_subject_args': ['somefield'], + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['someotherfield'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'Stinky', + 'someotherfield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'description': 'Stinky kittens', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinky\nsomeotherfield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_custom_alert_subject_with_args_specifying_trigger(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'alert_subject': '{0} kittens', + 'alert_subject_args': ['somefield'], + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_event_type': 'trigger', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['someotherfield'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'Stinkiest', + 'someotherfield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'description': 'Stinkiest kittens', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinkiest\nsomeotherfield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_alerter_proxy(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'alert_subject': '{0} kittens', + 'alert_subject_args': ['somefield'], + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_event_type': 'trigger', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['someotherfield'], + 'pagerduty_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'Stinkiest', + 'someotherfield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'description': 'Stinkiest kittens', + 'details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinkiest\nsomeotherfield: foobarbaz\n' + }, + 'event_type': 'trigger', + 'incident_key': 'custom foobarbaz', + 'service_key': 'magicalbadgers', + } + 
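+        # The single pagerduty_proxy setting should reach requests.post as an HTTPS-only proxy mapping.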
mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'}) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_ea_exception(): + try: + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'alert_subject': '{0} kittens', + 'alert_subject_args': ['somefield'], + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_event_type': 'trigger', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_incident_key': 'custom {0}', + 'pagerduty_incident_key_args': ['someotherfield'], + 'pagerduty_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'Stinkiest', + 'someotherfield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/pagertree_test.py b/tests/alerters/pagertree_test.py new file mode 100644 index 000000000..99844e56f --- /dev/null +++ b/tests/alerters/pagertree_test.py @@ -0,0 +1,115 @@ +import json +import re +import uuid + +import mock +import pytest +from requests import RequestException + +from elastalert.alerters.pagertree import PagerTreeAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_pagertree(): + rule = { + 'name': 'Test PagerTree Rule', + 'type': 'any', + 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerTreeAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'event_type': 'create', + 'Id': str(uuid.uuid4()), + 'Title': 'Test PagerTree Rule', + 'Description': 'Test PagerTree Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + + mock_post_request.assert_called_once_with( + rule['pagertree_integration_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + uuid4hex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) + match = uuid4hex.match(actual_data['Id']) + assert bool(match) is True + assert expected_data["event_type"] == actual_data['event_type'] + assert expected_data["Title"] == actual_data['Title'] + assert expected_data["Description"] == actual_data['Description'] + + +def test_pagertree_proxy(): + rule = { + 'name': 'Test PagerTree Rule', + 'type': 'any', + 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx', + 'pagertree_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerTreeAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'event_type': 'create', + 'Id': str(uuid.uuid4()), + 'Title': 'Test PagerTree Rule', + 'Description': 'Test PagerTree Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + + 
mock_post_request.assert_called_once_with( + rule['pagertree_integration_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + uuid4hex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) + match = uuid4hex.match(actual_data['Id']) + assert bool(match) is True + assert expected_data["event_type"] == actual_data['event_type'] + assert expected_data["Title"] == actual_data['Title'] + assert expected_data["Description"] == actual_data['Description'] + + +def test_pagertree_ea_exception(): + try: + rule = { + 'name': 'Test PagerTree Rule', + 'type': 'any', + 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx', + 'pagertree_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerTreeAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/servicenow_test.py b/tests/alerters/servicenow_test.py new file mode 100644 index 000000000..2b0d5bbe9 --- /dev/null +++ b/tests/alerters/servicenow_test.py @@ -0,0 +1,146 @@ +import json + +import mock +import pytest +from requests import RequestException + +from elastalert.alerters.servicenow import ServiceNowAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_service_now(): + rule = { + 'name': 'Test ServiceNow Rule', + 'type': 'any', + 'username': 'ServiceNow username', + 'password': 'ServiceNow password', + 'servicenow_rest_url': 'https://xxxxxxxxxx', + 'short_description': 'ServiceNow short_description', + 'comments': 'ServiceNow comments', + 'assignment_group': 'ServiceNow assignment_group', + 'category': 'ServiceNow category', + 'subcategory': 'ServiceNow subcategory', + 'cmdb_ci': 'ServiceNow cmdb_ci', + 'caller_id': 'ServiceNow caller_id', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ServiceNowAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'description': 'Test ServiceNow Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'short_description': rule['short_description'], + 'comments': rule['comments'], + 'assignment_group': rule['assignment_group'], + 'category': rule['category'], + 'subcategory': rule['subcategory'], + 'cmdb_ci': rule['cmdb_ci'], + 'caller_id': rule['caller_id'] + } + + mock_post_request.assert_called_once_with( + rule['servicenow_rest_url'], + auth=(rule['username'], rule['password']), + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + data=mock.ANY, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_service_now_proxy(): + rule = { + 'name': 'Test ServiceNow Rule', + 'type': 'any', + 'username': 'ServiceNow username', + 'password': 'ServiceNow password', + 'servicenow_rest_url': 'https://xxxxxxxxxx', + 'short_description': 'ServiceNow short_description', + 'comments': 
'ServiceNow comments', + 'assignment_group': 'ServiceNow assignment_group', + 'category': 'ServiceNow category', + 'subcategory': 'ServiceNow subcategory', + 'cmdb_ci': 'ServiceNow cmdb_ci', + 'caller_id': 'ServiceNow caller_id', + 'servicenow_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ServiceNowAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'description': 'Test ServiceNow Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'short_description': rule['short_description'], + 'comments': rule['comments'], + 'assignment_group': rule['assignment_group'], + 'category': rule['category'], + 'subcategory': rule['subcategory'], + 'cmdb_ci': rule['cmdb_ci'], + 'caller_id': rule['caller_id'] + } + + mock_post_request.assert_called_once_with( + rule['servicenow_rest_url'], + auth=(rule['username'], rule['password']), + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + data=mock.ANY, + proxies={'https': 'http://proxy.url'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_service_now_ea_exception(): + try: + rule = { + 'name': 'Test ServiceNow Rule', + 'type': 'any', + 'username': 'ServiceNow username', + 'password': 'ServiceNow password', + 'servicenow_rest_url': 'https://xxxxxxxxxx', + 'short_description': 'ServiceNow short_description', + 'comments': 'ServiceNow comments', + 'assignment_group': 'ServiceNow assignment_group', + 'category': 'ServiceNow category', + 'subcategory': 'ServiceNow subcategory', + 'cmdb_ci': 'ServiceNow cmdb_ci', + 'caller_id': 'ServiceNow caller_id', + 'servicenow_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ServiceNowAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/slack_test.py b/tests/alerters/slack_test.py new file mode 100644 index 000000000..5d6080ab7 --- /dev/null +++ b/tests/alerters/slack_test.py @@ -0,0 +1,1352 @@ +import json + +import mock +import pytest +from requests import RequestException + +from elastalert.alerters.slack import SlackAlerter +from elastalert.alerts import BasicMatchString +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_slack_uses_custom_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 
'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_custom_timeout(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert_subject': 'Cool subject', + 'alert': [], + 'slack_timeout': 20 + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=20 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_rule_name_when_custom_title_is_not_provided(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': ['http://please.dontgohere.slack'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none', + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_custom_slack_channel(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': ['http://please.dontgohere.slack'], + 'slack_channel_override': '#test-alert', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '#test-alert', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none', + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_uses_list_of_custom_slack_channel(): + rule 
= { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': ['http://please.dontgohere.slack'], + 'slack_channel_override': ['#test-alert', '#test-alert2'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data1 = { + 'username': 'elastalert', + 'channel': '#test-alert', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + expected_data2 = { + 'username': 'elastalert', + 'channel': '#test-alert2', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_with( + rule['slack_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data2 == json.loads(mock_post_request.call_args_list[1][1]['data']) + + +def test_slack_attach_kibana_discover_url_when_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'kibana_discover_url': 'http://kibana#discover' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + }, + { + 'color': '#ec4b98', + 'title': 'Discover in Kibana', + 'title_link': 'http://kibana#discover' + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_attach_kibana_discover_url_when_not_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + 
headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_kibana_discover_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_kibana_discover_title': 'Click to discover in Kibana', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'kibana_discover_url': 'http://kibana#discover' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + }, + { + 'color': '#ec4b98', + 'title': 'Click to discover in Kibana', + 'title_link': 'http://kibana#discover' + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_kibana_discover_color(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_attach_kibana_discover_url': True, + 'slack_kibana_discover_color': 'blue', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'kibana_discover_url': 'http://kibana#discover' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'parse': 'none', + 'text': '', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + }, + { + 'color': 'blue', + 'title': 'Discover in Kibana', + 'title_link': 'http://kibana#discover' + } + ], + 'icon_emoji': ':ghost:', + 'channel': '' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_slack_ignore_ssl_errors(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_ignore_ssl_errors': True, + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=False, + timeout=10 + ) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': 
BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_proxy(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_proxy': 'http://proxy.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': rule['slack_proxy']}, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_username_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'test elastalert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'test elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_title_link(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_title_link': 'http://slack.title.link', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'title_link': 'http://slack.title.link' + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert 
expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_title': 'slack title', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'slack title', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_icon_url_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_icon_url_override': 'http://slack.icon.url.override', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_url': 'http://slack.icon.url.override', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_msg_color(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_msg_color': 'good', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'good', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_parse_override(): + rule = { + 'name': 'Test 
Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_parse_override': 'full', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'full' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_text_string(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_text_string': 'text str', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': 'text str', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_alert_fields(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_alert_fields': [ + { + 'title': 'Host', + 'value': 'somefield', + 'short': 'true' + }, + { + 'title': 'Sensors', + 'value': '@timestamp', + 'short': 'true' + } + ], + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': + [ + { + 'short': 'true', + 'title': 'Host', + 'value': 'foobarbaz' + }, + { + 'short': 'true', + 'title': 'Sensors', + 'value': '2016-01-01T00:00:00' + } + ], + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert 
expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_ca_certs(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_ca_certs': True, + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_footer(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_footer': 'Elastic Alerts', + 'slack_footer_icon': 'http://footer.icon.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'footer': 'Elastic Alerts', + 'footer_icon': 'http://footer.icon.url' + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_image_url(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_image_url': 'http://image.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'image_url': 'http://image.url', + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == 
json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_thumb_url(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_thumb_url': 'http://thumb.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'thumb_url': 'http://thumb.url', + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_author_name(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_author_name': 'author name', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'author_name': 'author name', + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_author_link(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_author_link': 'http://author.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'author_link': 'http://author.url', + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == 
json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_author_icon(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_author_icon': 'http://author.icon', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'author_icon': 'http://author.icon', + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_msg_pretext(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_msg_pretext': 'pretext value', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [], + 'pretext': 'pretext value' + } + ], + 'text': '', + 'parse': 'none' + } + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_slack_ea_exception(): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_msg_pretext': 'pretext value', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/teams_test.py b/tests/alerters/teams_test.py new file mode 100644 index 000000000..a55a402b3 --- /dev/null +++ b/tests/alerters/teams_test.py @@ -0,0 +1,145 @@ +import json + +import mock +import pytest +from requests import RequestException + +from elastalert.alerters.teams import MsTeamsAlerter +from elastalert.alerts import BasicMatchString +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_ms_teams(): + 
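+    # Builds a minimal MessageCard payload: 'summary' comes from ms_teams_alert_summary,
+    # 'title' from alert_subject, and 'text' from the rendered match string.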
rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 'ms_teams_alert_summary': 'Alert from ElastAlert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + '@type': 'MessageCard', + '@context': 'http://schema.org/extensions', + 'summary': rule['ms_teams_alert_summary'], + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__() + } + mock_post_request.assert_called_once_with( + rule['ms_teams_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_ms_teams_uses_color_and_fixed_width_text(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 'ms_teams_alert_summary': 'Alert from ElastAlert', + 'ms_teams_alert_fixed_width': True, + 'ms_teams_theme_color': '#124578', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + body = BasicMatchString(rule, match).__str__() + body = body.replace('`', "'") + body = "```{0}```".format('```\n\n```'.join(x for x in body.split('\n'))).replace('\n``````', '') + expected_data = { + '@type': 'MessageCard', + '@context': 'http://schema.org/extensions', + 'summary': rule['ms_teams_alert_summary'], + 'title': rule['alert_subject'], + 'themeColor': '#124578', + 'text': body + } + mock_post_request.assert_called_once_with( + rule['ms_teams_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_ms_teams_proxy(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 'ms_teams_alert_summary': 'Alert from ElastAlert', + 'ms_teams_proxy': 'https://test.proxy.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + '@type': 'MessageCard', + '@context': 'http://schema.org/extensions', + 'summary': rule['ms_teams_alert_summary'], + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__() + } + mock_post_request.assert_called_once_with( + rule['ms_teams_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': rule['ms_teams_proxy']} + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_ms_teams_ea_exception(): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 'ms_teams_alert_summary': 'Alert from ElastAlert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + 
rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/telegram_test.py b/tests/alerters/telegram_test.py new file mode 100644 index 000000000..a7c9b38f6 --- /dev/null +++ b/tests/alerters/telegram_test.py @@ -0,0 +1,145 @@ +import json + +import mock +import pytest +from requests import RequestException +from requests.auth import HTTPProxyAuth + +from elastalert.alerters.telegram import TelegramAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_telegram(): + rule = { + 'name': 'Test Telegram Rule', + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'chat_id': rule['telegram_room_id'], + 'text': '⚠ *Test Telegram Rule* ⚠ ```\nTest Telegram Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n ```', + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + mock_post_request.assert_called_once_with( + 'https://api.telegram.org/botxxxxx1/sendMessage', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_telegram_proxy(): + rule = { + 'name': 'Test Telegram Rule', + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'telegram_proxy': 'http://proxy.url', + 'telegram_proxy_login': 'admin', + 'telegram_proxy_pass': 'password', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'chat_id': rule['telegram_room_id'], + 'text': '⚠ *Test Telegram Rule* ⚠ ```\nTest Telegram Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n ```', + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + mock_post_request.assert_called_once_with( + 'https://api.telegram.org/botxxxxx1/sendMessage', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_telegram_text_maxlength(): + rule = { + 'name': 'Test Telegram Rule' + ('a' * 3985), + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'chat_id': rule['telegram_room_id'], + 'text': '⚠ *Test Telegram Rule' + ('a' * 3979) + + 
'\n⚠ *message was cropped according to telegram limits!* ⚠ ```', + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + mock_post_request.assert_called_once_with( + 'https://api.telegram.org/botxxxxx1/sendMessage', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_telegram_ea_exception(): + try: + rule = { + 'name': 'Test Telegram Rule' + ('a' * 3985), + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True diff --git a/tests/alerters/thehive_test.py b/tests/alerters/thehive_test.py new file mode 100644 index 000000000..a2a90e12e --- /dev/null +++ b/tests/alerters/thehive_test.py @@ -0,0 +1,88 @@ +import json + +import mock + +from elastalert.loaders import FileRulesLoader +from elastalert.thehive import HiveAlerter + + +def test_thehive_alerter(): + rule = {'alert': [], + 'alert_text': '', + 'alert_text_type': 'alert_text_only', + 'description': 'test', + 'hive_alert_config': {'customFields': [{'name': 'test', + 'type': 'string', + 'value': 'test.ip'}], + 'follow': True, + 'severity': 2, + 'source': 'elastalert', + 'status': 'New', + 'tags': ['test.ip'], + 'tlp': 3, + 'type': 'external'}, + 'hive_connection': {'hive_apikey': '', + 'hive_host': 'https://localhost', + 'hive_port': 9000}, + 'hive_observable_data_mapping': [{'ip': 'test.ip'}], + 'name': 'test-thehive', + 'tags': ['a', 'b'], + 'type': 'any'} + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HiveAlerter(rule) + match = { + "test": { + "ip": "127.0.0.1" + }, + "@timestamp": "2021-05-09T14:43:30", + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + "artifacts": [ + { + "data": "127.0.0.1", + "dataType": "ip", + "message": None, + "tags": [], + "tlp": 2 + } + ], + "customFields": { + "test": { + "order": 0, + "string": "127.0.0.1" + } + }, + "description": "\n\n", + "follow": True, + "severity": 2, + "source": "elastalert", + "status": "New", + "tags": [ + "127.0.0.1" + ], + "title": "test-thehive", + "tlp": 3, + "type": "external" + } + + conn_config = rule['hive_connection'] + alert_url = f"{conn_config['hive_host']}:{conn_config['hive_port']}/api/alert" + mock_post_request.assert_called_once_with( + alert_url, + data=mock.ANY, + headers={'Content-Type': 'application/json', + 'Authorization': 'Bearer '}, + verify=False, + proxies={'http': '', 'https': ''} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + # The date and sourceRef are autogenerated, so we can't expect them to be a particular value + del actual_data['date'] + del actual_data['sourceRef'] + + assert expected_data == actual_data diff --git a/tests/alerters/victorops_test.py b/tests/alerters/victorops_test.py new file mode 100644 index 000000000..ec7895840 --- /dev/null +++ b/tests/alerters/victorops_test.py @@ -0,0 +1,112 @@ +import json + +import mock +import pytest +from requests import RequestException + +from elastalert.alerters.victorops 
import VictorOpsAlerter
+from elastalert.loaders import FileRulesLoader
+from elastalert.util import EAException
+
+
+def test_victor_ops():
+    rule = {
+        'name': 'Test VictorOps Rule',
+        'type': 'any',
+        'victorops_api_key': 'xxxx1',
+        'victorops_routing_key': 'xxxx2',
+        'victorops_message_type': 'INFO',
+        'victorops_entity_display_name': 'no entity display name',
+        'alert': []
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = VictorOpsAlerter(rule)
+    match = {
+        '@timestamp': '2021-01-01T00:00:00',
+        'somefield': 'foobarbaz'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        'message_type': rule['victorops_message_type'],
+        'entity_display_name': rule['victorops_entity_display_name'],
+        'monitoring_tool': 'ElastAlert',
+        'state_message': 'Test VictorOps Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'
+    }
+
+    mock_post_request.assert_called_once_with(
+        'https://alert.victorops.com/integrations/generic/20131114/alert/xxxx1/xxxx2',
+        data=mock.ANY,
+        headers={'content-type': 'application/json'},
+        proxies=None
+    )
+
+    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
+    assert expected_data == actual_data
+
+
+def test_victor_ops_proxy():
+    rule = {
+        'name': 'Test VictorOps Rule',
+        'type': 'any',
+        'victorops_api_key': 'xxxx1',
+        'victorops_routing_key': 'xxxx2',
+        'victorops_message_type': 'INFO',
+        'victorops_entity_display_name': 'no entity display name',
+        'victorops_proxy': 'http://proxy.url',
+        'alert': []
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = VictorOpsAlerter(rule)
+    match = {
+        '@timestamp': '2021-01-01T00:00:00',
+        'somefield': 'foobarbaz'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        'message_type': rule['victorops_message_type'],
+        'entity_display_name': rule['victorops_entity_display_name'],
+        'monitoring_tool': 'ElastAlert',
+        'state_message': 'Test VictorOps Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'
+    }
+
+    mock_post_request.assert_called_once_with(
+        'https://alert.victorops.com/integrations/generic/20131114/alert/xxxx1/xxxx2',
+        data=mock.ANY,
+        headers={'content-type': 'application/json'},
+        proxies={'https': 'http://proxy.url'}
+    )
+
+    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
+    assert expected_data == actual_data
+
+
+def test_victor_ops_ea_exception():
+    try:
+        rule = {
+            'name': 'Test VictorOps Rule',
+            'type': 'any',
+            'victorops_api_key': 'xxxx1',
+            'victorops_routing_key': 'xxxx2',
+            'victorops_message_type': 'INFO',
+            'victorops_entity_display_name': 'no entity display name',
+            'victorops_proxy': 'http://proxy.url',
+            'alert': []
+        }
+        rules_loader = FileRulesLoader({})
+        rules_loader.load_modules(rule)
+        alert = VictorOpsAlerter(rule)
+        match = {
+            '@timestamp': '2021-01-01T00:00:00',
+            'somefield': 'foobarbaz'
+        }
+        mock_run = mock.MagicMock(side_effect=RequestException)
+        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
+            alert.alert([match])
+    except EAException:
+        assert True
diff --git a/tests/alerters/zabbix_test.py b/tests/alerters/zabbix_test.py
new file mode 100644
index 000000000..a25cae9cc
--- /dev/null
+++ b/tests/alerters/zabbix_test.py
@@ -0,0 +1,34 @@
+import mock
+
+from elastalert.alerters.zabbix import ZabbixAlerter
+from elastalert.loaders import FileRulesLoader
+
+
+def test_zabbix_basic():
+    rule = {
+        'name': 'Basic Zabbix
test', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'alert': [], + 'alert_subject': 'Test Zabbix', + 'zbx_host': 'example.com', + 'zbx_key': 'example-key' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ZabbixAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00Z', + 'somefield': 'foobarbaz' + } + with mock.patch('pyzabbix.ZabbixSender.send') as mock_zbx_send: + alert.alert([match]) + + zabbix_metrics = { + "host": "example.com", + "key": "example-key", + "value": "1", + "clock": 1609459200 + } + alerter_args = mock_zbx_send.call_args.args + assert vars(alerter_args[0][0]) == zabbix_metrics diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 22a3f84f9..fab081b8e 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -1,47 +1,12 @@ # -*- coding: utf-8 -*- -import base64 import datetime import json -import subprocess -import re -import uuid import mock -import pytest -from jira.exceptions import JIRAError -from requests.auth import HTTPProxyAuth -from requests.exceptions import RequestException -from elastalert.alerters.alerta import AlertaAlerter from elastalert.alerts import Alerter from elastalert.alerts import BasicMatchString -from elastalert.alerters.chatwork import ChatworkAlerter -from elastalert.alerters.command import CommandAlerter -from elastalert.alerters.datadog import DatadogAlerter -from elastalert.dingtalk import DingTalkAlerter -from elastalert.alerters.discord import DiscordAlerter -from elastalert.alerters.gitter import GitterAlerter -from elastalert.alerters.googlechat import GoogleChatAlerter -from elastalert.thehive import HiveAlerter -from elastalert.alerters.httppost import HTTPPostAlerter -from elastalert.alerters.line import LineNotifyAlerter -from elastalert.alerters.pagertree import PagerTreeAlerter -from elastalert.alerters.servicenow import ServiceNowAlerter -from elastalert.alerters.telegram import TelegramAlerter -from elastalert.loaders import FileRulesLoader -from elastalert.alerters.jira import JiraAlerter -from elastalert.alerters.jira import JiraFormattedMatchString -from elastalert.alerters.email import EmailAlerter -from elastalert.alerters.mattermost import MattermostAlerter -from elastalert.alerters.opsgenie import OpsGenieAlerter -from elastalert.alerters.pagerduty import PagerDutyAlerter -from elastalert.alerters.slack import SlackAlerter -from elastalert.alerters.teams import MsTeamsAlerter -from elastalert.alerters.zabbix import ZabbixAlerter -from elastalert.alerters.victorops import VictorOpsAlerter from elastalert.util import ts_add -from elastalert.util import ts_now -from elastalert.util import EAException class mock_rule: @@ -93,7097 +58,193 @@ def test_basic_match_string(ea): assert 'field: value' not in alert_text -def test_jira_formatted_match_string(ea): - match = {'foo': {'bar': ['one', 2, 'three']}, 'top_events_poof': 'phew'} - alert_text = str(JiraFormattedMatchString(ea.rules[0], match)) - tab = 4 * ' ' - expected_alert_text_snippet = '{code}{\n' \ - + tab + '"foo": {\n' \ - + 2 * tab + '"bar": [\n' \ - + 3 * tab + '"one",\n' \ - + 3 * tab + '2,\n' \ - + 3 * tab + '"three"\n' \ - + 2 * tab + ']\n' \ - + tab + '}\n' \ - + '}{code}' - assert expected_alert_text_snippet in alert_text - - -def test_email(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'owner': 'owner_value', - 'alert_subject': 
'Test alert for {0}, owned by {1}', 'alert_subject_args': ['test_term', 'owner'], 'snowman': '☃'} - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - - alert = EmailAlerter(rule) - alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), - mock.call().ehlo(), - mock.call().has_extn('STARTTLS'), - mock.call().starttls(certfile=None, keyfile=None), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY), - mock.call().quit()] - assert mock_smtp.mock_calls == expected - - body = mock_smtp.mock_calls[4][1][2] - - assert 'Reply-To: test@example.com' in body - assert 'To: testing@test.test' in body - assert 'From: testfrom@test.test' in body - assert 'Subject: Test alert for test_value, owned by owner_value' in body - - -def test_email_from_field(): - rule = {'name': 'test alert', 'email': ['testing@test.test'], 'email_add_domain': 'example.com', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_from_field': 'data.user', 'owner': 'owner_value'} - # Found, without @ - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - alert = EmailAlerter(rule) - alert.alert([{'data': {'user': 'qlo'}}]) - assert mock_smtp.mock_calls[4][1][1] == ['qlo@example.com'] - - # Found, with @ - rule['email_add_domain'] = '@example.com' - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - alert = EmailAlerter(rule) - alert.alert([{'data': {'user': 'qlo'}}]) - assert mock_smtp.mock_calls[4][1][1] == ['qlo@example.com'] - - # Found, list - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - alert = EmailAlerter(rule) - alert.alert([{'data': {'user': ['qlo', 'foo']}}]) - assert mock_smtp.mock_calls[4][1][1] == ['qlo@example.com', 'foo@example.com'] - - # Not found - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - alert = EmailAlerter(rule) - alert.alert([{'data': {'foo': 'qlo'}}]) - assert mock_smtp.mock_calls[4][1][1] == ['testing@test.test'] - - # Found, wrong type - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - alert = EmailAlerter(rule) - alert.alert([{'data': {'user': 17}}]) - assert mock_smtp.mock_calls[4][1][1] == ['testing@test.test'] - - -def test_email_with_unicode_strings(): - rule = {'name': 'test alert', 'email': 'testing@test.test', 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'owner': 'owner_value', - 'alert_subject': 'Test alert for {0}, owned by {1}', 'alert_subject_args': ['test_term', 'owner'], 'snowman': '☃'} - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - - alert = EmailAlerter(rule) - alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), - mock.call().ehlo(), - mock.call().has_extn('STARTTLS'), - mock.call().starttls(certfile=None, keyfile=None), - mock.call().sendmail(mock.ANY, ['testing@test.test'], mock.ANY), - mock.call().quit()] - assert mock_smtp.mock_calls == expected - - body = mock_smtp.mock_calls[4][1][2] - - assert 'Reply-To: test@example.com' in body - assert 'To: testing@test.test' in body - assert 'From: testfrom@test.test' in body - assert 'Subject: Test alert for test_value, owned by owner_value' in body - - -def 
test_email_with_auth(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'alert_subject': 'Test alert for {0}', 'alert_subject_args': ['test_term'], 'smtp_auth_file': 'file.txt', - 'rule_file': '/tmp/foo.yaml'} - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - with mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} - mock_smtp.return_value = mock.Mock() - alert = EmailAlerter(rule) - - alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), - mock.call().ehlo(), - mock.call().has_extn('STARTTLS'), - mock.call().starttls(certfile=None, keyfile=None), - mock.call().login('someone', 'hunter2'), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY), - mock.call().quit()] - assert mock_smtp.mock_calls == expected - - -def test_email_with_cert_key(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'alert_subject': 'Test alert for {0}', 'alert_subject_args': ['test_term'], 'smtp_auth_file': 'file.txt', - 'smtp_cert_file': 'dummy/cert.crt', 'smtp_key_file': 'dummy/client.key', 'rule_file': '/tmp/foo.yaml'} - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - with mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} - mock_smtp.return_value = mock.Mock() - alert = EmailAlerter(rule) - - alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), - mock.call().ehlo(), - mock.call().has_extn('STARTTLS'), - mock.call().starttls(certfile='dummy/cert.crt', keyfile='dummy/client.key'), - mock.call().login('someone', 'hunter2'), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY), - mock.call().quit()] - assert mock_smtp.mock_calls == expected - - -def test_email_with_cc(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'cc': 'tester@testing.testing'} - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - - alert = EmailAlerter(rule) - alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), - mock.call().ehlo(), - mock.call().has_extn('STARTTLS'), - mock.call().starttls(certfile=None, keyfile=None), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test', 'tester@testing.testing'], mock.ANY), - mock.call().quit()] - assert mock_smtp.mock_calls == expected - - body = mock_smtp.mock_calls[4][1][2] - - assert 'Reply-To: test@example.com' in body - assert 'To: testing@test.test' in body - assert 'CC: tester@testing.testing' in body - assert 'From: testfrom@test.test' in body - - -def test_email_with_bcc(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'bcc': 'tester@testing.testing'} - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value 
= mock.Mock() - - alert = EmailAlerter(rule) - alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), - mock.call().ehlo(), - mock.call().has_extn('STARTTLS'), - mock.call().starttls(certfile=None, keyfile=None), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test', 'tester@testing.testing'], mock.ANY), - mock.call().quit()] - assert mock_smtp.mock_calls == expected - - body = mock_smtp.mock_calls[4][1][2] - - assert 'Reply-To: test@example.com' in body - assert 'To: testing@test.test' in body - assert 'CC: tester@testing.testing' not in body - assert 'From: testfrom@test.test' in body - +def test_kibana(ea): + rule = {'filter': [{'query': {'query_string': {'query': 'xy:z'}}}], + 'name': 'Test rule!', + 'es_host': 'test.testing', + 'es_port': 12345, + 'timeframe': datetime.timedelta(hours=1), + 'index': 'logstash-test', + 'include': ['@timestamp'], + 'timestamp_field': '@timestamp'} + match = {'@timestamp': '2014-10-10T00:00:00'} + with mock.patch("elastalert.elastalert.elasticsearch_client") as mock_es: + mock_create = mock.Mock(return_value={'_id': 'ABCDEFGH'}) + mock_es_inst = mock.Mock() + mock_es_inst.index = mock_create + mock_es_inst.host = 'test.testing' + mock_es_inst.port = 12345 + mock_es.return_value = mock_es_inst + link = ea.generate_kibana_db(rule, match) -def test_email_with_cc_and_bcc(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'cc': ['test1@test.com', 'test2@test.com'], 'bcc': 'tester@testing.testing'} - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() + assert 'http://test.testing:12345/_plugin/kibana/#/dashboard/temp/ABCDEFGH' == link - alert = EmailAlerter(rule) - alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), - mock.call().ehlo(), - mock.call().has_extn('STARTTLS'), - mock.call().starttls(certfile=None, keyfile=None), - mock.call().sendmail( - mock.ANY, - [ - 'testing@test.test', - 'test@test.test', - 'test1@test.com', - 'test2@test.com', - 'tester@testing.testing' - ], - mock.ANY - ), - mock.call().quit()] - assert mock_smtp.mock_calls == expected + # Name and index + dashboard = json.loads(mock_create.call_args_list[0][1]['body']['dashboard']) + assert dashboard['index']['default'] == 'logstash-test' + assert 'Test rule!' 
in dashboard['title'] - body = mock_smtp.mock_calls[4][1][2] + # Filters and time range + filters = dashboard['services']['filter']['list'] + assert 'xy:z' in filters['1']['query'] + assert filters['1']['type'] == 'querystring' + time_range = filters['0'] + assert time_range['from'] == ts_add(match['@timestamp'], -rule['timeframe']) + assert time_range['to'] == ts_add(match['@timestamp'], datetime.timedelta(minutes=10)) - assert 'Reply-To: test@example.com' in body - assert 'To: testing@test.test' in body - assert 'CC: test1@test.com,test2@test.com' in body - assert 'From: testfrom@test.test' in body + # Included fields active in table + assert dashboard['rows'][1]['panels'][0]['fields'] == ['@timestamp'] -def test_email_with_args(): - rule = { - 'name': 'test alert', - 'email': ['testing@test.test', 'test@test.test'], - 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), - 'timestamp_field': '@timestamp', - 'email_reply_to': 'test@example.com', - 'alert_subject': 'Test alert for {0} {1}', - 'alert_subject_args': ['test_term', 'test.term'], - 'alert_text': 'Test alert for {0} and {1} {2}', - 'alert_text_args': ['test_arg1', 'test_arg2', 'test.arg3'], - 'alert_missing_value': '' +def test_alert_text_kw(ea): + rule = ea.rules[0].copy() + rule['alert_text'] = '{field} at {time}' + rule['alert_text_kw'] = { + '@timestamp': 'time', + 'field': 'field', } - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - - alert = EmailAlerter(rule) - alert.alert([{'test_term': 'test_value', 'test_arg1': 'testing', 'test': {'term': ':)', 'arg3': '☃'}}]) - expected = [mock.call('localhost', 25), - mock.call().ehlo(), - mock.call().has_extn('STARTTLS'), - mock.call().starttls(certfile=None, keyfile=None), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY), - mock.call().quit()] - assert mock_smtp.mock_calls == expected - - body = mock_smtp.mock_calls[4][1][2] - # Extract the MIME encoded message body - body_text = base64.b64decode(body.split('\n\n')[-1][:-1]).decode('utf-8') - - assert 'testing' in body_text - assert '' in body_text - assert '☃' in body_text - - assert 'Reply-To: test@example.com' in body - assert 'To: testing@test.test' in body - assert 'From: testfrom@test.test' in body - assert 'Subject: Test alert for test_value :)' in body - - -def test_email_query_key_in_subject(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'query_key': 'username'} - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - - alert = EmailAlerter(rule) - alert.alert([{'test_term': 'test_value', 'username': 'werbenjagermanjensen'}]) - - body = mock_smtp.mock_calls[4][1][2] - lines = body.split('\n') - found_subject = False - for line in lines: - if line.startswith('Subject'): - assert 'werbenjagermanjensen' in line - found_subject = True - assert found_subject - - -def test_opsgenie_basic(): - rule = {'name': 'testOGalert', 'opsgenie_key': 'ogkey', - 'opsgenie_account': 'genies', 'opsgenie_addr': 'https://api.opsgenie.com/v2/alerts', - 'opsgenie_recipients': ['lytics'], 'type': mock_rule()} - with mock.patch('requests.post') as mock_post: - - alert = OpsGenieAlerter(rule) - alert.alert([{'@timestamp': '2014-10-31T00:00:00'}]) - print(("mock_post: {0}".format(mock_post._mock_call_args_list))) - mcal = mock_post._mock_call_args_list - print(('mcal: 
{0}'.format(mcal[0]))) - assert mcal[0][0][0] == ('https://api.opsgenie.com/v2/alerts') - - assert mock_post.called - - assert mcal[0][1]['headers']['Authorization'] == 'GenieKey ogkey' - assert mcal[0][1]['json']['source'] == 'ElastAlert' - assert mcal[0][1]['json']['responders'] == [{'username': 'lytics', 'type': 'user'}] - assert mcal[0][1]['json']['source'] == 'ElastAlert' - - -def test_opsgenie_frequency(): - rule = {'name': 'testOGalert', 'opsgenie_key': 'ogkey', - 'opsgenie_account': 'genies', 'opsgenie_addr': 'https://api.opsgenie.com/v2/alerts', - 'opsgenie_recipients': ['lytics'], 'type': mock_rule(), - 'filter': [{'query': {'query_string': {'query': '*hihi*'}}}], - 'alert': 'opsgenie'} - with mock.patch('requests.post') as mock_post: - - alert = OpsGenieAlerter(rule) - alert.alert([{'@timestamp': '2014-10-31T00:00:00'}]) - - assert alert.get_info()['recipients'] == rule['opsgenie_recipients'] - - print(("mock_post: {0}".format(mock_post._mock_call_args_list))) - mcal = mock_post._mock_call_args_list - print(('mcal: {0}'.format(mcal[0]))) - assert mcal[0][0][0] == ('https://api.opsgenie.com/v2/alerts') - - assert mock_post.called - - assert mcal[0][1]['headers']['Authorization'] == 'GenieKey ogkey' - assert mcal[0][1]['json']['source'] == 'ElastAlert' - assert mcal[0][1]['json']['responders'] == [{'username': 'lytics', 'type': 'user'}] - assert mcal[0][1]['json']['source'] == 'ElastAlert' - assert mcal[0][1]['json']['source'] == 'ElastAlert' - - -def test_opsgenie_alert_routing(): - rule = {'name': 'testOGalert', 'opsgenie_key': 'ogkey', - 'opsgenie_account': 'genies', 'opsgenie_addr': 'https://api.opsgenie.com/v2/alerts', - 'opsgenie_recipients': ['{RECEIPIENT_PREFIX}'], 'opsgenie_recipients_args': {'RECEIPIENT_PREFIX': 'recipient'}, - 'type': mock_rule(), - 'filter': [{'query': {'query_string': {'query': '*hihi*'}}}], - 'alert': 'opsgenie', - 'opsgenie_teams': ['{TEAM_PREFIX}-Team'], 'opsgenie_teams_args': {'TEAM_PREFIX': 'team'}} - with mock.patch('requests.post'): - - alert = OpsGenieAlerter(rule) - alert.alert([{'@timestamp': '2014-10-31T00:00:00', 'team': "Test", 'recipient': "lytics"}]) - - assert alert.get_info()['teams'] == ['Test-Team'] - assert alert.get_info()['recipients'] == ['lytics'] - - -def test_opsgenie_default_alert_routing(): - rule = {'name': 'testOGalert', 'opsgenie_key': 'ogkey', - 'opsgenie_account': 'genies', 'opsgenie_addr': 'https://api.opsgenie.com/v2/alerts', - 'opsgenie_recipients': ['{RECEIPIENT_PREFIX}'], 'opsgenie_recipients_args': {'RECEIPIENT_PREFIX': 'recipient'}, - 'type': mock_rule(), - 'filter': [{'query': {'query_string': {'query': '*hihi*'}}}], - 'alert': 'opsgenie', - 'opsgenie_teams': ['{TEAM_PREFIX}-Team'], - 'opsgenie_default_receipients': ["devops@test.com"], 'opsgenie_default_teams': ["Test"] - } - with mock.patch('requests.post'): - - alert = OpsGenieAlerter(rule) - alert.alert([{'@timestamp': '2014-10-31T00:00:00', 'team': "Test"}]) + match = {'@timestamp': '1918-01-17', 'field': 'value'} + alert_text = str(BasicMatchString(rule, match)) + body = '{field} at {@timestamp}'.format(**match) + assert body in alert_text - assert alert.get_info()['teams'] == ['{TEAM_PREFIX}-Team'] - assert alert.get_info()['recipients'] == ['devops@test.com'] +def test_alert_text_global_substitution(ea): + rule = ea.rules[0].copy() + rule['owner'] = 'the owner from rule' + rule['priority'] = 'priority from rule' + rule['abc'] = 'abc from rule' + rule['alert_text'] = 'Priority: {0}; Owner: {1}; Abc: {2}' + rule['alert_text_args'] = ['priority', 'owner', 
'abc'] -def test_opsgenie_details_with_constant_value(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': {'Foo': 'Bar'} - } match = { - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Foo': 'Bar'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' + '@timestamp': '2016-01-01', + 'field': 'field_value', + 'abc': 'abc from match', } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - -def test_opsgenie_details_with_field(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': {'Foo': {'field': 'message'}} - } - match = { - 'message': 'Bar', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) + alert_text = str(BasicMatchString(rule, match)) + assert 'Priority: priority from rule' in alert_text + assert 'Owner: the owner from rule' in alert_text - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) + # When the key exists in both places, it will come from the match + assert 'Abc: abc from match' in alert_text - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Foo': 'Bar'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' +def test_alert_text_kw_global_substitution(ea): + rule = ea.rules[0].copy() + rule['foo_rule'] = 'foo from rule' + rule['owner'] = 'the owner from rule' + rule['abc'] = 'abc from rule' + rule['alert_text'] = 'Owner: {owner}; Foo: {foo}; Abc: {abc}' + rule['alert_text_kw'] = { + 'owner': 'owner', + 'foo_rule': 'foo', + 'abc': 'abc', } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - -def test_opsgenie_details_with_nested_field(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': {'Foo': {'field': 'nested.field'}} - } match = { - 'nested': { - 'field': 'Bar' - }, - '@timestamp': '2014-10-31T00:00:00' + '@timestamp': '2016-01-01', + 'field': 'field_value', + 'abc': 'abc from match', } - alert = OpsGenieAlerter(rule) - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) + alert_text = str(BasicMatchString(rule, match)) + assert 'Owner: the owner from rule' in alert_text + assert 'Foo: foo from rule' in alert_text - expected_json = { - 'description': 
BasicMatchString(rule, match).__str__(), - 'details': {'Foo': 'Bar'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json + # When the key exists in both places, it will come from the match + assert 'Abc: abc from match' in alert_text -def test_opsgenie_details_with_non_string_field(): +def test_resolving_rule_references(): rule = { - 'name': 'Opsgenie Details', + 'name': 'test_rule', 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Age': {'field': 'age'}, - 'Message': {'field': 'message'} - } - } - match = { - 'age': 10, - 'message': { - 'format': 'The cow goes %s!', - 'arg0': 'moo' - } - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': { - 'Age': '10', - 'Message': "{'format': 'The cow goes %s!', 'arg0': 'moo'}" + 'owner': 'the_owner', + 'priority': 2, + 'list_of_things': [ + '1', + '$owner$', + [ + '11', + '$owner$', + ], + ], + 'nested_dict': { + 'nested_one': '1', + 'nested_owner': '$owner$', }, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' + 'resolved_string_reference': '$owner$', + 'resolved_int_reference': '$priority$', + 'unresolved_reference': '$foo$', } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json + alert = Alerter(rule) + assert 'the_owner' == alert.rule['resolved_string_reference'] + assert 2 == alert.rule['resolved_int_reference'] + assert '$foo$' == alert.rule['unresolved_reference'] + assert 'the_owner' == alert.rule['list_of_things'][1] + assert 'the_owner' == alert.rule['list_of_things'][2][1] + assert 'the_owner' == alert.rule['nested_dict']['nested_owner'] -def test_opsgenie_details_with_missing_field(): +def test_alert_subject_size_limit_no_args(): rule = { - 'name': 'Opsgenie Details', + 'name': 'test_rule', 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - } - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' + 'owner': 'the_owner', + 'priority': 2, + 'alert_subject': 'A very long subject', + 'alert_subject_max_len': 5 } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json + alert = 
Alerter(rule) + alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + assert 5 == len(alertSubject) -def test_opsgenie_details_with_environment_variable_replacement(environ): - environ.update({ - 'TEST_VAR': 'Bar' - }) +def test_alert_error(): rule = { - 'name': 'Opsgenie Details', + 'name': 'test_rule', 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': {'Foo': '$TEST_VAR'} + 'owner': 'the_owner', + 'priority': 2, + 'alert_subject': 'A very long subject', + 'alert_subject_max_len': 5 } match = { - '@timestamp': '2014-10-31T00:00:00' + '@timestamp': '2021-01-01T00:00:00', + 'name': 'datadog-test-name' } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: + alert = Alerter(rule) + try: alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Foo': 'Bar'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json + except NotImplementedError: + assert True -def test_opsgenie_tags(): +def test_alert_get_aggregation_summary_text__maximum_width(): rule = { - 'name': 'Opsgenie Details', + 'name': 'test_rule', 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_tags': ['test1', 'test2'] - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['test1', 'test2', 'ElastAlert', 'Opsgenie Details'], - 'user': 'genies' + 'owner': 'the_owner', + 'priority': 2, + 'alert_subject': 'A very long subject', + 'alert_subject_max_len': 5 } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json + alert = Alerter(rule) + assert 80 == alert.get_aggregation_summary_text__maximum_width() -def test_opsgenie_message(): +def test_alert_subject_size_limit_with_args(ea): rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_message': 'test1' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 
'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'test1', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_alias(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_alias': 'test1' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies', - 'alias': 'test1' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_subject(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_subject': 'test1' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'test1', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_subject_args(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_subject': 'test', - 'opsgenie_subject_args': ['Testing', 'message'] - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'test', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } 
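# [Editor's note] The six opsgenie priority tests that follow (P1 through P5,
# plus the invalid value 'abc') differ only in the 'opsgenie_priority' rule
# setting and in the 'priority' key of the expected JSON payload (the
# unrecognized value drops the key entirely). A minimal sketch of how they
# could be collapsed with pytest.mark.parametrize -- hypothetical, not part
# of this patch:
#
#     @pytest.mark.parametrize('priority', ['P1', 'P2', 'P3', 'P4', 'P5'])
#     def test_opsgenie_priority(priority):
#         rule = {'name': 'Opsgenie Details', 'type': mock_rule(),
#                 'opsgenie_account': 'genies', 'opsgenie_key': 'ogkey',
#                 'opsgenie_priority': priority}
#         with mock.patch('requests.post') as mock_post_request:
#             OpsGenieAlerter(rule).alert([{'message': 'Testing',
#                                           '@timestamp': '2014-10-31T00:00:00'}])
#         # the alerter passes the payload as the 'json' kwarg of requests.post
#         assert mock_post_request.call_args_list[0][1]['json']['priority'] == priority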
- actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_priority_p1(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_priority': 'P1' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': 'P1', - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_priority_p2(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_priority': 'P2' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': 'P2', - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_priority_p3(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_priority': 'P3' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': 'P3', - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_priority_p4(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 
'Missing': {'field': 'missing'} - }, - 'opsgenie_priority': 'P4' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': 'P4', - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_priority_p5(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_priority': 'P5' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': 'P5', - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_priority_none(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_priority': 'abc' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_proxy(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_proxy': 'https://proxy.url' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 
'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies={'https': 'https://proxy.url'} - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_jira(): - description_txt = "Description stuff goes here like a runbook link." - rule = { - 'name': 'test alert', - 'jira_account_file': 'jirafile', - 'type': mock_rule(), - 'jira_project': 'testproject', - 'jira_priority': 0, - 'jira_issuetype': 'testtype', - 'jira_server': 'jiraserver', - 'jira_label': 'testlabel', - 'jira_component': 'testcomponent', - 'jira_description': description_txt, - 'jira_watchers': ['testwatcher1', 'testwatcher2'], - 'timestamp_field': '@timestamp', - 'alert_subject': 'Issue {0} occurred at {1}', - 'alert_subject_args': ['test_term', '@timestamp'], - 'rule_file': '/tmp/foo.yaml' - } - - mock_priority = mock.Mock(id='5') - - with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - - expected = [ - mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), - mock.call().priorities(), - mock.call().fields(), - mock.call().create_issue( - issuetype={'name': 'testtype'}, - priority={'id': '5'}, - project={'key': 'testproject'}, - labels=['testlabel'], - components=[{'name': 'testcomponent'}], - description=mock.ANY, - summary='Issue test_value occurred at 2014-10-31T00:00:00', - ), - mock.call().add_watcher(mock.ANY, 'testwatcher1'), - mock.call().add_watcher(mock.ANY, 'testwatcher2'), - ] - - # We don't care about additional calls to mock_jira, such as __str__ - assert mock_jira.mock_calls[:6] == expected - assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) - - # Search called if jira_bump_tickets - rule['jira_bump_tickets'] = True - with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [] - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] - - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - - expected.insert(3, mock.call().search_issues(mock.ANY)) - assert mock_jira.mock_calls == expected - - # Remove a field if jira_ignore_in_title set - rule['jira_ignore_in_title'] = 'test_term' - with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [] - mock_jira.return_value.priorities.return_value = [mock_priority] 
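# [Editor's note] Every JIRA scenario in this test repeats the same stubbing:
# the expected call list shows the alerter opening a connection, then querying
# priorities() and fields() before it creates or bumps an issue, and calling
# search_issues() whenever jira_bump_tickets is enabled, so each variant must
# prime all three return values. A minimal sketch of a shared helper --
# hypothetical, not part of this patch:
#
#     def make_mock_jira(issues=(), fields=()):
#         inst = mock.Mock()
#         inst.search_issues.return_value = list(issues)
#         inst.priorities.return_value = [mock.Mock(id='5')]
#         inst.fields.return_value = list(fields)
#         return inst
#
# used as mock_jira.return_value = make_mock_jira() inside the patch context.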
- mock_jira.return_value.fields.return_value = [] - - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - - assert 'test_value' not in mock_jira.mock_calls[3][1][0] - - # Issue is still created if search_issues throws an exception - with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.side_effect = JIRAError - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] - - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - - assert mock_jira.mock_calls == expected - - # Only bump after 3d of inactivity - rule['jira_bump_after_inactivity'] = 3 - mock_issue = mock.Mock() - - # Check ticket is bumped if it is updated 4 days ago - mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) - with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [mock_issue] - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] - - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - # Check add_comment is called - assert len(mock_jira.mock_calls) == 5 - assert '().add_comment' == mock_jira.mock_calls[4][0] - - # Check ticket is bumped is not bumped if ticket is updated right now - mock_issue.fields.updated = str(ts_now()) - with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [mock_issue] - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = [] - - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - # Only 4 calls for mock_jira since add_comment is not called - assert len(mock_jira.mock_calls) == 4 - - # Test match resolved values - rule = { - 'name': 'test alert', - 'jira_account_file': 'jirafile', - 'type': mock_rule(), - 'owner': 'the_owner', - 'jira_project': 'testproject', - 'jira_issuetype': 'testtype', - 'jira_server': 'jiraserver', - 'jira_label': 'testlabel', - 'jira_component': 'testcomponent', - 'jira_description': "DESC", - 'jira_watchers': ['testwatcher1', 'testwatcher2'], - 'timestamp_field': '@timestamp', - 'jira_affected_user': "#gmail.the_user", - 'rule_file': '/tmp/foo.yaml' - } - mock_issue = mock.Mock() - mock_issue.fields.updated = str(ts_now() - datetime.timedelta(days=4)) - mock_fields = [ - {'name': 'affected user', 'id': 'affected_user_id', 'schema': {'type': 'string'}} - ] - with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value = mock.Mock() - mock_jira.return_value.search_issues.return_value = [mock_issue] - 
mock_jira.return_value.fields.return_value = mock_fields - mock_jira.return_value.priorities.return_value = [mock_priority] - alert = JiraAlerter(rule) - alert.alert([{'gmail.the_user': 'jdoe', '@timestamp': '2014-10-31T00:00:00'}]) - assert mock_jira.mock_calls[4][2]['affected_user_id'] == "jdoe" - - -def test_jira_arbitrary_field_support(): - description_txt = "Description stuff goes here like a runbook link." - rule = { - 'name': 'test alert', - 'jira_account_file': 'jirafile', + 'name': 'test_rule', 'type': mock_rule(), 'owner': 'the_owner', - 'jira_project': 'testproject', - 'jira_issuetype': 'testtype', - 'jira_server': 'jiraserver', - 'jira_label': 'testlabel', - 'jira_component': 'testcomponent', - 'jira_description': description_txt, - 'jira_watchers': ['testwatcher1', 'testwatcher2'], - 'jira_arbitrary_reference_string_field': '$owner$', - 'jira_arbitrary_string_field': 'arbitrary_string_value', - 'jira_arbitrary_string_array_field': ['arbitrary_string_value1', 'arbitrary_string_value2'], - 'jira_arbitrary_string_array_field_provided_as_single_value': 'arbitrary_string_value_in_array_field', - 'jira_arbitrary_number_field': 1, - 'jira_arbitrary_number_array_field': [2, 3], - 'jira_arbitrary_number_array_field_provided_as_single_value': 1, - 'jira_arbitrary_complex_field': 'arbitrary_complex_value', - 'jira_arbitrary_complex_array_field': ['arbitrary_complex_value1', 'arbitrary_complex_value2'], - 'jira_arbitrary_complex_array_field_provided_as_single_value': 'arbitrary_complex_value_in_array_field', - 'timestamp_field': '@timestamp', - 'alert_subject': 'Issue {0} occurred at {1}', - 'alert_subject_args': ['test_term', '@timestamp'], - 'rule_file': '/tmp/foo.yaml' - } - - mock_priority = mock.MagicMock(id='5') - - mock_fields = [ - {'name': 'arbitrary reference string field', 'id': 'arbitrary_reference_string_field', 'schema': {'type': 'string'}}, - {'name': 'arbitrary string field', 'id': 'arbitrary_string_field', 'schema': {'type': 'string'}}, - {'name': 'arbitrary string array field', 'id': 'arbitrary_string_array_field', 'schema': {'type': 'array', 'items': 'string'}}, - { - 'name': 'arbitrary string array field provided as single value', - 'id': 'arbitrary_string_array_field_provided_as_single_value', - 'schema': {'type': 'array', 'items': 'string'} - }, - {'name': 'arbitrary number field', 'id': 'arbitrary_number_field', 'schema': {'type': 'number'}}, - {'name': 'arbitrary number array field', 'id': 'arbitrary_number_array_field', 'schema': {'type': 'array', 'items': 'number'}}, - { - 'name': 'arbitrary number array field provided as single value', - 'id': 'arbitrary_number_array_field_provided_as_single_value', - 'schema': {'type': 'array', 'items': 'number'} - }, - {'name': 'arbitrary complex field', 'id': 'arbitrary_complex_field', 'schema': {'type': 'ArbitraryType'}}, - { - 'name': 'arbitrary complex array field', - 'id': 'arbitrary_complex_array_field', - 'schema': {'type': 'array', 'items': 'ArbitraryType'} - }, - { - 'name': 'arbitrary complex array field provided as single value', - 'id': 'arbitrary_complex_array_field_provided_as_single_value', - 'schema': {'type': 'array', 'items': 'ArbitraryType'} - }, - ] - - with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = mock_fields - alert = JiraAlerter(rule) - 
alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - - expected = [ - mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')), - mock.call().priorities(), - mock.call().fields(), - mock.call().create_issue( - issuetype={'name': 'testtype'}, - project={'key': 'testproject'}, - labels=['testlabel'], - components=[{'name': 'testcomponent'}], - description=mock.ANY, - summary='Issue test_value occurred at 2014-10-31T00:00:00', - arbitrary_reference_string_field='the_owner', - arbitrary_string_field='arbitrary_string_value', - arbitrary_string_array_field=['arbitrary_string_value1', 'arbitrary_string_value2'], - arbitrary_string_array_field_provided_as_single_value=['arbitrary_string_value_in_array_field'], - arbitrary_number_field=1, - arbitrary_number_array_field=[2, 3], - arbitrary_number_array_field_provided_as_single_value=[1], - arbitrary_complex_field={'name': 'arbitrary_complex_value'}, - arbitrary_complex_array_field=[{'name': 'arbitrary_complex_value1'}, {'name': 'arbitrary_complex_value2'}], - arbitrary_complex_array_field_provided_as_single_value=[{'name': 'arbitrary_complex_value_in_array_field'}], - ), - mock.call().add_watcher(mock.ANY, 'testwatcher1'), - mock.call().add_watcher(mock.ANY, 'testwatcher2'), - ] - - # We don't care about additional calls to mock_jira, such as __str__ - assert mock_jira.mock_calls[:6] == expected - assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) - - # Reference an arbitrary string field that is not defined on the JIRA server - rule['jira_nonexistent_field'] = 'nonexistent field value' - - with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = mock_fields - - with pytest.raises(Exception) as exception: - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - assert "Could not find a definition for the jira field 'nonexistent field'" in str(exception) - - del rule['jira_nonexistent_field'] - - # Reference a watcher that does not exist - rule['jira_watchers'] = 'invalid_watcher' - - with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ - mock.patch('elastalert.alerts.read_yaml') as mock_open: - mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} - mock_jira.return_value.priorities.return_value = [mock_priority] - mock_jira.return_value.fields.return_value = mock_fields - - # Cause add_watcher to raise, which most likely means that the user did not exist - mock_jira.return_value.add_watcher.side_effect = Exception() - - with pytest.raises(Exception) as exception: - alert = JiraAlerter(rule) - alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - assert "Exception encountered when trying to add 'invalid_watcher' as a watcher. Does the user exist?" 
in str(exception) - - -def test_kibana(ea): - rule = {'filter': [{'query': {'query_string': {'query': 'xy:z'}}}], - 'name': 'Test rule!', - 'es_host': 'test.testing', - 'es_port': 12345, - 'timeframe': datetime.timedelta(hours=1), - 'index': 'logstash-test', - 'include': ['@timestamp'], - 'timestamp_field': '@timestamp'} - match = {'@timestamp': '2014-10-10T00:00:00'} - with mock.patch("elastalert.elastalert.elasticsearch_client") as mock_es: - mock_create = mock.Mock(return_value={'_id': 'ABCDEFGH'}) - mock_es_inst = mock.Mock() - mock_es_inst.index = mock_create - mock_es_inst.host = 'test.testing' - mock_es_inst.port = 12345 - mock_es.return_value = mock_es_inst - link = ea.generate_kibana_db(rule, match) - - assert 'http://test.testing:12345/_plugin/kibana/#/dashboard/temp/ABCDEFGH' == link - - # Name and index - dashboard = json.loads(mock_create.call_args_list[0][1]['body']['dashboard']) - assert dashboard['index']['default'] == 'logstash-test' - assert 'Test rule!' in dashboard['title'] - - # Filters and time range - filters = dashboard['services']['filter']['list'] - assert 'xy:z' in filters['1']['query'] - assert filters['1']['type'] == 'querystring' - time_range = filters['0'] - assert time_range['from'] == ts_add(match['@timestamp'], -rule['timeframe']) - assert time_range['to'] == ts_add(match['@timestamp'], datetime.timedelta(minutes=10)) - - # Included fields active in table - assert dashboard['rows'][1]['panels'][0]['fields'] == ['@timestamp'] - - -def test_command(): - # Test command as list with a formatted arg - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s']} - alert = CommandAlerter(rule) - match = {'@timestamp': '2014-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'nested': {'field': 1}} - with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: - alert.alert([match]) - assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) - - # Test command as string with formatted arg (old-style string format) - rule = {'command': '/bin/test/ --arg %(somefield)s'} - alert = CommandAlerter(rule) - with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: - alert.alert([match]) - assert mock_popen.called_with('/bin/test --arg foobarbaz', stdin=subprocess.PIPE, shell=False) - - # Test command as string without formatted arg (old-style string format) - rule = {'command': '/bin/test/foo.sh'} - alert = CommandAlerter(rule) - with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: - alert.alert([match]) - assert mock_popen.called_with('/bin/test/foo.sh', stdin=subprocess.PIPE, shell=True) - - # Test command with pipe_match_json - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], - 'pipe_match_json': True} - alert = CommandAlerter(rule) - match = {'@timestamp': '2014-01-01T00:00:00', - 'somefield': 'foobarbaz'} - with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: - mock_subprocess = mock.Mock() - mock_popen.return_value = mock_subprocess - mock_subprocess.communicate.return_value = (None, None) - alert.alert([match]) - assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) - assert mock_subprocess.communicate.called_with(input=json.dumps(match)) - - # Test command with pipe_alert_text - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], - 'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'} - alert = CommandAlerter(rule) - match = {'@timestamp': 
'2014-01-01T00:00:00', - 'somefield': 'foobarbaz'} - alert_text = str(BasicMatchString(rule, match)) - with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: - mock_subprocess = mock.Mock() - mock_popen.return_value = mock_subprocess - mock_subprocess.communicate.return_value = (None, None) - alert.alert([match]) - assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) - assert mock_subprocess.communicate.called_with(input=alert_text.encode()) - - # Test command with fail_on_non_zero_exit - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], - 'fail_on_non_zero_exit': True} - alert = CommandAlerter(rule) - match = {'@timestamp': '2014-01-01T00:00:00', - 'somefield': 'foobarbaz'} - with pytest.raises(Exception) as exception: - with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: - mock_subprocess = mock.Mock() - mock_popen.return_value = mock_subprocess - mock_subprocess.wait.return_value = 1 - alert.alert([match]) - assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) - assert "Non-zero exit code while running command" in str(exception) - - # Test OSError - try: - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], - 'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'} - alert = CommandAlerter(rule) - match = {'@timestamp': '2014-01-01T00:00:00', - 'somefield': 'foobarbaz'} - alert_text = str(BasicMatchString(rule, match)) - mock_run = mock.MagicMock(side_effect=OSError) - with mock.patch("elastalert.alerters.command.subprocess.Popen", mock_run), pytest.raises(OSError) as mock_popen: - mock_subprocess = mock.Mock() - mock_popen.return_value = mock_subprocess - mock_subprocess.communicate.return_value = (None, None) - alert.alert([match]) - except EAException: - assert True - - -def test_ms_teams(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'ms_teams_webhook_url': 'http://test.webhook.url', - 'ms_teams_alert_summary': 'Alert from ElastAlert', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MsTeamsAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' + 'priority': 2, + 'alert_subject': 'Test alert for {0} {1}', + 'alert_subject_args': ['test_term', 'test.term'], + 'alert_subject_max_len': 6 } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - '@type': 'MessageCard', - '@context': 'http://schema.org/extensions', - 'summary': rule['ms_teams_alert_summary'], - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__() - } - mock_post_request.assert_called_once_with( - rule['ms_teams_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_ms_teams_uses_color_and_fixed_width_text(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'ms_teams_webhook_url': 'http://test.webhook.url', - 'ms_teams_alert_summary': 'Alert from ElastAlert', - 'ms_teams_alert_fixed_width': True, - 'ms_teams_theme_color': '#124578', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MsTeamsAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') 
as mock_post_request: - alert.alert([match]) - body = BasicMatchString(rule, match).__str__() - body = body.replace('`', "'") - body = "```{0}```".format('```\n\n```'.join(x for x in body.split('\n'))).replace('\n``````', '') - expected_data = { - '@type': 'MessageCard', - '@context': 'http://schema.org/extensions', - 'summary': rule['ms_teams_alert_summary'], - 'title': rule['alert_subject'], - 'themeColor': '#124578', - 'text': body - } - mock_post_request.assert_called_once_with( - rule['ms_teams_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_ms_teams_proxy(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'ms_teams_webhook_url': 'http://test.webhook.url', - 'ms_teams_alert_summary': 'Alert from ElastAlert', - 'ms_teams_proxy': 'https://test.proxy.url', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MsTeamsAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - '@type': 'MessageCard', - '@context': 'http://schema.org/extensions', - 'summary': rule['ms_teams_alert_summary'], - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__() - } - mock_post_request.assert_called_once_with( - rule['ms_teams_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies={'https': rule['ms_teams_proxy']} - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_ms_teams_ea_exception(): - try: - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'ms_teams_webhook_url': 'http://test.webhook.url', - 'ms_teams_alert_summary': 'Alert from ElastAlert', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MsTeamsAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - mock_run = mock.MagicMock(side_effect=RequestException) - with mock.patch('requests.post', mock_run), pytest.raises(RequestException): - alert.alert([match]) - except EAException: - assert True - - -def test_slack_uses_custom_title(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_uses_custom_timeout(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 
'http://please.dontgohere.slack', - 'alert_subject': 'Cool subject', - 'alert': [], - 'slack_timeout': 20 - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=20 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_uses_rule_name_when_custom_title_is_not_provided(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': ['http://please.dontgohere.slack'], - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none', - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'][0], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_uses_custom_slack_channel(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': ['http://please.dontgohere.slack'], - 'slack_channel_override': '#test-alert', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '#test-alert', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none', - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'][0], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_uses_list_of_custom_slack_channel(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': ['http://please.dontgohere.slack'], - 'slack_channel_override': ['#test-alert', '#test-alert2'], - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') 
as mock_post_request: - alert.alert([match]) - - expected_data1 = { - 'username': 'elastalert', - 'channel': '#test-alert', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - expected_data2 = { - 'username': 'elastalert', - 'channel': '#test-alert2', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['name'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_with( - rule['slack_webhook_url'][0], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data2 == json.loads(mock_post_request.call_args_list[1][1]['data']) - - -def test_slack_attach_kibana_discover_url_when_generated(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'kibana_discover_url': 'http://kibana#discover' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - }, - { - 'color': '#ec4b98', - 'title': 'Discover in Kibana', - 'title_link': 'http://kibana#discover' - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_slack_attach_kibana_discover_url_when_not_generated(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_slack_kibana_discover_title(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_kibana_discover_title': 'Click to discover 
in Kibana', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'kibana_discover_url': 'http://kibana#discover' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - }, - { - 'color': '#ec4b98', - 'title': 'Click to discover in Kibana', - 'title_link': 'http://kibana#discover' - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_slack_kibana_discover_color(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_attach_kibana_discover_url': True, - 'slack_kibana_discover_color': 'blue', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'kibana_discover_url': 'http://kibana#discover' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'parse': 'none', - 'text': '', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - }, - { - 'color': 'blue', - 'title': 'Discover in Kibana', - 'title_link': 'http://kibana#discover' - } - ], - 'icon_emoji': ':ghost:', - 'channel': '' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_slack_ignore_ssl_errors(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_ignore_ssl_errors': True, - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=False, - timeout=10 - ) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_proxy(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_proxy': 'http://proxy.url', - 
'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies={'https': rule['slack_proxy']}, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_username_override(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'test elastalert', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'test elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_title_link(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_title_link': 'http://slack.title.link', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [], - 'title_link': 'http://slack.title.link' - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_title(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_title': 'slack title', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - 
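An aside that applies to the command-alerter tests further up: assert mock_popen.called_with(...) is not a real assertion. A Mock auto-creates any attribute you touch, so called_with(...) merely invokes a child mock and returns another (truthy) mock; those asserts can never fail, which is how mismatches such as '/bin/test' versus the rule's '/bin/test/' go unnoticed. The checking API in unittest.mock is assert_called_with:

    from unittest import mock

    m = mock.Mock()
    m(1)

    assert m.called_with(2)      # always passes: truthy auto-created child mock
    m.assert_called_with(1)      # real check: passes
    try:
        m.assert_called_with(2)  # real check: raises AssertionError on mismatch
    except AssertionError:
        pass
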
rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': 'slack title', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_icon_url_override(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_icon_url_override': 'http://slack.icon.url.override', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_url': 'http://slack.icon.url.override', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_msg_color(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_msg_color': 'good', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'good', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_parse_override(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_parse_override': 'full', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 
'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'full' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_text_string(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_text_string': 'text str', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': 'text str', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_alert_fields(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_alert_fields': [ - { - 'title': 'Host', - 'value': 'somefield', - 'short': 'true' - }, - { - 'title': 'Sensors', - 'value': '@timestamp', - 'short': 'true' - } - ], - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': - [ - { - 'short': 'true', - 'title': 'Host', - 'value': 'foobarbaz' - }, - { - 'short': 'true', - 'title': 'Sensors', - 'value': '2016-01-01T00:00:00' - } - ], - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_ca_certs(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_ca_certs': True, - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - 
rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [], - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_footer(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_footer': 'Elastic Alerts', - 'slack_footer_icon': 'http://footer.icon.url', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [], - 'footer': 'Elastic Alerts', - 'footer_icon': 'http://footer.icon.url' - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_image_url(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_image_url': 'http://image.url', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [], - 'image_url': 'http://image.url', - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_thumb_url(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_thumb_url': 'http://thumb.url', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - 
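The Slack tests in this stretch all assert the same envelope and vary one key per test. For readers tracking the deltas, the shared skeleton is the following (a summary of the expected payloads, not a helper the suite defines):

    def base_slack_payload(title, text):
        # Every expected_data above and below is this dict with exactly one
        # aspect overridden (channel, icon_url, color, footer, image_url,
        # thumb_url, author fields, pretext, ...).
        return {
            'username': 'elastalert',
            'channel': '',
            'icon_emoji': ':ghost:',
            'attachments': [{
                'color': 'danger',
                'title': title,
                'text': text,
                'mrkdwn_in': ['text', 'pretext'],
                'fields': [],
            }],
            'text': '',
            'parse': 'none',
        }
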
rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [], - 'thumb_url': 'http://thumb.url', - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_author_name(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_author_name': 'author name', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [], - 'author_name': 'author name', - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_author_link(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_author_link': 'http://author.url', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [], - 'author_link': 'http://author.url', - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_author_icon(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_author_icon': 'http://author.icon', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = 
SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [], - 'author_icon': 'http://author.icon', - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_msg_pretext(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_msg_pretext': 'pretext value', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': rule['alert_subject'], - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [], - 'pretext': 'pretext value' - } - ], - 'text': '', - 'parse': 'none' - } - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - verify=True, - timeout=10 - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_slack_ea_exception(): - try: - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_msg_pretext': 'pretext value', - 'alert_subject': 'Cool subject', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - mock_run = mock.MagicMock(side_effect=RequestException) - with mock.patch('requests.post', mock_run), pytest.raises(RequestException): - alert.alert([match]) - except EAException: - assert True - - -def test_http_alerter_with_payload(): - rule = { - 'name': 'Test HTTP Post Alerter With Payload', - 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_payload': {'posted_name': 'somefield'}, - 'http_post_static_payload': {'name': 'somestaticname'}, - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) - match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - expected_data = { - 'posted_name': 'foobarbaz', - 'name': 'somestaticname' - } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) - assert expected_data == 
json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_http_alerter_with_payload_all_values(): - rule = { - 'name': 'Test HTTP Post Alerter With Payload', - 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_payload': {'posted_name': 'somefield'}, - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_all_values': True, - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) - match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - expected_data = { - 'posted_name': 'foobarbaz', - 'name': 'somestaticname', - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_http_alerter_without_payload(): - rule = { - 'name': 'Test HTTP Post Alerter Without Payload', - 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) - match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' - } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_http_alerter_proxy(): - rule = { - 'name': 'Test HTTP Post Alerter Without Payload', - 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_proxy': 'http://proxy.url', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) - match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' - } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies={'https': 'http://proxy.url'}, - timeout=10, - verify=True - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_http_alerter_timeout(): - rule = { - 'name': 'Test HTTP Post Alerter Without Payload', - 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_timeout': 20, - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) - match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as 
mock_post_request: - alert.alert([match]) - expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' - } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=20, - verify=True - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_http_alerter_headers(): - rule = { - 'name': 'Test HTTP Post Alerter Without Payload', - 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_headers': {'authorization': 'Basic 123dr3234'}, - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) - match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' - } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8', 'authorization': 'Basic 123dr3234'}, - proxies=None, - timeout=10, - verify=True - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_http_alerter_post_ca_certs_true(): - rule = { - 'name': 'Test HTTP Post Alerter Without Payload', - 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_ca_certs': True, - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) - match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' - } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_http_alerter_post_ca_certs_false(): - rule = { - 'name': 'Test HTTP Post Alerter Without Payload', - 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_ca_certs': False, - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) - match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' - } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - -def test_http_alerter_post_ea_exception(): - try: - rule = { - 'name': 'Test HTTP Post 
Alerter Without Payload',
-            'type': 'any',
-            'http_post_url': 'http://test.webhook.url',
-            'http_post_static_payload': {'name': 'somestaticname'},
-            'http_post_ca_certs': False,
-            'alert': []
-        }
-        rules_loader = FileRulesLoader({})
-        rules_loader.load_modules(rule)
-        alert = HTTPPostAlerter(rule)
-        match = {
-            '@timestamp': '2017-01-01T00:00:00',
-            'somefield': 'foobarbaz'
-        }
-        mock_run = mock.MagicMock(side_effect=RequestException)
-        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
-            alert.alert([match])
-    except EAException:
-        assert True
-
-
-def test_pagerduty_alerter():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'description': 'Test PD Rule',
-        'details': {
-            'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n'
-        },
-        'event_type': 'trigger',
-        'incident_key': '',
-        'service_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with('https://events.pagerduty.com/generic/2010-04-15/create_event.json',
-                                              data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_v2():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_api_version': 'v2',
-        'pagerduty_v2_payload_class': 'ping failure',
-        'pagerduty_v2_payload_component': 'mysql',
-        'pagerduty_v2_payload_group': 'app-stack',
-        'pagerduty_v2_payload_severity': 'error',
-        'pagerduty_v2_payload_source': 'mysql.host.name',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'payload': {
-            'class': 'ping failure',
-            'component': 'mysql',
-            'group': 'app-stack',
-            'severity': 'error',
-            'source': 'mysql.host.name',
-            'summary': 'Test PD Rule',
-            'custom_details': {
-                'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n'
-            },
-            'timestamp': '2017-01-01T00:00:00'
-        },
-        'event_action': 'trigger',
-        'dedup_key': '',
-        'routing_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue',
-                                              data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_v2_payload_class_args():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_api_version': 'v2',
-        'pagerduty_v2_payload_class': 'somefield',
-        'pagerduty_v2_payload_class_args': ['@timestamp', 'somefield'],
-        'pagerduty_v2_payload_component': 'mysql',
-        'pagerduty_v2_payload_group': 'app-stack',
-        'pagerduty_v2_payload_severity': 'error',
-        'pagerduty_v2_payload_source': 'mysql.host.name',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'payload': {
-            'class': 'somefield',
-            'component': 'mysql',
-            'group': 'app-stack',
-            'severity': 'error',
-            'source': 'mysql.host.name',
-            'summary': 'Test PD Rule',
-            'custom_details': {
-                'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n'
-            },
-            'timestamp': '2017-01-01T00:00:00'
-        },
-        'event_action': 'trigger',
-        'dedup_key': '',
-        'routing_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue',
-                                              data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_v2_payload_component_args():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_api_version': 'v2',
-        'pagerduty_v2_payload_class': 'ping failure',
-        'pagerduty_v2_payload_component': 'somefield',
-        'pagerduty_v2_payload_component_args': ['@timestamp', 'somefield'],
-        'pagerduty_v2_payload_group': 'app-stack',
-        'pagerduty_v2_payload_severity': 'error',
-        'pagerduty_v2_payload_source': 'mysql.host.name',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'payload': {
-            'class': 'ping failure',
-            'component': 'somefield',
-            'group': 'app-stack',
-            'severity': 'error',
-            'source': 'mysql.host.name',
-            'summary': 'Test PD Rule',
-            'custom_details': {
-                'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n'
-            },
-            'timestamp': '2017-01-01T00:00:00'
-        },
-        'event_action': 'trigger',
-        'dedup_key': '',
-        'routing_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue',
-                                              data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_v2_payload_group_args():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_api_version': 'v2',
-        'pagerduty_v2_payload_class': 'ping failure',
-        'pagerduty_v2_payload_component': 'mysql',
-        'pagerduty_v2_payload_group': 'somefield',
-        'pagerduty_v2_payload_group_args': ['@timestamp', 'somefield'],
-        'pagerduty_v2_payload_severity': 'error',
-        'pagerduty_v2_payload_source': 'mysql.host.name',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'payload': {
-            'class': 'ping failure',
-            'component': 'mysql',
-            'group': 'somefield',
-            'severity': 'error',
-            'source': 'mysql.host.name',
-            'summary': 'Test PD Rule',
-            'custom_details': {
-                'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n'
-            },
-            'timestamp': '2017-01-01T00:00:00'
-        },
-        'event_action': 'trigger',
-        'dedup_key': '',
-        'routing_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue',
-                                              data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_v2_payload_source_args():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_api_version': 'v2',
-        'pagerduty_v2_payload_class': 'ping failure',
-        'pagerduty_v2_payload_component': 'mysql',
-        'pagerduty_v2_payload_group': 'app-stack',
-        'pagerduty_v2_payload_severity': 'error',
-        'pagerduty_v2_payload_source': 'somefield',
-        'pagerduty_v2_payload_source_args': ['@timestamp', 'somefield'],
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'payload': {
-            'class': 'ping failure',
-            'component': 'mysql',
-            'group': 'app-stack',
-            'severity': 'error',
-            'source': 'somefield',
-            'summary': 'Test PD Rule',
-            'custom_details': {
-                'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n'
-            },
-            'timestamp': '2017-01-01T00:00:00'
-        },
-        'event_action': 'trigger',
-        'dedup_key': '',
-        'routing_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue',
-                                              data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_v2_payload_custom_details():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_api_version': 'v2',
-        'pagerduty_v2_payload_class': 'ping failure',
-        'pagerduty_v2_payload_component': 'mysql',
-        'pagerduty_v2_payload_group': 'app-stack',
-        'pagerduty_v2_payload_severity': 'error',
-        'pagerduty_v2_payload_source': 'mysql.host.name',
-        'pagerduty_v2_payload_custom_details': {'a': 'somefield', 'c': 'f'},
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'payload': {
-            'class': 'ping failure',
-            'component': 'mysql',
-            'group': 'app-stack',
-            'severity': 'error',
-            'source': 'mysql.host.name',
-            'summary': 'Test PD Rule',
-            'custom_details': {
-                'a': 'foobarbaz',
-                'c': None,
-                'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n'
-            },
-            'timestamp': '2017-01-01T00:00:00'
-        },
-        'event_action': 'trigger',
-        'dedup_key': '',
-        'routing_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue',
-                                              data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_v2_payload_include_all_info():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_api_version': 'v2',
-        'pagerduty_v2_payload_class': 'ping failure',
-        'pagerduty_v2_payload_component': 'mysql',
-        'pagerduty_v2_payload_group': 'app-stack',
-        'pagerduty_v2_payload_severity': 'error',
-        'pagerduty_v2_payload_source': 'mysql.host.name',
-        'pagerduty_v2_payload_include_all_info': False,
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'payload': {
-            'class': 'ping failure',
-            'component': 'mysql',
-            'group': 'app-stack',
-            'severity': 'error',
-            'source': 'mysql.host.name',
-            'summary': 'Test PD Rule',
-            'custom_details': {},
-            'timestamp': '2017-01-01T00:00:00'
-        },
-        'event_action': 'trigger',
-        'dedup_key': '',
-        'routing_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue',
-                                              data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_custom_incident_key():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_incident_key': 'custom key',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'description': 'Test PD Rule',
-        'details': {
-            'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n'
-        },
-        'event_type': 'trigger',
-        'incident_key': 'custom key',
-        'service_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_custom_incident_key_with_args():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_incident_key': 'custom {0}',
-        'pagerduty_incident_key_args': ['somefield'],
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'description': 'Test PD Rule',
-        'details': {
-            'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n'
-        },
-        'event_type': 'trigger',
-        'incident_key': 'custom foobarbaz',
-        'service_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_custom_alert_subject():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'alert_subject': 'Hungry kittens',
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_incident_key': 'custom {0}',
-        'pagerduty_incident_key_args': ['somefield'],
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'description': 'Hungry kittens',
-        'details': {
-            'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n'
-        },
-        'event_type': 'trigger',
-        'incident_key': 'custom foobarbaz',
-        'service_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_custom_alert_subject_with_args():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'alert_subject': '{0} kittens',
-        'alert_subject_args': ['somefield'],
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_incident_key': 'custom {0}',
-        'pagerduty_incident_key_args': ['someotherfield'],
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'Stinky',
-        'someotherfield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'description': 'Stinky kittens',
-        'details': {
-            'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinky\nsomeotherfield: foobarbaz\n'
-        },
-        'event_type': 'trigger',
-        'incident_key': 'custom foobarbaz',
-        'service_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_custom_alert_subject_with_args_specifying_trigger():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'alert_subject': '{0} kittens',
-        'alert_subject_args': ['somefield'],
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_event_type': 'trigger',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_incident_key': 'custom {0}',
-        'pagerduty_incident_key_args': ['someotherfield'],
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'Stinkiest',
-        'someotherfield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'description': 'Stinkiest kittens',
-        'details': {
-            'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinkiest\nsomeotherfield: foobarbaz\n'
-        },
-        'event_type': 'trigger',
-        'incident_key': 'custom foobarbaz',
-        'service_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with(alert.url,
-                                              data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_alerter_proxy():
-    rule = {
-        'name': 'Test PD Rule',
-        'type': 'any',
-        'alert_subject': '{0} kittens',
-        'alert_subject_args': ['somefield'],
-        'pagerduty_service_key': 'magicalbadgers',
-        'pagerduty_event_type': 'trigger',
-        'pagerduty_client_name': 'ponies inc.',
-        'pagerduty_incident_key': 'custom {0}',
-        'pagerduty_incident_key_args': ['someotherfield'],
-        'pagerduty_proxy': 'http://proxy.url',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerDutyAlerter(rule)
-    match = {
-        '@timestamp': '2017-01-01T00:00:00',
-        'somefield': 'Stinkiest',
-        'someotherfield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'client': 'ponies inc.',
-        'description': 'Stinkiest kittens',
-        'details': {
-            'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinkiest\nsomeotherfield: foobarbaz\n'
-        },
-        'event_type': 'trigger',
-        'incident_key': 'custom foobarbaz',
-        'service_key': 'magicalbadgers',
-    }
-    mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'},
-                                              proxies={'https': 'http://proxy.url'})
-    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_pagerduty_ea_exception():
-    try:
-        rule = {
-            'name': 'Test PD Rule',
-            'type': 'any',
-            'alert_subject': '{0} kittens',
-            'alert_subject_args': ['somefield'],
-            'pagerduty_service_key': 'magicalbadgers',
-            'pagerduty_event_type': 'trigger',
-            'pagerduty_client_name': 'ponies inc.',
-            'pagerduty_incident_key': 'custom {0}',
-            'pagerduty_incident_key_args': ['someotherfield'],
-            'pagerduty_proxy': 'http://proxy.url',
-            'alert': []
-        }
-        rules_loader = FileRulesLoader({})
-        rules_loader.load_modules(rule)
-        alert = PagerDutyAlerter(rule)
-        match = {
-            '@timestamp': '2017-01-01T00:00:00',
-            'somefield': 'Stinkiest',
-            'someotherfield': 'foobarbaz'
-        }
-        mock_run = mock.MagicMock(side_effect=RequestException)
-        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
-            alert.alert([match])
-    except EAException:
-        assert True
-
-
-def test_alert_text_kw(ea):
-    rule = ea.rules[0].copy()
-    rule['alert_text'] = '{field} at {time}'
-    rule['alert_text_kw'] = {
-        '@timestamp': 'time',
-        'field': 'field',
-    }
-    match = {'@timestamp': '1918-01-17', 'field': 'value'}
-    alert_text = str(BasicMatchString(rule, match))
-    body = '{field} at {@timestamp}'.format(**match)
-    assert body in alert_text
-
-
-def test_alert_text_global_substitution(ea):
-    rule = ea.rules[0].copy()
-    rule['owner'] = 'the owner from rule'
-    rule['priority'] = 'priority from rule'
-    rule['abc'] = 'abc from rule'
-    rule['alert_text'] = 'Priority: {0}; Owner: {1}; Abc: {2}'
-    rule['alert_text_args'] = ['priority', 'owner', 'abc']
-
-    match = {
-        '@timestamp': '2016-01-01',
-        'field': 'field_value',
-        'abc': 'abc from match',
-    }
-
-    alert_text = str(BasicMatchString(rule, match))
-    assert 'Priority: priority from rule' in alert_text
-    assert 'Owner: the owner from rule' in alert_text
-
-    # When the key exists in both places, it will come from the match
-    assert 'Abc: abc from match' in alert_text
-
-
-def test_alert_text_kw_global_substitution(ea):
-    rule = ea.rules[0].copy()
-    rule['foo_rule'] = 'foo from rule'
-    rule['owner'] = 'the owner from rule'
-    rule['abc'] = 'abc from rule'
-    rule['alert_text'] = 'Owner: {owner}; Foo: {foo}; Abc: {abc}'
-    rule['alert_text_kw'] = {
-        'owner': 'owner',
-        'foo_rule': 'foo',
-        'abc': 'abc',
-    }
-
-    match = {
-        '@timestamp': '2016-01-01',
-        'field': 'field_value',
-        'abc': 'abc from match',
-    }
-
-    alert_text = str(BasicMatchString(rule, match))
-    assert 'Owner: the owner from rule' in alert_text
-    assert 'Foo: foo from rule' in alert_text
-
-    # When the key exists in both places, it will come from the match
-    assert 'Abc: abc from match' in alert_text
-
-
-def test_resolving_rule_references():
-    rule = {
-        'name': 'test_rule',
-        'type': mock_rule(),
-        'owner': 'the_owner',
-        'priority': 2,
-        'list_of_things': [
-            '1',
-            '$owner$',
-            [
-                '11',
-                '$owner$',
-            ],
-        ],
-        'nested_dict': {
-            'nested_one': '1',
-            'nested_owner': '$owner$',
-        },
-        'resolved_string_reference': '$owner$',
-        'resolved_int_reference': '$priority$',
-        'unresolved_reference': '$foo$',
-    }
-    alert = Alerter(rule)
-    assert 'the_owner' == alert.rule['resolved_string_reference']
-    assert 2 == alert.rule['resolved_int_reference']
-    assert '$foo$' == alert.rule['unresolved_reference']
-    assert 'the_owner' == alert.rule['list_of_things'][1]
-    assert 'the_owner' == alert.rule['list_of_things'][2][1]
-    assert 'the_owner' == alert.rule['nested_dict']['nested_owner']
-
-
-def test_alerta_no_auth():
-    rule = {
-        'name': 'Test Alerta rule!',
-        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
-        'timeframe': datetime.timedelta(hours=1),
-        'timestamp_field': '@timestamp',
-        'alerta_api_skip_ssl': True,
-        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
-        'alerta_attributes_values': ["%(key)s", "%(logdate)s", "%(sender_ip)s"],
-        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
-        'alerta_event': "ProbeUP",
-        'alerta_group': "Health",
-        'alerta_origin': "ElastAlert 2",
-        'alerta_severity': "debug",
-        'alerta_text': "Probe %(hostname)s is UP at %(logdate)s GMT",
-        'alerta_value': "UP",
-        'type': 'any',
-        'alerta_use_match_timestamp': True,
-        'alert': 'alerta'
-    }
-
-    match = {
-        '@timestamp': '2014-10-10T00:00:00',
-        # 'key': ---- missing field on purpose, to verify that simply the text is left empty
-        # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty
-        'sender_ip': '1.1.1.1',
-        'hostname': 'aProbe'
-    }
-
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = AlertaAlerter(rule)
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        "origin": "ElastAlert 2",
-        "resource": "elastalert",
-        "severity": "debug",
-        "service": ["elastalert"],
-        "tags": [],
-        "text": "Probe aProbe is UP at GMT",
-        "value": "UP",
-        "createTime": "2014-10-10T00:00:00.000000Z",
-        "environment": "Production",
-        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
-        "timeout": 86400,
-        "correlate": ["ProbeUP", "ProbeDOWN"],
-        "group": "Health",
-        "attributes": {"senderIP": "1.1.1.1", "hostname": "", "TimestampEvent": ""},
-        "type": "elastalert",
-        "event": "ProbeUP"
-    }
-
-    mock_post_request.assert_called_once_with(
-        alert.url,
-        data=mock.ANY,
-        headers={
-            'content-type': 'application/json'},
-        verify=False
-    )
-    assert expected_data == json.loads(
-        mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_alerta_auth():
-    rule = {
-        'name': 'Test Alerta rule!',
-        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
-        'alerta_api_key': '123456789ABCDEF',
-        'timeframe': datetime.timedelta(hours=1),
-        'timestamp_field': '@timestamp',
-        'alerta_severity': "debug",
-        'type': 'any',
-        'alerta_use_match_timestamp': True,
-        'alert': 'alerta'
-    }
-
-    match = {
-        '@timestamp': '2014-10-10T00:00:00',
-        'sender_ip': '1.1.1.1',
-        'hostname': 'aProbe'
-    }
-
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = AlertaAlerter(rule)
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    mock_post_request.assert_called_once_with(
-        alert.url,
-        data=mock.ANY,
-        verify=True,
-        headers={
-            'content-type': 'application/json',
-            'Authorization': 'Key {}'.format(rule['alerta_api_key'])})
-
-
-def test_alerta_new_style():
-    rule = {
-        'name': 'Test Alerta rule!',
-        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
-        'timeframe': datetime.timedelta(hours=1),
-        'timestamp_field': '@timestamp',
-        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
-        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
-        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
-        'alerta_event': "ProbeUP",
-        'alerta_group': "Health",
-        'alerta_origin': "ElastAlert 2",
-        'alerta_severity': "debug",
-        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
-        'alerta_value': "UP",
-        'type': 'any',
-        'alerta_use_match_timestamp': True,
-        'alert': 'alerta'
-    }
-
-    match = {
-        '@timestamp': '2014-10-10T00:00:00',
-        # 'key': ---- missing field on purpose, to verify that simply the text is left empty
-        # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty
-        'sender_ip': '1.1.1.1',
-        'hostname': 'aProbe'
-    }
-
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = AlertaAlerter(rule)
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        "origin": "ElastAlert 2",
-        "resource": "elastalert",
-        "severity": "debug",
-        "service": ["elastalert"],
-        "tags": [],
-        "text": "Probe aProbe is UP at GMT",
-        "value": "UP",
-        "createTime": "2014-10-10T00:00:00.000000Z",
-        "environment": "Production",
-        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
-        "timeout": 86400,
-        "correlate": ["ProbeUP", "ProbeDOWN"],
-        "group": "Health",
-        "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""},
-        "type": "elastalert",
-        "event": "ProbeUP"
-    }
-
-    mock_post_request.assert_called_once_with(
-        alert.url,
-        data=mock.ANY,
-        verify=True,
-        headers={
-            'content-type': 'application/json'}
-    )
-    assert expected_data == json.loads(
-        mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_alerta_use_qk_as_resource():
-    rule = {
-        'name': 'Test Alerta rule!',
-        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
-        'timeframe': datetime.timedelta(hours=1),
-        'timestamp_field': '@timestamp',
-        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
-        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
-        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
-        'alerta_event': "ProbeUP",
-        'alerta_group': "Health",
-        'alerta_origin': "ElastAlert 2",
-        'alerta_severity': "debug",
-        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
-        'alerta_value': "UP",
-        'type': 'any',
-        'alerta_use_match_timestamp': True,
-        'alerta_use_qk_as_resource': True,
-        'query_key': 'hostname',
-        'alert': 'alerta'
-    }
-
-    match = {
-        '@timestamp': '2014-10-10T00:00:00',
-        'sender_ip': '1.1.1.1',
-        'hostname': 'aProbe'
-    }
-
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = AlertaAlerter(rule)
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        "origin": "ElastAlert 2",
-        "resource": "aProbe",
-        "severity": "debug",
-        "service": ["elastalert"],
-        "tags": [],
-        "text": "Probe aProbe is UP at GMT",
-        "value": "UP",
-        "createTime": "2014-10-10T00:00:00.000000Z",
-        "environment": "Production",
-        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
-        "timeout": 86400,
-        "correlate": ["ProbeUP", "ProbeDOWN"],
-        "group": "Health",
-        "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""},
-        "type": "elastalert",
-        "event": "ProbeUP"
-    }
-
-    mock_post_request.assert_called_once_with(
-        alert.url,
-        data=mock.ANY,
-        verify=True,
-        headers={
-            'content-type': 'application/json'}
-    )
-    assert expected_data == json.loads(
-        mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_alerta_timeout():
-    rule = {
-        'name': 'Test Alerta rule!',
-        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
-        'timeframe': datetime.timedelta(hours=1),
-        'timestamp_field': '@timestamp',
-        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
-        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
-        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
-        'alerta_event': "ProbeUP",
-        'alerta_group': "Health",
-        'alerta_origin': "ElastAlert 2",
-        'alerta_severity': "debug",
-        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
-        'alerta_value': "UP",
-        'type': 'any',
-        'alerta_use_match_timestamp': True,
-        'alerta_timeout': 86450,
-        'alert': 'alerta'
-    }
-
-    match = {
-        '@timestamp': '2014-10-10T00:00:00',
-        'sender_ip': '1.1.1.1',
-        'hostname': 'aProbe'
-    }
-
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = AlertaAlerter(rule)
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        "origin": "ElastAlert 2",
-        "resource": "elastalert",
-        "severity": "debug",
-        "service": ["elastalert"],
-        "tags": [],
-        "text": "Probe aProbe is UP at GMT",
-        "value": "UP",
-        "createTime": "2014-10-10T00:00:00.000000Z",
-        "environment": "Production",
-        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
-        "timeout": 86450,
-        "correlate": ["ProbeUP", "ProbeDOWN"],
-        "group": "Health",
-        "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""},
-        "type": "elastalert",
-        "event": "ProbeUP"
-    }
-
-    mock_post_request.assert_called_once_with(
-        alert.url,
-        data=mock.ANY,
-        verify=True,
-        headers={
-            'content-type': 'application/json'}
-    )
-    assert expected_data == json.loads(
-        mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_alerta_type():
-    rule = {
-        'name': 'Test Alerta rule!',
-        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
-        'timeframe': datetime.timedelta(hours=1),
-        'timestamp_field': '@timestamp',
-        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
-        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
-        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
-        'alerta_event': "ProbeUP",
-        'alerta_group': "Health",
-        'alerta_origin': "ElastAlert 2",
-        'alerta_severity': "debug",
-        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
-        'alerta_value': "UP",
-        'type': 'any',
-        'alerta_use_match_timestamp': True,
-        'alerta_type': 'elastalert2',
-        'alert': 'alerta'
-    }
-
-    match = {
-        '@timestamp': '2014-10-10T00:00:00',
-        'sender_ip': '1.1.1.1',
-        'hostname': 'aProbe'
-    }
-
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = AlertaAlerter(rule)
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        "origin": "ElastAlert 2",
-        "resource": "elastalert",
-        "severity": "debug",
-        "service": ["elastalert"],
-        "tags": [],
-        "text": "Probe aProbe is UP at GMT",
-        "value": "UP",
-        "createTime": "2014-10-10T00:00:00.000000Z",
-        "environment": "Production",
-        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
-        "timeout": 86400,
-        "correlate": ["ProbeUP", "ProbeDOWN"],
-        "group": "Health",
-        "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""},
-        "type": "elastalert2",
-        "event": "ProbeUP"
-    }
-
-    mock_post_request.assert_called_once_with(
-        alert.url,
-        data=mock.ANY,
-        verify=True,
-        headers={
-            'content-type': 'application/json'}
-    )
-    assert expected_data == json.loads(
-        mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_alerta_resource():
-    rule = {
-        'name': 'Test Alerta rule!',
-        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
-        'timeframe': datetime.timedelta(hours=1),
-        'timestamp_field': '@timestamp',
-        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
-        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
-        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
-        'alerta_event': "ProbeUP",
-        'alerta_group': "Health",
-        'alerta_origin': "ElastAlert 2",
-        'alerta_severity': "debug",
-        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
-        'alerta_value': "UP",
-        'type': 'any',
-        'alerta_use_match_timestamp': True,
-        'alerta_resource': 'elastalert2',
-        'alert': 'alerta'
-    }
-
-    match = {
-        '@timestamp': '2014-10-10T00:00:00',
-        'sender_ip': '1.1.1.1',
-        'hostname': 'aProbe'
-    }
-
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = AlertaAlerter(rule)
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        "origin": "ElastAlert 2",
-        "resource": "elastalert2",
-        "severity": "debug",
-        "service": ["elastalert"],
-        "tags": [],
-        "text": "Probe aProbe is UP at GMT",
-        "value": "UP",
-        "createTime": "2014-10-10T00:00:00.000000Z",
-        "environment": "Production",
-        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
-        "timeout": 86400,
-        "correlate": ["ProbeUP", "ProbeDOWN"],
-        "group": "Health",
-        "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""},
-        "type": "elastalert",
-        "event": "ProbeUP"
-    }
-
-    mock_post_request.assert_called_once_with(
-        alert.url,
-        data=mock.ANY,
-        verify=True,
-        headers={
-            'content-type': 'application/json'}
-    )
-    assert expected_data == json.loads(
-        mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_alerta_service():
-    rule = {
-        'name': 'Test Alerta rule!',
-        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
-        'timeframe': datetime.timedelta(hours=1),
-        'timestamp_field': '@timestamp',
-        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
-        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
-        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
-        'alerta_event': "ProbeUP",
-        'alerta_group': "Health",
-        'alerta_origin': "ElastAlert 2",
-        'alerta_severity': "debug",
-        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
-        'alerta_value': "UP",
-        'type': 'any',
-        'alerta_use_match_timestamp': True,
-        'alerta_service': ['elastalert2'],
-        'alert': 'alerta'
-    }
-
-    match = {
-        '@timestamp': '2014-10-10T00:00:00',
-        'sender_ip': '1.1.1.1',
-        'hostname': 'aProbe'
-    }
-
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = AlertaAlerter(rule)
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        "origin": "ElastAlert 2",
-        "resource": "elastalert",
-        "severity": "debug",
-        "service": ["elastalert2"],
-        "tags": [],
-        "text": "Probe aProbe is UP at GMT",
-        "value": "UP",
-        "createTime": "2014-10-10T00:00:00.000000Z",
-        "environment": "Production",
-        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
-        "timeout": 86400,
-        "correlate": ["ProbeUP", "ProbeDOWN"],
-        "group": "Health",
-        "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""},
-        "type": "elastalert",
-        "event": "ProbeUP"
-    }
-
-    mock_post_request.assert_called_once_with(
-        alert.url,
-        data=mock.ANY,
-        verify=True,
-        headers={
-            'content-type': 'application/json'}
-    )
-    assert expected_data == json.loads(
-        mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_alerta_environment():
-    rule = {
-        'name': 'Test Alerta rule!',
-        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
-        'timeframe': datetime.timedelta(hours=1),
-        'timestamp_field': '@timestamp',
-        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
-        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
-        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
-        'alerta_event': "ProbeUP",
-        'alerta_group': "Health",
-        'alerta_origin': "ElastAlert 2",
-        'alerta_severity': "debug",
-        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
-        'alerta_value': "UP",
-        'type': 'any',
-        'alerta_use_match_timestamp': True,
-        'alerta_environment': 'Production2',
-        'alert': 'alerta'
-    }
-
-    match = {
-        '@timestamp': '2014-10-10T00:00:00',
-        'sender_ip': '1.1.1.1',
-        'hostname': 'aProbe'
-    }
-
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = AlertaAlerter(rule)
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        "origin": "ElastAlert 2",
-        "resource": "elastalert",
-        "severity": "debug",
-        "service": ["elastalert"],
-        "tags": [],
-        "text": "Probe aProbe is UP at GMT",
-        "value": "UP",
-        "createTime": "2014-10-10T00:00:00.000000Z",
-        "environment": "Production2",
-        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
-        "timeout": 86400,
-        "correlate": ["ProbeUP", "ProbeDOWN"],
-        "group": "Health",
-        "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""},
-        "type": "elastalert",
-        "event": "ProbeUP"
-    }
-
-    mock_post_request.assert_called_once_with(
-        alert.url,
-        data=mock.ANY,
-        verify=True,
-        headers={
-            'content-type': 'application/json'}
-    )
-    assert expected_data == json.loads(
-        mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_alerta_tags():
-    rule = {
-        'name': 'Test Alerta rule!',
-        'alerta_api_url': 'http://elastalerthost:8080/api/alert',
-        'timeframe': datetime.timedelta(hours=1),
-        'timestamp_field': '@timestamp',
-        'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
-        'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
-        'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
-        'alerta_event': "ProbeUP",
-        'alerta_group': "Health",
-        'alerta_origin': "ElastAlert 2",
-        'alerta_severity': "debug",
-        'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
-        'alerta_value': "UP",
-        'type': 'any',
-        'alerta_use_match_timestamp': True,
-        'alerta_tags': ['elastalert2'],
-        'alert': 'alerta'
-    }
-
-    match = {
-        '@timestamp': '2014-10-10T00:00:00',
-        'sender_ip': '1.1.1.1',
-        'hostname': 'aProbe'
-    }
-
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = AlertaAlerter(rule)
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        "origin": "ElastAlert 2",
-        "resource": "elastalert",
-        "severity": "debug",
-        "service": ["elastalert"],
-        "tags": ['elastalert2'],
-        "text": "Probe aProbe is UP at GMT",
-        "value": "UP",
-        "createTime": "2014-10-10T00:00:00.000000Z",
-        "environment": "Production",
-        "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n",
-        "timeout": 86400,
-        "correlate": ["ProbeUP", "ProbeDOWN"],
-        "group": "Health",
-        "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""},
-        "type": "elastalert",
-        "event": "ProbeUP"
-    }
-
-    mock_post_request.assert_called_once_with(
-        alert.url,
-        data=mock.ANY,
-        verify=True,
-        headers={
-            'content-type': 'application/json'}
-    )
-    assert expected_data == json.loads(
-        mock_post_request.call_args_list[0][1]['data'])
-
-
-def test_alerta_ea_exception():
-    try:
-        rule = {
-            'name': 'Test Alerta rule!',
-            'alerta_api_url': 'http://elastalerthost:8080/api/alert',
-            'timeframe': datetime.timedelta(hours=1),
-            'timestamp_field': '@timestamp',
-            'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"],
-            'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"],
-            'alerta_correlate': ["ProbeUP", "ProbeDOWN"],
-            'alerta_event': "ProbeUP",
-            'alerta_group': "Health",
-            'alerta_origin': "ElastAlert 2",
-            'alerta_severity': "debug",
-            'alerta_text': "Probe {hostname} is UP at {logdate} GMT",
-            'alerta_value': "UP",
-            'type': 'any',
-            'alerta_use_match_timestamp': True,
-            'alerta_tags': ['elastalert2'],
-            'alert': 'alerta'
-        }
-
-        match = {
-            '@timestamp': '2014-10-10T00:00:00',
-            'sender_ip': '1.1.1.1',
-            'hostname': 'aProbe'
-        }
-
-        rules_loader = FileRulesLoader({})
-        rules_loader.load_modules(rule)
-        alert = AlertaAlerter(rule)
-        mock_run = mock.MagicMock(side_effect=RequestException)
-        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
-            alert.alert([match])
-    except EAException:
-        assert True
-
-
-def test_alert_subject_size_limit_no_args():
-    rule = {
-        'name': 'test_rule',
-        'type': mock_rule(),
-        'owner': 'the_owner',
-        'priority': 2,
-        'alert_subject': 'A very long subject',
-        'alert_subject_max_len': 5
-    }
-    alert = Alerter(rule)
-    alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}])
-    assert 5 == len(alertSubject)
-
-
-def test_alert_error():
-    rule = {
-        'name': 'test_rule',
-        'type': mock_rule(),
-        'owner': 'the_owner',
-        'priority': 2,
-        'alert_subject': 'A very long subject',
-        'alert_subject_max_len': 5
-    }
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'name': 'datadog-test-name'
-    }
-    alert = Alerter(rule)
-    try:
-        alert.alert([match])
-    except NotImplementedError:
-        assert True
-
-
-def test_alert_get_aggregation_summary_text__maximum_width():
-    rule = {
-        'name': 'test_rule',
-        'type': mock_rule(),
-        'owner': 'the_owner',
-        'priority': 2,
-        'alert_subject': 'A very long subject',
-        'alert_subject_max_len': 5
-    }
-    alert = Alerter(rule)
-    assert 80 == alert.get_aggregation_summary_text__maximum_width()
-
-
-def test_alert_subject_size_limit_with_args(ea):
-    rule = {
-        'name': 'test_rule',
-        'type': mock_rule(),
-        'owner': 'the_owner',
-        'priority': 2,
-        'alert_subject': 'Test alert for {0} {1}',
-        'alert_subject_args': ['test_term', 'test.term'],
-        'alert_subject_max_len': 6
-    }
-    alert = Alerter(rule)
-    alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}])
-    assert 6 == len(alertSubject)
-
-
-def test_datadog_alerter():
-    rule = {
-        'name': 'Test Datadog Event Alerter',
-        'type': 'any',
-        'datadog_api_key': 'test-api-key',
-        'datadog_app_key': 'test-app-key',
-        'alert': [],
-        'alert_subject': 'Test Datadog Event Alert'
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = DatadogAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'name': 'datadog-test-name'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        'title': rule['alert_subject'],
-        'text': "Test Datadog Event Alerter\n\n@timestamp: 2021-01-01T00:00:00\nname: datadog-test-name\n"
-    }
-    mock_post_request.assert_called_once_with(
-        "https://api.datadoghq.com/api/v1/events",
-        data=mock.ANY,
-        headers={
-            'Content-Type': 'application/json',
-            'DD-API-KEY': rule['datadog_api_key'],
-            'DD-APPLICATION-KEY': rule['datadog_app_key']
-        }
-    )
-    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
-    assert expected_data == actual_data
-
-
-def test_datadog_alerterea_exception():
-    try:
-        rule = {
-            'name': 'Test Datadog Event Alerter',
-            'type': 'any',
-            'datadog_api_key': 'test-api-key',
-            'datadog_app_key': 'test-app-key',
-            'alert': [],
-            'alert_subject': 'Test Datadog Event Alert'
-        }
-        rules_loader = FileRulesLoader({})
-        rules_loader.load_modules(rule)
-        alert = DatadogAlerter(rule)
-        match = {
-            '@timestamp': '2021-01-01T00:00:00',
-            'name': 'datadog-test-name'
-        }
-        mock_run = mock.MagicMock(side_effect=RequestException)
-        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
-            alert.alert([match])
-    except EAException:
-        assert True
-
-
-def test_pagertree():
-    rule = {
-        'name': 'Test PagerTree Rule',
-        'type': 'any',
-        'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerTreeAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        'event_type': 'create',
-        'Id': str(uuid.uuid4()),
-        'Title': 'Test PagerTree Rule',
-        'Description': 'Test PagerTree Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'
-    }
-
-    mock_post_request.assert_called_once_with(
-        rule['pagertree_integration_url'],
-        data=mock.ANY,
-        headers={'content-type': 'application/json'},
-        proxies=None
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
-    uuid4hex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I)
-    match = uuid4hex.match(actual_data['Id'])
-    assert bool(match) is True
-    assert expected_data["event_type"] == actual_data['event_type']
-    assert expected_data["Title"] == actual_data['Title']
-    assert expected_data["Description"] == actual_data['Description']
-
-
-def test_pagertree_proxy():
-    rule = {
-        'name': 'Test PagerTree Rule',
-        'type': 'any',
-        'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx',
-        'pagertree_proxy': 'http://proxy.url',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = PagerTreeAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        'event_type': 'create',
-        'Id': str(uuid.uuid4()),
-        'Title': 'Test PagerTree Rule',
-        'Description': 'Test PagerTree Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'
-    }
-
-    mock_post_request.assert_called_once_with(
-        rule['pagertree_integration_url'],
-        data=mock.ANY,
-        headers={'content-type': 'application/json'},
-        proxies={'https': 'http://proxy.url'}
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
-    uuid4hex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I)
-    match = uuid4hex.match(actual_data['Id'])
-    assert bool(match) is True
-    assert expected_data["event_type"] == actual_data['event_type']
-    assert expected_data["Title"] == actual_data['Title']
-    assert expected_data["Description"] == actual_data['Description']
-
-
-def test_pagertree_ea_exception():
-    try:
-        rule = {
-            'name': 'Test PagerTree Rule',
-            'type': 'any',
-            'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx',
-            'pagertree_proxy': 'http://proxy.url',
-            'alert': []
-        }
-        rules_loader = FileRulesLoader({})
-        rules_loader.load_modules(rule)
-        alert = PagerTreeAlerter(rule)
-        match = {
-            '@timestamp': '2021-01-01T00:00:00',
-            'somefield': 'foobarbaz'
-        }
-        mock_run = mock.MagicMock(side_effect=RequestException)
-        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
-            alert.alert([match])
-    except EAException:
-        assert True
-
-
-def test_line_notify():
-    rule = {
-        'name': 'Test LineNotify Rule',
-        'type': 'any',
-        'linenotify_access_token': 'xxxxx',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = LineNotifyAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        'message': 'Test LineNotify Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'
-    }
-
-    mock_post_request.assert_called_once_with(
-        'https://notify-api.line.me/api/notify',
-        data=mock.ANY,
-        headers={
-            'Content-Type': 'application/x-www-form-urlencoded',
-            'Authorization': 'Bearer {}'.format('xxxxx')
-        }
-    )
-
-    actual_data = mock_post_request.call_args_list[0][1]['data']
-    assert expected_data == actual_data
-
-
-def test_line_notify_ea_exception():
-    try:
-        rule = {
-            'name': 'Test LineNotify Rule',
-            'type': 'any',
-            'linenotify_access_token': 'xxxxx',
-            'alert': []
-        }
-        rules_loader = FileRulesLoader({})
-        rules_loader.load_modules(rule)
-        alert = LineNotifyAlerter(rule)
-        match = {
-            '@timestamp': '2021-01-01T00:00:00',
-            'somefield': 'foobarbaz'
-        }
-        mock_run = mock.MagicMock(side_effect=RequestException)
-        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
-            alert.alert([match])
-    except EAException:
-        assert True
-
-
-def test_gitter_msg_level_default():
-    rule = {
-        'name': 'Test Gitter Rule',
-        'type': 'any',
-        'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = GitterAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
-        'level': 'error'
-    }
-
-    mock_post_request.assert_called_once_with(
-        rule['gitter_webhook_url'],
-        mock.ANY,
-        headers={'content-type': 'application/json'},
-        proxies=None
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][0][1])
-    assert expected_data == actual_data
-    assert 'error' in actual_data['level']
-
-
-def test_gitter_msg_level_info():
-    rule = {
-        'name': 'Test Gitter Rule',
-        'type': 'any',
-        'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx',
-        'gitter_msg_level': 'info',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = GitterAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
-        'level': 'info'
-    }
-
-    mock_post_request.assert_called_once_with(
-        rule['gitter_webhook_url'],
-        mock.ANY,
-        headers={'content-type': 'application/json'},
-        proxies=None
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][0][1])
-    assert expected_data == actual_data
-    assert 'info' in actual_data['level']
-
-
-def test_gitter_msg_level_error():
-    rule = {
-        'name': 'Test Gitter Rule',
-        'type': 'any',
-        'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx',
-        'gitter_msg_level': 'error',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = GitterAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
-        'level': 'error'
-    }
-
-    mock_post_request.assert_called_once_with(
-        rule['gitter_webhook_url'],
-        mock.ANY,
-        headers={'content-type': 'application/json'},
-        proxies=None
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][0][1])
-    assert expected_data == actual_data
-    assert 'error' in actual_data['level']
-
-
-def test_gitter_proxy():
-    rule = {
-        'name': 'Test Gitter Rule',
-        'type': 'any',
-        'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx',
-        'gitter_msg_level': 'error',
-        'gitter_proxy': 'http://proxy.url',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = GitterAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
-        'level': 'error'
-    }
-
-    mock_post_request.assert_called_once_with(
-        rule['gitter_webhook_url'],
-        mock.ANY,
-        headers={'content-type': 'application/json'},
-        proxies={'https': 'http://proxy.url'}
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][0][1])
-    assert expected_data == actual_data
-    assert 'error' in actual_data['level']
-
-
-def test_gitter_ea_exception():
-    try:
-        rule = {
-            'name': 'Test Gitter Rule',
-            'type': 'any',
-            'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx',
-            'gitter_msg_level': 'error',
-            'gitter_proxy': 'http://proxy.url',
-            'alert': []
-        }
-        rules_loader = FileRulesLoader({})
-        rules_loader.load_modules(rule)
-        alert = GitterAlerter(rule)
-        match = {
-            '@timestamp': '2021-01-01T00:00:00',
-            'somefield': 'foobarbaz'
-        }
-        mock_run = mock.MagicMock(side_effect=RequestException)
-        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
-            alert.alert([match])
-    except EAException:
-        assert True
-
-
-def test_chatwork():
-    rule = {
-        'name': 'Test Chatwork Rule',
-        'type': 'any',
-        'chatwork_apikey': 'xxxx1',
-        'chatwork_room_id': 'xxxx2',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = ChatworkAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
-    }
-
-    mock_post_request.assert_called_once_with(
-        'https://api.chatwork.com/v2/rooms/xxxx2/messages',
-        params=mock.ANY,
-        headers={'X-ChatWorkToken': 'xxxx1'},
-        proxies=None,
-        auth=None
-    )
-
-    actual_data = mock_post_request.call_args_list[0][1]['params']
-    assert expected_data == actual_data
-
-
-def test_chatwork_proxy():
-    rule = {
-        'name': 'Test Chatwork Rule',
-        'type': 'any',
-        'chatwork_apikey': 'xxxx1',
-        'chatwork_room_id': 'xxxx2',
-        'chatwork_proxy': 'http://proxy.url',
-        'chatwork_proxy_login': 'admin',
-        'chatwork_proxy_pass': 'password',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = ChatworkAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
-    }
-
-    mock_post_request.assert_called_once_with(
-        'https://api.chatwork.com/v2/rooms/xxxx2/messages',
-        params=mock.ANY,
-        headers={'X-ChatWorkToken': 'xxxx1'},
-        proxies={'https': 'http://proxy.url'},
-        auth=HTTPProxyAuth('admin', 'password')
-    )
-
-    actual_data = mock_post_request.call_args_list[0][1]['params']
-    assert expected_data == actual_data
-
-
-def test_chatwork_ea_exception():
-    try:
-        rule = {
-            'name': 'Test Chatwork Rule',
-            'type': 'any',
-            'chatwork_apikey': 'xxxx1',
-            'chatwork_room_id': 'xxxx2',
-            'chatwork_proxy': 'http://proxy.url',
-            'chatwork_proxy_login': 'admin',
-            'chatwork_proxy_pass': 'password',
-            'alert': []
-        }
-        rules_loader = FileRulesLoader({})
-        rules_loader.load_modules(rule)
-        alert = ChatworkAlerter(rule)
-        match = {
-            '@timestamp': '2021-01-01T00:00:00',
-            'somefield': 'foobarbaz'
-        }
-        mock_run = mock.MagicMock(side_effect=RequestException)
-        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
-            alert.alert([match])
-    except EAException:
-        assert True
-
-
-def test_telegram():
-    rule = {
-        'name': 'Test Telegram Rule',
-        'type': 'any',
-        'telegram_bot_token': 'xxxxx1',
-        'telegram_room_id': 'xxxxx2',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = TelegramAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'chat_id': rule['telegram_room_id'],
-        'text': '⚠ *Test Telegram Rule* ⚠ ```\nTest Telegram Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n ```',
-        'parse_mode': 'markdown',
-        'disable_web_page_preview': True
-    }
-
-    mock_post_request.assert_called_once_with(
-        'https://api.telegram.org/botxxxxx1/sendMessage',
-        data=mock.ANY,
-        headers={'content-type': 'application/json'},
-        proxies=None,
-        auth=None
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
-    assert expected_data == actual_data
-
-
-def test_telegram_proxy():
-    rule = {
-        'name': 'Test Telegram Rule',
-        'type': 'any',
-        'telegram_bot_token': 'xxxxx1',
-        'telegram_room_id': 'xxxxx2',
-        'telegram_proxy': 'http://proxy.url',
-        'telegram_proxy_login': 'admin',
-        'telegram_proxy_pass': 'password',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = TelegramAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'chat_id': rule['telegram_room_id'],
-        'text': '⚠ *Test Telegram Rule* ⚠ ```\nTest Telegram Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n ```',
-        'parse_mode': 'markdown',
-        'disable_web_page_preview': True
-    }
-
-    mock_post_request.assert_called_once_with(
-        'https://api.telegram.org/botxxxxx1/sendMessage',
-        data=mock.ANY,
-        headers={'content-type': 'application/json'},
-        proxies={'https': 'http://proxy.url'},
-        auth=HTTPProxyAuth('admin', 'password')
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
-    assert expected_data == actual_data
-
-
-def test_telegram_text_maxlength():
-    rule = {
-        'name': 'Test Telegram Rule' + ('a' * 3985),
-        'type': 'any',
-        'telegram_bot_token': 'xxxxx1',
-        'telegram_room_id': 'xxxxx2',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = TelegramAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-    expected_data = {
-        'chat_id': rule['telegram_room_id'],
-        'text': '⚠ *Test Telegram Rule' + ('a' * 3979) +
-                '\n⚠ *message was cropped according to telegram limits!* ⚠ ```',
-        'parse_mode': 'markdown',
-        'disable_web_page_preview': True
-    }
-
-    mock_post_request.assert_called_once_with(
-        'https://api.telegram.org/botxxxxx1/sendMessage',
-        data=mock.ANY,
-        headers={'content-type': 'application/json'},
-        proxies=None,
-        auth=None
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
-    assert expected_data == actual_data
-
-
-def test_telegram_ea_exception():
-    try:
-        rule = {
-            'name': 'Test Telegram Rule' + ('a' * 3985),
-            'type': 'any',
-            'telegram_bot_token': 'xxxxx1',
-            'telegram_room_id': 'xxxxx2',
-            'alert': []
-        }
-        rules_loader = FileRulesLoader({})
-        rules_loader.load_modules(rule)
-        alert = TelegramAlerter(rule)
-        match = {
-            '@timestamp': '2021-01-01T00:00:00',
-            'somefield': 'foobarbaz'
-        }
-        mock_run = mock.MagicMock(side_effect=RequestException)
-        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
-            alert.alert([match])
-    except EAException:
-        assert True
-
-
-def test_service_now():
-    rule = {
-        'name': 'Test ServiceNow Rule',
-        'type': 'any',
-        'username': 'ServiceNow username',
-        'password': 'ServiceNow password',
-        'servicenow_rest_url': 'https://xxxxxxxxxx',
-        'short_description': 'ServiceNow short_description',
-        'comments': 'ServiceNow comments',
-        'assignment_group': 'ServiceNow assignment_group',
-        'category': 'ServiceNow category',
-        'subcategory': 'ServiceNow subcategory',
-        'cmdb_ci': 'ServiceNow cmdb_ci',
-        'caller_id': 'ServiceNow caller_id',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = ServiceNowAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        'description': 'Test ServiceNow Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
-        'short_description': rule['short_description'],
-        'comments': rule['comments'],
-        'assignment_group': rule['assignment_group'],
-        'category': rule['category'],
-        'subcategory': rule['subcategory'],
-        'cmdb_ci': rule['cmdb_ci'],
-        'caller_id': rule['caller_id']
-    }
-
-    mock_post_request.assert_called_once_with(
-        rule['servicenow_rest_url'],
-        auth=(rule['username'], rule['password']),
-        headers={
-            'Content-Type': 'application/json',
-            'Accept': 'application/json;charset=utf-8'
-        },
-        data=mock.ANY,
-        proxies=None
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
-    assert expected_data == actual_data
-
-
-def test_service_now_proxy():
-    rule = {
-        'name': 'Test ServiceNow Rule',
-        'type': 'any',
-        'username': 'ServiceNow username',
-        'password': 'ServiceNow password',
-        'servicenow_rest_url': 'https://xxxxxxxxxx',
-        'short_description': 'ServiceNow short_description',
-        'comments': 'ServiceNow comments',
-        'assignment_group': 'ServiceNow assignment_group',
-        'category': 'ServiceNow category',
-        'subcategory': 'ServiceNow subcategory',
-        'cmdb_ci': 'ServiceNow cmdb_ci',
-        'caller_id': 'ServiceNow caller_id',
-        'servicenow_proxy': 'http://proxy.url',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = ServiceNowAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        'description': 'Test ServiceNow Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
-        'short_description': rule['short_description'],
-        'comments': rule['comments'],
-        'assignment_group': rule['assignment_group'],
-        'category': rule['category'],
-        'subcategory': rule['subcategory'],
-        'cmdb_ci': rule['cmdb_ci'],
-        'caller_id': rule['caller_id']
-    }
-
-    mock_post_request.assert_called_once_with(
-        rule['servicenow_rest_url'],
-        auth=(rule['username'], rule['password']),
-        headers={
-            'Content-Type': 'application/json',
-            'Accept': 'application/json;charset=utf-8'
-        },
-        data=mock.ANY,
-        proxies={'https': 'http://proxy.url'}
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
-    assert expected_data == actual_data
-
-
-def test_service_now_ea_exception():
-    try:
-        rule = {
-            'name': 'Test ServiceNow Rule',
-            'type': 'any',
-            'username': 'ServiceNow username',
-            'password': 'ServiceNow password',
-            'servicenow_rest_url': 'https://xxxxxxxxxx',
-            'short_description': 'ServiceNow short_description',
-            'comments': 'ServiceNow comments',
-            'assignment_group': 'ServiceNow assignment_group',
-            'category': 'ServiceNow category',
-            'subcategory': 'ServiceNow subcategory',
-            'cmdb_ci': 'ServiceNow cmdb_ci',
-            'caller_id': 'ServiceNow caller_id',
-            'servicenow_proxy': 'http://proxy.url',
-            'alert': []
-        }
-        rules_loader = FileRulesLoader({})
-        rules_loader.load_modules(rule)
-        alert = ServiceNowAlerter(rule)
-        match = {
-            '@timestamp': '2021-01-01T00:00:00',
-            'somefield': 'foobarbaz'
-        }
-        mock_run = mock.MagicMock(side_effect=RequestException)
-        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
-            alert.alert([match])
-    except EAException:
-        assert True
-
-
-def test_victor_ops():
-    rule = {
-        'name': 'Test VictorOps Rule',
-        'type': 'any',
-        'victorops_api_key': 'xxxx1',
-        'victorops_routing_key': 'xxxx2',
-        'victorops_message_type': 'INFO',
-        'victorops_entity_display_name': 'no entity display name',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = VictorOpsAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        'message_type': rule['victorops_message_type'],
-        'entity_display_name': rule['victorops_entity_display_name'],
-        'monitoring_tool': 'ElastAlert',
-        'state_message': 'Test VictorOps Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'
-    }
-
-    mock_post_request.assert_called_once_with(
-        'https://alert.victorops.com/integrations/generic/20131114/alert/xxxx1/xxxx2',
-        data=mock.ANY,
-        headers={'content-type': 'application/json'},
-        proxies=None
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
-    assert expected_data == actual_data
-
-
-def test_victor_ops_proxy():
-    rule = {
-        'name': 'Test VictorOps Rule',
-        'type': 'any',
-        'victorops_api_key': 'xxxx1',
-        'victorops_routing_key': 'xxxx2',
-        'victorops_message_type': 'INFO',
-        'victorops_entity_display_name': 'no entity display name',
-        'victorops_proxy': 'http://proxy.url',
-        'alert': []
-    }
-    rules_loader = FileRulesLoader({})
-    rules_loader.load_modules(rule)
-    alert = VictorOpsAlerter(rule)
-    match = {
-        '@timestamp': '2021-01-01T00:00:00',
-        'somefield': 'foobarbaz'
-    }
-    with mock.patch('requests.post') as mock_post_request:
-        alert.alert([match])
-
-    expected_data = {
-        'message_type': rule['victorops_message_type'],
-        'entity_display_name': rule['victorops_entity_display_name'],
-        'monitoring_tool': 'ElastAlert',
-        'state_message': 'Test VictorOps Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'
-    }
-
-    mock_post_request.assert_called_once_with(
-        'https://alert.victorops.com/integrations/generic/20131114/alert/xxxx1/xxxx2',
-        data=mock.ANY,
-        headers={'content-type': 'application/json'},
-        proxies={'https': 'http://proxy.url'}
-    )
-
-    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
-    assert expected_data == actual_data
-
-
-def test_victor_ops_ea_exception():
-    try:
-        rule = {
-            'name': 'Test VictorOps Rule',
-            'type': 'any',
-            'victorops_api_key': 'xxxx1',
-            'victorops_routing_key': 'xxxx2',
-            'victorops_message_type': 'INFO',
-            'victorops_entity_display_name': 'no entity display name',
-            'victorops_proxy': 'http://proxy.url',
-            'alert': []
-        }
-        rules_loader = FileRulesLoader({})
-        rules_loader.load_modules(rule)
-        alert = VictorOpsAlerter(rule)
-        match = {
-            '@timestamp': '2021-01-01T00:00:00',
-            'somefield': 'foobarbaz'
-        }
-        mock_run = mock.MagicMock(side_effect=RequestException)
-        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
-            alert.alert([match])
-    except EAException:
-        assert True
-
-
-def test_google_chat_basic(): - rule = { - 'name': 'Test GoogleChat Rule', - 'type': 'any', - 'googlechat_webhook_url': 'http://xxxxxxx', - 'googlechat_format': 'basic', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = GoogleChatAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'text': 'Test GoogleChat Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' - } - - mock_post_request.assert_called_once_with( - rule['googlechat_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'} - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_google_chat_card(): - rule = { - 'name': 'Test GoogleChat Rule', - 'type': 'any', - 'googlechat_webhook_url': 'http://xxxxxxx', - 'googlechat_format': 'card', - 'googlechat_header_title': 'xxxx1', - 'googlechat_header_subtitle': 'xxxx2', - 'googlechat_header_image': 'http://xxxx/image.png', - 'googlechat_footer_kibanalink': 'http://xxxxx/kibana', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = GoogleChatAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'cards': [{ - 'header': { - 'title': rule['googlechat_header_title'], - 'subtitle': rule['googlechat_header_subtitle'], - 'imageUrl': rule['googlechat_header_image'] - }, - 'sections': [ - { - 'widgets': [{ - "textParagraph": { - 'text': 'Test GoogleChat Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' - } - }] - }, - { - 'widgets': [{ - 'buttons': [{ - 'textButton': { - 'text': 'VISIT KIBANA', - 'onClick': { - 'openLink': { - 'url': rule['googlechat_footer_kibanalink'] - } - } - } - }] - }] - } - ]} - ] - } - - mock_post_request.assert_called_once_with( - rule['googlechat_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'} - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_google_chat_ea_exception(): - try: - rule = { - 'name': 'Test GoogleChat Rule', - 'type': 'any', - 'googlechat_webhook_url': 'http://xxxxxxx', - 'googlechat_format': 'card', - 'googlechat_header_title': 'xxxx1', - 'googlechat_header_subtitle': 'xxxx2', - 'googlechat_header_image': 'http://xxxx/image.png', - 'googlechat_footer_kibanalink': 'http://xxxxx/kibana', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = GoogleChatAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - mock_run = mock.MagicMock(side_effect=RequestException) - with mock.patch('requests.post', mock_run), pytest.raises(RequestException): - alert.alert([match]) - except EAException: - assert True - - -def test_discord(): - rule = { - 'name': 'Test Discord Rule', - 'type': 'any', - 'discord_webhook_url': 'http://xxxxxxx', - 'discord_emoji_title': ':warning:', - 'discord_embed_color': 0xffffff, - 'discord_embed_footer': 'footer', - 'discord_embed_icon_url': 'http://xxxx/image.png', - 'alert': [], - 'alert_subject': 'Test Discord' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = DiscordAlerter(rule) - match = { - '@timestamp': 
'2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'content': ':warning: Test Discord :warning:', - 'embeds': - [{ - 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', - 'color': 0xffffff, - 'footer': { - 'text': 'footer', - 'icon_url': 'http://xxxx/image.png' - } - }] - } - - mock_post_request.assert_called_once_with( - rule['discord_webhook_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json'}, - proxies=None, - auth=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_discord_not_footer(): - rule = { - 'name': 'Test Discord Rule', - 'type': 'any', - 'discord_webhook_url': 'http://xxxxxxx', - 'discord_emoji_title': ':warning:', - 'discord_embed_color': 0xffffff, - 'alert': [], - 'alert_subject': 'Test Discord' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = DiscordAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'content': ':warning: Test Discord :warning:', - 'embeds': - [{ - 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', - 'color': 0xffffff - }] - } - - mock_post_request.assert_called_once_with( - rule['discord_webhook_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json'}, - proxies=None, - auth=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_discord_proxy(): - rule = { - 'name': 'Test Discord Rule', - 'type': 'any', - 'discord_webhook_url': 'http://xxxxxxx', - 'discord_emoji_title': ':warning:', - 'discord_embed_color': 0xffffff, - 'discord_proxy': 'http://proxy.url', - 'discord_proxy_login': 'admin', - 'discord_proxy_password': 'password', - 'alert': [], - 'alert_subject': 'Test Discord' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = DiscordAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'content': ':warning: Test Discord :warning:', - 'embeds': - [{ - 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', - 'color': 0xffffff - }] - } - - mock_post_request.assert_called_once_with( - rule['discord_webhook_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json'}, - proxies={'https': 'http://proxy.url'}, - auth=HTTPProxyAuth('admin', 'password') - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_discord_description_maxlength(): - rule = { - 'name': 'Test Discord Rule' + ('a' * 2069), - 'type': 'any', - 'discord_webhook_url': 'http://xxxxxxx', - 'discord_emoji_title': ':warning:', - 'discord_embed_color': 0xffffff, - 'alert': [], - 'alert_subject': 'Test Discord' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = DiscordAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'content': ':warning: 
Test Discord :warning:', - 'embeds': - [{ - 'description': 'Test Discord Rule' + ('a' * 1933) + - '\n *message was cropped according to discord embed description limits!* ```', - 'color': 0xffffff - }] - } - - mock_post_request.assert_called_once_with( - rule['discord_webhook_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json'}, - proxies=None, - auth=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_discord_ea_exception(): - try: - rule = { - 'name': 'Test Discord Rule' + ('a' * 2069), - 'type': 'any', - 'discord_webhook_url': 'http://xxxxxxx', - 'discord_emoji_title': ':warning:', - 'discord_embed_color': 0xffffff, - 'alert': [], - 'alert_subject': 'Test Discord' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = DiscordAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - mock_run = mock.MagicMock(side_effect=RequestException) - with mock.patch('requests.post', mock_run), pytest.raises(RequestException): - alert.alert([match]) - except EAException: - assert True - - -def test_dingtalk_text(): - rule = { - 'name': 'Test DingTalk Rule', - 'type': 'any', - 'dingtalk_access_token': 'xxxxxxx', - 'dingtalk_msgtype': 'text', - 'alert': [], - 'alert_subject': 'Test DingTalk' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = DingTalkAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'msgtype': 'text', - 'text': {'content': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'} - } - - mock_post_request.assert_called_once_with( - 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', - data=mock.ANY, - headers={ - 'Content-Type': 'application/json', - 'Accept': 'application/json;charset=utf-8' - }, - proxies=None, - auth=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_dingtalk_markdown(): - rule = { - 'name': 'Test DingTalk Rule', - 'type': 'any', - 'dingtalk_access_token': 'xxxxxxx', - 'dingtalk_msgtype': 'markdown', - 'alert': [], - 'alert_subject': 'Test DingTalk' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = DingTalkAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'msgtype': 'markdown', - 'markdown': { - 'title': 'Test DingTalk', - 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' - } - } - - mock_post_request.assert_called_once_with( - 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', - data=mock.ANY, - headers={ - 'Content-Type': 'application/json', - 'Accept': 'application/json;charset=utf-8' - }, - proxies=None, - auth=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_dingtalk_single_action_card(): - rule = { - 'name': 'Test DingTalk Rule', - 'type': 'any', - 'dingtalk_access_token': 'xxxxxxx', - 'dingtalk_msgtype': 'single_action_card', - 'dingtalk_single_title': 'elastalert', - 'dingtalk_single_url': 'http://xxxxx2', - 'alert': [], - 'alert_subject': 'Test DingTalk' - } - rules_loader 
= FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = DingTalkAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'msgtype': 'actionCard', - 'actionCard': { - 'title': 'Test DingTalk', - 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', - 'singleTitle': rule['dingtalk_single_title'], - 'singleURL': rule['dingtalk_single_url'] - } - } - - mock_post_request.assert_called_once_with( - 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', - data=mock.ANY, - headers={ - 'Content-Type': 'application/json', - 'Accept': 'application/json;charset=utf-8' - }, - proxies=None, - auth=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_dingtalk_action_card(): - rule = { - 'name': 'Test DingTalk Rule', - 'type': 'any', - 'dingtalk_access_token': 'xxxxxxx', - 'dingtalk_msgtype': 'action_card', - 'dingtalk_single_title': 'elastalert', - 'dingtalk_single_url': 'http://xxxxx2', - 'dingtalk_btn_orientation': '1', - 'dingtalk_btns': [ - { - 'title': 'test1', - 'actionURL': 'https://xxxxx0/' - }, - { - 'title': 'test2', - 'actionURL': 'https://xxxxx1/' - } - ], - 'alert': [], - 'alert_subject': 'Test DingTalk' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = DingTalkAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'msgtype': 'actionCard', - 'actionCard': { - 'title': 'Test DingTalk', - 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', - 'btnOrientation': rule['dingtalk_btn_orientation'], - 'btns': rule['dingtalk_btns'] - } - } - - mock_post_request.assert_called_once_with( - 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', - data=mock.ANY, - headers={ - 'Content-Type': 'application/json', - 'Accept': 'application/json;charset=utf-8' - }, - proxies=None, - auth=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_dingtalk_proxy(): - rule = { - 'name': 'Test DingTalk Rule', - 'type': 'any', - 'dingtalk_access_token': 'xxxxxxx', - 'dingtalk_msgtype': 'action_card', - 'dingtalk_single_title': 'elastalert', - 'dingtalk_single_url': 'http://xxxxx2', - 'dingtalk_btn_orientation': '1', - 'dingtalk_btns': [ - { - 'title': 'test1', - 'actionURL': 'https://xxxxx0/' - }, - { - 'title': 'test2', - 'actionURL': 'https://xxxxx1/' - } - ], - 'dingtalk_proxy': 'http://proxy.url', - 'dingtalk_proxy_login': 'admin', - 'dingtalk_proxy_pass': 'password', - 'alert': [], - 'alert_subject': 'Test DingTalk' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = DingTalkAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'msgtype': 'actionCard', - 'actionCard': { - 'title': 'Test DingTalk', - 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', - 'btnOrientation': rule['dingtalk_btn_orientation'], - 'btns': rule['dingtalk_btns'] - } - } - - mock_post_request.assert_called_once_with( - 
'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', - data=mock.ANY, - headers={ - 'Content-Type': 'application/json', - 'Accept': 'application/json;charset=utf-8' - }, - proxies={'https': 'http://proxy.url'}, - auth=HTTPProxyAuth('admin', 'password') - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_dingtalk_ea_exception(): - try: - rule = { - 'name': 'Test DingTalk Rule', - 'type': 'any', - 'dingtalk_access_token': 'xxxxxxx', - 'dingtalk_msgtype': 'action_card', - 'dingtalk_single_title': 'elastalert', - 'dingtalk_single_url': 'http://xxxxx2', - 'dingtalk_btn_orientation': '1', - 'dingtalk_btns': [ - { - 'title': 'test1', - 'actionURL': 'https://xxxxx0/' - }, - { - 'title': 'test2', - 'actionURL': 'https://xxxxx1/' - } - ], - 'dingtalk_proxy': 'http://proxy.url', - 'dingtalk_proxy_login': 'admin', - 'dingtalk_proxy_pass': 'password', - 'alert': [], - 'alert_subject': 'Test DingTalk' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = DingTalkAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - mock_run = mock.MagicMock(side_effect=RequestException) - with mock.patch('requests.post', mock_run), pytest.raises(RequestException): - alert.alert([match]) - except EAException: - assert True - - -def test_mattermost_proxy(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_proxy': 'https://proxy.url', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n' - } - ], 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies={'https': 'https://proxy.url'} - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_alert_text_only(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n' - } - ], 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - 
verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_not_alert_text_only(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'exclude_fields', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [] - } - ], - 'text': 'Test Mattermost Rule\n\n', - 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_msg_fields(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_msg_fields': [ - { - 'title': 'Stack', - 'value': "{0} {1}", - 'short': False, - 'args': ["type", "msg.status_code"] - }, - { - 'title': 'Name', - 'value': 'static field', - 'short': False - } - ], - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [ - {'title': 'Stack', 'value': ' ', 'short': False}, - {'title': 'Name', 'value': 'static field', 'short': False} - ], - 'text': 'Test Mattermost Rule\n\n' - } - ], 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_icon_url_override(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_icon_url_override': 'http://xxxx/icon.png', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], 
- 'text': 'Test Mattermost Rule\n\n' - } - ], - 'username': 'elastalert', - 'icon_url': 'http://xxxx/icon.png' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_channel_override(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_channel_override': 'test channel', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n' - } - ], - 'username': 'elastalert', - 'channel': 'test channel' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_ignore_ssl_errors(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_ignore_ssl_errors': True, - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n' - } - ], - 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=False, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_title_link(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_title_link': 'http://title.url', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test 
Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n', - 'title_link': 'http://title.url' - } - ], - 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_footer(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_footer': 'Mattermost footer', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n', - 'footer': 'Mattermost footer' - } - ], - 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_footer_icon(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_footer_icon': 'http://icon.url', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n', - 'footer_icon': 'http://icon.url' - } - ], - 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_image_url(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_image_url': 'http://image.url', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 
'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n', - 'image_url': 'http://image.url' - } - ], - 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_thumb_url(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_thumb_url': 'http://thumb.url', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n', - 'thumb_url': 'http://thumb.url' - } - ], - 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_author_name(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_author_name': 'author name', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n', - 'author_name': 'author name' - } - ], - 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_author_link(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_author_link': 'http://author.link.url', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - 
alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n', - 'author_link': 'http://author.link.url' - } - ], - 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_author_icon(): - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_author_icon': 'http://author.icon.url', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'attachments': [ - { - 'fallback': 'Test Mattermost: aaaaa', - 'color': 'danger', - 'title': 'Test Mattermost', - 'pretext': 'aaaaa', - 'fields': [], - 'text': 'Test Mattermost Rule\n\n', - 'author_icon': 'http://author.icon.url' - } - ], - 'username': 'elastalert' - } - - mock_post_request.assert_called_once_with( - rule['mattermost_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - verify=True, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - assert expected_data == actual_data - - -def test_mattermost_ea_exception(): - try: - rule = { - 'name': 'Test Mattermost Rule', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'mattermost_webhook_url': 'http://xxxxx', - 'mattermost_msg_pretext': 'aaaaa', - 'mattermost_msg_color': 'danger', - 'mattermost_author_icon': 'http://author.icon.url', - 'alert': [], - 'alert_subject': 'Test Mattermost' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = MattermostAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - mock_run = mock.MagicMock(side_effect=RequestException) - with mock.patch('requests.post', mock_run), pytest.raises(RequestException): - alert.alert([match]) - except EAException: - assert True - - -def test_thehive_alerter(): - rule = {'alert': [], - 'alert_text': '', - 'alert_text_type': 'alert_text_only', - 'description': 'test', - 'hive_alert_config': {'customFields': [{'name': 'test', - 'type': 'string', - 'value': 'test.ip'}], - 'follow': True, - 'severity': 2, - 'source': 'elastalert', - 'status': 'New', - 'tags': ['test.ip'], - 'tlp': 3, - 'type': 'external'}, - 'hive_connection': {'hive_apikey': '', - 'hive_host': 'https://localhost', - 'hive_port': 9000}, - 'hive_observable_data_mapping': [{'ip': 'test.ip'}], - 'name': 'test-thehive', - 'tags': ['a', 'b'], - 'type': 'any'} - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HiveAlerter(rule) - match = { - "test": { - "ip": "127.0.0.1" - }, - "@timestamp": "2021-05-09T14:43:30", - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - "artifacts": [ - { - "data": "127.0.0.1", - "dataType": "ip", - 
"message": None, - "tags": [], - "tlp": 2 - } - ], - "customFields": { - "test": { - "order": 0, - "string": "127.0.0.1" - } - }, - "description": "\n\n", - "follow": True, - "severity": 2, - "source": "elastalert", - "status": "New", - "tags": [ - "127.0.0.1" - ], - "title": "test-thehive", - "tlp": 3, - "type": "external" - } - - conn_config = rule['hive_connection'] - alert_url = f"{conn_config['hive_host']}:{conn_config['hive_port']}/api/alert" - mock_post_request.assert_called_once_with( - alert_url, - data=mock.ANY, - headers={'Content-Type': 'application/json', - 'Authorization': 'Bearer '}, - verify=False, - proxies={'http': '', 'https': ''} - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - # The date and sourceRef are autogenerated, so we can't expect them to be a particular value - del actual_data['date'] - del actual_data['sourceRef'] - - assert expected_data == actual_data - - -def test_zabbix_basic(): - rule = { - 'name': 'Basic Zabbix test', - 'type': 'any', - 'alert_text_type': 'alert_text_only', - 'alert': [], - 'alert_subject': 'Test Zabbix', - 'zbx_host': 'example.com', - 'zbx_key': 'example-key' - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = ZabbixAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00Z', - 'somefield': 'foobarbaz' - } - with mock.patch('pyzabbix.ZabbixSender.send') as mock_zbx_send: - alert.alert([match]) - - zabbix_metrics = { - "host": "example.com", - "key": "example-key", - "value": "1", - "clock": 1609459200 - } - alerter_args = mock_zbx_send.call_args.args - assert vars(alerter_args[0][0]) == zabbix_metrics + alert = Alerter(rule) + alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + assert 6 == len(alertSubject) From b8f3ea7009826d888e40a96f19aeb14d187de216 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sat, 22 May 2021 17:03:02 +0100 Subject: [PATCH 0236/1065] Relocate TheHive and DingTalk alerters I moved these out of `alerters.py` but didn't put them into the correct folder the first time around. --- elastalert/{ => alerters}/dingtalk.py | 0 elastalert/{ => alerters}/thehive.py | 0 elastalert/loaders.py | 8 ++++---- tests/alerts_test.py | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) rename elastalert/{ => alerters}/dingtalk.py (100%) rename elastalert/{ => alerters}/thehive.py (100%) diff --git a/elastalert/dingtalk.py b/elastalert/alerters/dingtalk.py similarity index 100% rename from elastalert/dingtalk.py rename to elastalert/alerters/dingtalk.py diff --git a/elastalert/thehive.py b/elastalert/alerters/thehive.py similarity index 100% rename from elastalert/thehive.py rename to elastalert/alerters/thehive.py diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 60824ba62..f74761c93 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -30,8 +30,8 @@ import elastalert.alerters.telegram import elastalert.alerters.twilio import elastalert.alerters.victorops -import elastalert.dingtalk -import elastalert.thehive +import elastalert.alerters.dingtalk +import elastalert.alerters.thehive from . import alerts from . import enhancements from . 
import ruletypes
@@ -108,10 +108,10 @@ class RulesLoader(object):
         'post': elastalert.alerters.httppost.HTTPPostAlerter,
         'pagertree': elastalert.alerters.pagertree.PagerTreeAlerter,
         'linenotify': elastalert.alerters.line.LineNotifyAlerter,
-        'hivealerter': elastalert.thehive.HiveAlerter,
+        'hivealerter': elastalert.alerters.thehive.HiveAlerter,
         'zabbix': ZabbixAlerter,
         'discord': elastalert.alerters.discord.DiscordAlerter,
-        'dingtalk': elastalert.dingtalk.DingTalkAlerter,
+        'dingtalk': elastalert.alerters.dingtalk.DingTalkAlerter,
         'chatwork': elastalert.alerters.chatwork.ChatworkAlerter,
         'datadog': elastalert.alerters.datadog.DatadogAlerter,
         'ses': elastalert.alerters.ses.SesAlerter

diff --git a/tests/alerts_test.py b/tests/alerts_test.py
index 22a3f84f9..aa9a9a337 100644
--- a/tests/alerts_test.py
+++ b/tests/alerts_test.py
@@ -18,11 +18,11 @@
 from elastalert.alerters.chatwork import ChatworkAlerter
 from elastalert.alerters.command import CommandAlerter
 from elastalert.alerters.datadog import DatadogAlerter
-from elastalert.dingtalk import DingTalkAlerter
+from elastalert.alerters.dingtalk import DingTalkAlerter
 from elastalert.alerters.discord import DiscordAlerter
 from elastalert.alerters.gitter import GitterAlerter
 from elastalert.alerters.googlechat import GoogleChatAlerter
-from elastalert.thehive import HiveAlerter
+from elastalert.alerters.thehive import HiveAlerter
 from elastalert.alerters.httppost import HTTPPostAlerter
 from elastalert.alerters.line import LineNotifyAlerter
 from elastalert.alerters.pagertree import PagerTreeAlerter

From 944b112f3dff3676996e1e57ec9d78cda5a0f4f3 Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Sun, 23 May 2021 03:21:56 +0900
Subject: [PATCH 0237/1065] Fix docs: mattermost_title_link

---
 docs/source/ruletypes.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 6e2f6abcb..24bb249d5 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2196,7 +2196,7 @@ Example mattermost_msg_fields::
         value: static field
         short: false

-``mattermost_title_link``: You can add a link in your Slack notification by setting this to a valid URL. Requires mattermost_title to be set. Defaults to "".
+``mattermost_title_link``: You can add a link in your Mattermost notification by setting this to a valid URL. Defaults to "".

 ``mattermost_footer``: Add a static footer text for alert. Defaults to "".

From 588ed5038ffed13ffa4d070901615a3c1aced72b Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Sun, 23 May 2021 04:23:01 +0900
Subject: [PATCH 0238/1065] add docs http post ca certs and ignore ssl

http_post_ca_certs
http_post_ignore_ssl_errors

---
 docs/source/ruletypes.rst | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 6e2f6abcb..323d5e1e9 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2018,6 +2018,10 @@ Optional:

 ``http_post_timeout``: The timeout value, in seconds, for making the post. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles.

+``http_post_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate.
+
+``http_post_ignore_ssl_errors``: By default ElastAlert 2 will verify the SSL certificate. Set this option to ``True`` if you want to ignore SSL errors.
+ Example usage:: alert: post From 9f732fbe0217431c1ade62ac7fbdb9ef4bd5ccd0 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 23 May 2021 04:31:50 +0900 Subject: [PATCH 0239/1065] Bump sphinx from 3.5.4 to 4.0.2 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 52a8cbbe4..c4734e9db 100644 --- a/tox.ini +++ b/tox.ini @@ -25,6 +25,6 @@ norecursedirs = .* virtualenv_run docs build venv env [testenv:docs] deps = {[testenv]deps} - sphinx==3.5.4 + sphinx==4.0.2 changedir = docs commands = sphinx-build -b html -d build/doctrees -W source build/html From 337c35ddbb14c119d212b632841b306c3268cf7c Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 22 May 2021 16:28:40 -0400 Subject: [PATCH 0240/1065] Fix merge conflict. --- tests/alerters/dingtalk_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/alerters/dingtalk_test.py b/tests/alerters/dingtalk_test.py index f91961a86..77f475568 100644 --- a/tests/alerters/dingtalk_test.py +++ b/tests/alerters/dingtalk_test.py @@ -5,7 +5,7 @@ from requests import RequestException from requests.auth import HTTPProxyAuth -from elastalert.dingtalk import DingTalkAlerter +from elastalert.alerters.dingtalk import DingTalkAlerter from elastalert.loaders import FileRulesLoader from elastalert.util import EAException From 073a6f2070165e1b9d0f6fc0599799eef533e002 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 22 May 2021 16:29:06 -0400 Subject: [PATCH 0241/1065] Fix merge conflict. --- tests/alerters/thehive_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/alerters/thehive_test.py b/tests/alerters/thehive_test.py index a2a90e12e..56ccd2c67 100644 --- a/tests/alerters/thehive_test.py +++ b/tests/alerters/thehive_test.py @@ -3,7 +3,7 @@ import mock from elastalert.loaders import FileRulesLoader -from elastalert.thehive import HiveAlerter +from elastalert.alerters.thehive import HiveAlerter def test_thehive_alerter(): From 8a5c9e675aa6c1f06e4af4eed393b24afd7f7877 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 22 May 2021 16:35:53 -0400 Subject: [PATCH 0242/1065] Fix merge conflicts. 
--- tests/alerts_test.py | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 245efbc34..fab081b8e 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -6,30 +6,6 @@ from elastalert.alerts import Alerter from elastalert.alerts import BasicMatchString -from elastalert.alerters.chatwork import ChatworkAlerter -from elastalert.alerters.command import CommandAlerter -from elastalert.alerters.datadog import DatadogAlerter -from elastalert.alerters.dingtalk import DingTalkAlerter -from elastalert.alerters.discord import DiscordAlerter -from elastalert.alerters.gitter import GitterAlerter -from elastalert.alerters.googlechat import GoogleChatAlerter -from elastalert.alerters.thehive import HiveAlerter -from elastalert.alerters.httppost import HTTPPostAlerter -from elastalert.alerters.line import LineNotifyAlerter -from elastalert.alerters.pagertree import PagerTreeAlerter -from elastalert.alerters.servicenow import ServiceNowAlerter -from elastalert.alerters.telegram import TelegramAlerter -from elastalert.loaders import FileRulesLoader -from elastalert.alerters.jira import JiraAlerter -from elastalert.alerters.jira import JiraFormattedMatchString -from elastalert.alerters.email import EmailAlerter -from elastalert.alerters.mattermost import MattermostAlerter -from elastalert.alerters.opsgenie import OpsGenieAlerter -from elastalert.alerters.pagerduty import PagerDutyAlerter -from elastalert.alerters.slack import SlackAlerter -from elastalert.alerters.teams import MsTeamsAlerter -from elastalert.alerters.zabbix import ZabbixAlerter -from elastalert.alerters.victorops import VictorOpsAlerter from elastalert.util import ts_add From 1ca1130996214708bdce4dba19e46189e71a015b Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sun, 23 May 2021 14:03:26 +0100 Subject: [PATCH 0243/1065] Fix coverage calculation to work with parallelised tests The existing coverage.py doesn't work with xdist, which is used for parallel testing. To fix this, use pytest-cov instead, which can handle parallelised tests. This brings the coverage back up to 69%, which is the same as what coverage.py reports for the refactored files. This is a drop from the previously reported 82%, but as far as I can tell that's entirely due to the refactoring, not due to any files being missed or counted incorrectly. --- requirements-dev.txt | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index c94d8dc36..ec51a2c13 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,5 @@ -r requirements.txt -coverage==5.5 +pytest-cov==2.12.0 flake8 m2r2 pluggy>=0.12.0 diff --git a/tox.ini b/tox.ini index 52a8cbbe4..2be38d734 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,7 @@ envlist = py39,docs [testenv] deps = -rrequirements-dev.txt commands = - coverage run --source=elastalert/,tests/ -m pytest --strict -n 4 {posargs} + pytest --cov=elastalert --strict tests/ -n 4 {posargs} coverage report -m flake8 . 
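pytest-cov succeeds here where plain coverage.py did not: it starts coverage
collection inside each pytest-xdist worker and combines the per-worker data
automatically. A minimal local run equivalent to the tox command above (a
sketch, assuming pytest-cov and pytest-xdist are both installed; the latter
provides the -n option):

    pytest --cov=elastalert --cov-report=term-missing -n 4 tests/
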
From 68f6afd3ed33ba1681e33d303f36333f00594800 Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Sun, 23 May 2021 14:47:55 +0100
Subject: [PATCH 0244/1065] Fix sphinx dependency version

---
 requirements-dev.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/requirements-dev.txt b/requirements-dev.txt
index c94d8dc36..803ab093f 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,5 +1,6 @@
 -r requirements.txt
 coverage==5.5
+docutils<0.17
 flake8
 m2r2
 pluggy>=0.12.0

From 1ce1c8b5c9454c120ce0a50bff1c55e48dd96b15 Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Sun, 23 May 2021 16:01:15 +0100
Subject: [PATCH 0245/1065] Generate the coverage report using pytest-cov

- As suggested by @nsano-rururu
- Also add branching coverage check for accuracy

---
 tox.ini | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/tox.ini b/tox.ini
index 810148fdd..32df3ef98 100644
--- a/tox.ini
+++ b/tox.ini
@@ -5,8 +5,7 @@ envlist = py39,docs
 [testenv]
 deps = -rrequirements-dev.txt
 commands =
-    pytest --cov=elastalert --strict tests/ -n 4 {posargs}
-    coverage report -m
+    pytest --cov=elastalert --cov-report=term-missing --cov-branch --strict tests/ -n 4 {posargs}
     flake8 .

 [testenv:lint]

From 1d050d090bc8e983400a79563596640130964ba6 Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Mon, 24 May 2021 01:46:49 +0900
Subject: [PATCH 0246/1065] Add support for RocketChat

---
 docs/source/elastalert.rst        |   1 +
 docs/source/ruletypes.rst         |  40 +++
 elastalert/alerters/rocketchat.py |  98 +++++
 elastalert/loaders.py             |   5 +-
 elastalert/schema.yaml            |   9 +
 tests/alerters/rocketchat_test.py | 577 ++++++++++++++++++++++++++++++
 6 files changed, 729 insertions(+), 1 deletion(-)
 create mode 100644 elastalert/alerters/rocketchat.py
 create mode 100644 tests/alerters/rocketchat_test.py

diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst
index ba260b33b..b315f321b 100755
--- a/docs/source/elastalert.rst
+++ b/docs/source/elastalert.rst
@@ -48,6 +48,7 @@ Currently, we have support built in for these alert types:
 - OpsGenie
 - PagerDuty
 - PagerTree
+- RocketChat
 - Squadcast
 - ServiceNow
 - Slack

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 35a4247b9..c96ae7764 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2371,6 +2371,46 @@ Example usage::
     - "pagertree"
   pagertree_integration_url: "PagerTree Integration URL"

+RocketChat
+~~~~~~~~~~
+
+The RocketChat alerter will send a notification to a predefined channel. The body of the notification is formatted the same as with other alerters.
+https://developer.rocket.chat/api/rest-api/methods/chat/postmessage
+
+The alerter requires the following option:
+
+``rocket_chat_webhook_url``: The webhook URL that includes your auth data and the ID of the channel (room) you want to post to. You can use a list of URLs to send to multiple channels.
+
+Optional:
+
+``rocket_chat_username_override``: By default RocketChat will use the username defined in the Integration when posting to the channel. Use this option to change it (free text).
+
+``rocket_chat_channel_override``: Incoming webhooks have a default channel, but it can be overridden. A public channel can be specified with “#other-channel”, and a Direct Message with “@username”.
+
+``rocket_chat_emoji_override``: By default ElastAlert will use the :ghost: emoji when posting to the channel. You can use a different emoji per
+ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ .
+
+``rocket_chat_msg_color``: By default the alert will be posted with the ‘danger’ color. You can also use ‘good’ or ‘warning’ colors.
+
+``rocket_chat_text_string``: Notification message you want to add.
+
+``rocket_chat_proxy``: By default ElastAlert will not use a network proxy to send notifications to RocketChat. Set this option using ``hostname:port`` if you need to use a proxy.
+
+``rocket_chat_alert_fields``: You can add additional fields to your RocketChat alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`.
+
+Example rocket_chat_alert_fields::
+
+    rocket_chat_alert_fields:
+    - title: Host
+      value: monitor.host
+      short: true
+    - title: Status
+      value: monitor.status
+      short: true
+    - title: Zone
+      value: beat.name
+      short: true
+
 Squadcast
 ~~~~~~~~~

diff --git a/elastalert/alerters/rocketchat.py b/elastalert/alerters/rocketchat.py
new file mode 100644
index 000000000..5b86ce5f1
--- /dev/null
+++ b/elastalert/alerters/rocketchat.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+import copy
+import json
+import requests
+from requests.exceptions import RequestException
+import warnings
+
+from ..alerts import Alerter, DateTimeEncoder
+from ..util import EAException
+from ..util import elastalert_logger
+from ..util import lookup_es_key
+
+
+class RocketChatAlerter(Alerter):
+    """ Creates a RocketChat notification for each alert """
+    required_options = set(['rocket_chat_webhook_url'])
+
+    def __init__(self, rule):
+        super(RocketChatAlerter, self).__init__(rule)
+        self.rocket_chat_webhook_url = self.rule['rocket_chat_webhook_url']
+        if isinstance(self.rocket_chat_webhook_url, str):
+            self.rocket_chat_webhook_url = [self.rocket_chat_webhook_url]
+        self.rocket_chat_proxy = self.rule.get('rocket_chat_proxy', None)
+
+        self.rocket_chat_username_override = self.rule.get('rocket_chat_username_override', 'elastalert2')
+        self.rocket_chat_channel_override = self.rule.get('rocket_chat_channel_override', '')
+        if isinstance(self.rocket_chat_channel_override, str):
+            self.rocket_chat_channel_override = [self.rocket_chat_channel_override]
+        self.rocket_chat_emoji_override = self.rule.get('rocket_chat_emoji_override', ':ghost:')
+        self.rocket_chat_msg_color = self.rule.get('rocket_chat_msg_color', 'danger')
+        self.rocket_chat_text_string = self.rule.get('rocket_chat_text_string', '')
+        self.rocket_chat_alert_fields = self.rule.get('rocket_chat_alert_fields', '')
+
+    def format_body(self, body):
+        return body
+
+    def get_aggregation_summary_text__maximum_width(self):
+        width = super(RocketChatAlerter, self).get_aggregation_summary_text__maximum_width()
+
+        # Reduced maximum width for prettier Rocket.Chat display.
+        return min(width, 75)
+
+    def get_aggregation_summary_text(self, matches):
+        text = super(RocketChatAlerter, self).get_aggregation_summary_text(matches)
+        if text:
+            text = '```\n{0}```\n'.format(text)
+        return text
+
+    def populate_fields(self, matches):
+        alert_fields = []
+        for arg in self.rocket_chat_alert_fields:
+            arg = copy.copy(arg)
+            arg['value'] = lookup_es_key(matches[0], arg['value'])
+            alert_fields.append(arg)
+        return alert_fields
+
+    def alert(self, matches):
+        body = self.create_alert_body(matches)
+        body = self.format_body(body)
+        headers = {'content-type': 'application/json'}
+        proxies = {'https': self.rocket_chat_proxy} if self.rocket_chat_proxy else None
+        payload = {
+            'username': self.rocket_chat_username_override,
+            'text': self.rocket_chat_text_string,
+            'attachments': [
+                {
+                    'color': self.rocket_chat_msg_color,
+                    'title': self.create_title(matches),
+                    'text': body,
+                    'fields': []
+                }
+            ]
+        }
+
+        # if we have defined fields, populate notable fields for the alert
+        if self.rocket_chat_alert_fields != '':
+            payload['attachments'][0]['fields'] = self.populate_fields(matches)
+
+        if self.rocket_chat_emoji_override != '':
+            payload['emoji'] = self.rocket_chat_emoji_override
+
+        for url in self.rocket_chat_webhook_url:
+            for channel_override in self.rocket_chat_channel_override:
+                try:
+                    payload['channel'] = channel_override
+                    response = requests.post(
+                        url, data=json.dumps(payload, cls=DateTimeEncoder),
+                        headers=headers,
+                        proxies=proxies)
+                    warnings.resetwarnings()
+                    response.raise_for_status()
+                except RequestException as e:
+                    raise EAException("Error posting to Rocket.Chat: %s" % e)
+        elastalert_logger.info("Alert sent to Rocket.Chat")
+
+    def get_info(self):
+        return {'type': 'rocketchat',
+                'rocket_chat_username_override': self.rocket_chat_username_override}

diff --git a/elastalert/loaders.py b/elastalert/loaders.py
index 60824ba62..ef074b3c6 100644
--- a/elastalert/loaders.py
+++ b/elastalert/loaders.py
@@ -32,6 +32,8 @@
 import elastalert.alerters.victorops
 import elastalert.dingtalk
 import elastalert.thehive
+import elastalert.alerters.rocketchat
+
 from . import alerts
 from . import enhancements
 from . import ruletypes
@@ -114,7 +116,8 @@ class RulesLoader(object):
         'dingtalk': elastalert.dingtalk.DingTalkAlerter,
         'chatwork': elastalert.alerters.chatwork.ChatworkAlerter,
         'datadog': elastalert.alerters.datadog.DatadogAlerter,
-        'ses': elastalert.alerters.ses.SesAlerter
+        'ses': elastalert.alerters.ses.SesAlerter,
+        'rocketchat': elastalert.alerters.rocketchat.RocketChatAlerter
     }

     # A partial ordering of alert types. 
Relative order will be preserved in the resulting alerts list diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 0d911f70b..33aee2745 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -457,6 +457,15 @@ properties: pagertree_integration_url: {type: string} pagertree_proxy: {type: string} + ### RocketChat + rocket_chat_webhook_url: *arrayOfString + rocket_chat_username_override: {type: string} + rocket_chat_channel_override: {type: string} + rocket_chat_emoji_override: {type: string} + rocket_chat_msg_color: {enum: [good, warning, danger]} + rocket_chat_text_string: {type: string} + rocket_chat_proxy: {type: string} + ### ServiceNow servicenow_rest_url: {type: string} username: {type: string} diff --git a/tests/alerters/rocketchat_test.py b/tests/alerters/rocketchat_test.py new file mode 100644 index 000000000..81335090d --- /dev/null +++ b/tests/alerters/rocketchat_test.py @@ -0,0 +1,577 @@ +import json + +import mock +import pytest +from requests import RequestException + +from elastalert.alerters.rocketchat import RocketChatAlerter +from elastalert.alerts import BasicMatchString +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_rocketchat_uses_custom_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_uses_rule_name_when_custom_title_is_not_provided(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_username_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'test elastalert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = 
RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'test elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_chat_channel(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'rocket_chat_channel_override': '#test-alert', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '#test-alert', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_uses_list_of_custom_rocket_chat_channel(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'rocket_chat_channel_override': ['#test-alert', '#test-alert2'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data1 = { + 'username': 'elastalert2', + 'channel': '#test-alert', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + expected_data2 = { + 'username': 'elastalert2', + 'channel': '#test-alert2', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data2 == json.loads(mock_post_request.call_args_list[1][1]['data']) + + +def test_rocketchat_emoji_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'rocket_chat_emoji_override': ':shushing_face:', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with 
mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':shushing_face:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_msg_color_good(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert2', + 'rocket_chat_msg_color': 'good', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'good', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_msg_color_warning(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert2', + 'rocket_chat_msg_color': 'warning', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'warning', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_text_string(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert2', + 'rocket_chat_text_string': 'text str', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], 
+ 'text': 'text str' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_proxy(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_proxy': 'http://proxy.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': rule['rocket_chat_proxy']} + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_alert_fields(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert2', + 'rocket_chat_alert_fields': [ + { + 'title': 'Host', + 'value': 'somefield', + 'short': 'true' + }, + { + 'title': 'Sensors', + 'value': '@timestamp', + 'short': 'true' + } + ], + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': + [ + { + 'short': 'true', + 'title': 'Host', + 'value': 'foobarbaz' + }, + { + 'short': 'true', + 'title': 'Sensors', + 'value': '2021-01-01T00:00:00' + } + ], + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_required_options_key_error(): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post'): + alert.alert([match]) + except KeyError: + assert True + + +def test_rocketchat_msg_color_key_error(): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_msg_color': 'abc', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': 
'2021-01-01T00:00:00',
+            'somefield': 'foobarbaz'
+        }
+        with mock.patch('requests.post'):
+            alert.alert([match])
+    except KeyError:
+        assert True
+
+
+def test_rocketchat_ea_exception():
+    try:
+        rule = {
+            'name': 'Test Rule',
+            'type': 'any',
+            'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat',
+            'rocket_chat_username_override': 'elastalert2',
+            'rocket_chat_msg_pretext': 'pretext value',
+            'alert_subject': 'Cool subject',
+            'alert': []
+        }
+        rules_loader = FileRulesLoader({})
+        rules_loader.load_modules(rule)
+        alert = RocketChatAlerter(rule)
+        match = {
+            '@timestamp': '2021-01-01T00:00:00',
+            'somefield': 'foobarbaz'
+        }
+        mock_run = mock.MagicMock(side_effect=RequestException)
+        with mock.patch('requests.post', mock_run), pytest.raises(RequestException):
+            alert.alert([match])
+    except EAException:
+        assert True
From 2a7af8f6a453e57f046efabda1f312b06274c5ed Mon Sep 17 00:00:00 2001
From: Naoyuki Sano
Date: Mon, 24 May 2021 01:57:08 +0900
Subject: [PATCH 0247/1065] Update ruletypes.rst

---
 docs/source/ruletypes.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index c96ae7764..79d7f0056 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2396,7 +2396,7 @@ ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ .

 ``rocket_chat_proxy``: By default ElastAlert will not use a network proxy to send notifications to RocketChat. Set this option using ``hostname:port`` if you need to use a proxy.

-``slack_alert_fields``: You can add additional fields to your RocketChat alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`.
+``rocket_chat_alert_fields``: You can add additional fields to your RocketChat alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`.

 Example rocket_chat_alert_fields::

From 353da32e64e999d75aeb6ad4bb3027ad22942c26 Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Mon, 24 May 2021 04:52:01 +0900
Subject: [PATCH 0248/1065] Fix docs:rocket_chat_emoji_override

---
 docs/source/ruletypes.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 79d7f0056..ed5d36585 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2388,7 +2388,7 @@ Optional:

 ``rocket_chat_channel_override``: Incoming webhooks have a default channel, but it can be overridden. A public channel can be specified “#other-channel”, and a Direct Message with “@username”.

 ``rocket_chat_emoji_override``: By default ElastAlert will use the :ghost: emoji when posting to the channel. You can use a different emoji per
-ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If rocket_chat_icon_url_override parameter is provided, emoji is ignored.
+ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ .

 ``rocket_chat_msg_color``: By default the alert will be posted with the ‘danger’ color. You can also use ‘good’ or ‘warning’ colors.
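For reference, the Rocket.Chat options documented in the two patches above all live in an ordinary rule file. The following is a minimal sketch of such a rule, not an authoritative example: the webhook URL, channel name, and the ``hostname`` field are placeholders, while every ``rocket_chat_*`` key is taken from the schema and documentation added in this series::

    name: example-rocketchat-rule
    type: any
    index: logstash-*
    filter:
    - term:
        level: "error"
    alert:
    - rocketchat
    # Required: the incoming-webhook URL (placeholder value)
    rocket_chat_webhook_url: "https://rocketchat.example.com/hooks/CHANGE_ME"
    # Optional overrides documented above
    rocket_chat_username_override: "elastalert2"
    rocket_chat_channel_override: "#alerts"
    rocket_chat_msg_color: "warning"
    rocket_chat_text_string: "ElastAlert notification"
    rocket_chat_alert_fields:
    - title: Host
      value: hostname    # hypothetical field in the matched documents
      short: true

The unit tests added earlier in this series exercise these same options, so the sketch mirrors the payload they expect (username, channel, color, text, and attachment fields).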
From 56b6b95c01bbe1ef8b90ce9ca4c4cb735609ec9a Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Mon, 24 May 2021 06:53:17 +0900
Subject: [PATCH 0249/1065] fix typo Rocket.Chat

---
 docs/source/elastalert.rst | 2 +-
 docs/source/ruletypes.rst | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst
index b315f321b..bdd1071e8 100755
--- a/docs/source/elastalert.rst
+++ b/docs/source/elastalert.rst
@@ -48,7 +48,7 @@ Currently, we have support built in for these alert types:
 - OpsGenie
 - PagerDuty
 - PagerTree
-- RocketChat
+- Rocket.Chat
 - Squadcast
 - ServiceNow
 - Slack
diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index ed5d36585..b41387bfa 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2371,10 +2371,10 @@ Example usage::

     - "pagertree"
     pagertree_integration_url: "PagerTree Integration URL"

-RocketChat
-~~~~~~~~~~
+Rocket.Chat
+~~~~~~~~~~~

-RocketChat alerter will send a notification to a predefined channel. The body of the notification is formatted the same as with other alerters.
+Rocket.Chat alerter will send a notification to a predefined channel. The body of the notification is formatted the same as with other alerters.

 https://developer.rocket.chat/api/rest-api/methods/chat/postmessage

 The alerter requires the following option:

From c57bf4522b2ff33399c80e8a0f5906762a2ac7a3 Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Mon, 24 May 2021 06:59:59 +0900
Subject: [PATCH 0250/1065] pytest from 6.0.0 to 6.1.2

---
 requirements-dev.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements-dev.txt b/requirements-dev.txt
index 05ab898cf..3da2884fd 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -6,7 +6,7 @@
 m2r2
 pluggy>=0.12.0
 pre-commit
 pylint<2.9
-pytest==6.0.0
+pytest==6.1.2
 pytest-xdist==2.2.1
 setuptools
 sphinx_rtd_theme

From c01e485cb1c21165b825593e0e6ef2a65ecb3b54 Mon Sep 17 00:00:00 2001
From: Jason Ertel
Date: Tue, 25 May 2021 07:45:38 -0400
Subject: [PATCH 0251/1065] Reword ISSUE_TEMPLATE

---
 .github/ISSUE_TEMPLATE | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/ISSUE_TEMPLATE b/.github/ISSUE_TEMPLATE
index 81f621e05..cca969eb0 100644
--- a/.github/ISSUE_TEMPLATE
+++ b/.github/ISSUE_TEMPLATE
@@ -1,3 +1,3 @@
-PLEASE READ: The majority of topics are better suited for the Discussion forum. You can access this area by clicking The Discussions link above. Please search the discussions area first, for keywords that could be associated with the problem you are experiencing. If you do not see an existing discussion, please open a new discussion and include sufficient details for someone in the community to help you.
+STOP - You are probably in the wrong place! The majority of topics are better suited for the Discussion forum. You can access this area by clicking The Discussions link above. Please search the discussions area first, for keywords that could be associated with the problem you are experiencing. If you do not see an existing discussion, please open a new discussion and include sufficient details for someone in the community to help you.

-If you are confident you have discovered a legitimate issue, attach logs and reproduction steps to this issue. Failure to provide sufficient information will likely cause this issue to go stale and eventually be deleted.
\ No newline at end of file
+If you are confident you have discovered a legitimate issue, attach logs and reproduction steps to this issue. 
Failure to provide sufficient information will likely cause this issue to go stale and eventually be deleted.

From 7ae1161c8d5b1b7efb22fe14dba968a49d943ac8 Mon Sep 17 00:00:00 2001
From: Jason Ertel
Date: Wed, 26 May 2021 12:14:08 -0400
Subject: [PATCH 0252/1065] Add optional configuration values for scheduler
 max_threads and misfire_grace_time

---
 docs/source/elastalert.rst | 4 ++++
 elastalert/elastalert.py | 11 ++++++++++-
 elastalert/schema.yaml | 2 ++
 3 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst
index bdd1071e8..f5884b972 100755
--- a/docs/source/elastalert.rst
+++ b/docs/source/elastalert.rst
@@ -164,6 +164,8 @@ it ran the query for a given rule, and periodically query from that time until t
 his field is a nested unit of time, such as ``minutes: 5``. This is how time is defined in every ElastAlert configuration.

+``misfire_grace_time``: If the rule scheduler is running behind, due to large numbers of rules or long-running rules, this grace time setting allows a rule to still be executed, provided its next scheduled run time is no more than this grace period, in seconds, overdue. The default is 5 seconds.
+
 ``writeback_index``: The index on ``es_host`` to use.

 ``max_query_size``: The maximum number of documents that will be downloaded from Elasticsearch in a single query. The
@@ -174,6 +176,8 @@ using the size of ``max_query_size`` through the set amount of pages, when ``max

 ``max_scrolling_count``: The maximum amount of pages to scroll through. The default is ``0``, which means the scrolling has no limit. For example if this value is set to ``5`` and the ``max_query_size`` is set to ``10000`` then ``50000`` documents will be downloaded at most.

+``max_threads``: The maximum number of concurrent threads available to process scheduled rules. Large numbers of long-running rules may require this value be increased, though this could overload the Elasticsearch cluster if too many complex queries are running concurrently. Default is 10.
+
 ``scroll_keepalive``: The maximum time (formatted in `Time Units `_) the scrolling context should be kept alive. Avoid using high values as it abuses resources in Elasticsearch, but be mindful to allow sufficient time to finish processing all the results.

 ``max_aggregation``: The maximum number of alerts to aggregate together. 
If a rule has ``aggregation`` set, all diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index d9a4a4e77..e30f991c1 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -22,6 +22,7 @@ import dateutil.tz import pytz from apscheduler.schedulers.background import BackgroundScheduler +from apscheduler.executors.pool import ThreadPoolExecutor from croniter import croniter from elasticsearch.exceptions import ConnectionError from elasticsearch.exceptions import ElasticsearchException @@ -171,7 +172,15 @@ def __init__(self, args): self.thread_data.alerts_sent = 0 self.thread_data.num_hits = 0 self.thread_data.num_dupes = 0 - self.scheduler = BackgroundScheduler() + executors = { + 'default': ThreadPoolExecutor(max_workers=self.conf.get('max_threads', 10)), + } + job_defaults = { + 'misfire_grace_time': self.conf.get('misfire_grace_time', 5), + 'coalesce': True, + 'max_instances': 1 + } + self.scheduler = BackgroundScheduler(executors=executors, job_defaults=job_defaults) self.string_multi_field_name = self.conf.get('string_multi_field_name', False) self.statsd_instance_tag = self.conf.get('statsd_instance_tag', '') self.statsd_host = self.conf.get('statsd_host', '') diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 33aee2745..db2f52486 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -198,6 +198,8 @@ properties: query_delay: *timeframe max_query_size: {type: integer} max_scrolling: {type: integer} + max_threads: {type: integer} + misfire_grace_time: {type: integer} owner: {type: string} priority: {type: integer} From 0cf1075a9ec5df5a45fb07f190621b4831645212 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Wed, 26 May 2021 12:16:06 -0400 Subject: [PATCH 0253/1065] Add changelog entry --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cd53aa35a..cf3e0ed19 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,7 +15,7 @@ - None ## New features -- None +- Expose rule scheduler properties as configurable settings - [#192](https://github.com/jertel/elastalert2/pull/192) - #jertel ## Other changes - Speed up unit tests by adding default parallelism - [164](https://github.com/jertel/elastalert2/pull/164) - @ferozsalam From d82c7c0709ca804193373b37f312322a5a4fe450 Mon Sep 17 00:00:00 2001 From: LaZyDK Date: Thu, 27 May 2021 08:29:39 +0200 Subject: [PATCH 0254/1065] Update thehive.py Do not add empty observables. 
---
 elastalert/alerters/thehive.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/elastalert/alerters/thehive.py b/elastalert/alerters/thehive.py
index 9910398eb..3ad47be19 100644
--- a/elastalert/alerters/thehive.py
+++ b/elastalert/alerters/thehive.py
@@ -35,12 +35,13 @@ def load_observable_artifacts(self, match: dict):
         for mapping in self.rule.get('hive_observable_data_mapping', []):
             for observable_type, mapping_key in mapping.items():
                 data = self.lookup_field(match, mapping_key, '')
-                artifact = {'tlp': 2,
-                            'tags': [],
-                            'message': None,
-                            'dataType': observable_type,
-                            'data': data}
-                artifacts.append(artifact)
+                if len(data) != 0:
+                    artifact = {'tlp': 2,
+                                'tags': [],
+                                'message': None,
+                                'dataType': observable_type,
+                                'data': data}
+                    artifacts.append(artifact)

         return artifacts

From 5b02fc27a8e206b70c71a9082bf29ffbe8d43744 Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Sat, 29 May 2021 17:21:47 +0100
Subject: [PATCH 0255/1065] Refactor to absolute imports and add linter for
 the future

PEP-8 recommends absolute imports (https://www.python.org/dev/peps/pep-0008/#imports) and there's no reason for ElastAlert not to be using them. It also helps make the import structure clearer. I have also added a linter to automatically check for this in the future.

Where possible I have also tidied the imports, removing unnecessary multi-line `from` statements that import from the same file.

---
 elastalert/alerters/email.py | 6 ++---
 elastalert/alerters/jira.py | 16 ++++++-------
 elastalert/alerters/mattermost.py | 7 ++----
 elastalert/alerters/opsgenie.py | 7 ++----
 elastalert/alerters/rocketchat.py | 6 ++---
 elastalert/alerters/slack.py | 7 ++----
 elastalert/alerters/sns.py | 4 ++--
 elastalert/alerters/teams.py | 6 ++---
 elastalert/alerters/zabbix.py | 4 ++--
 elastalert/alerts.py | 6 ++---
 elastalert/config.py | 6 ++---
 elastalert/create_index.py | 2 +-
 elastalert/elastalert.py | 39 +++++++++----------------------
 elastalert/enhancements.py | 2 +-
 elastalert/loaders.py | 38 ++++++++++++------------------
 elastalert/ruletypes.py | 16 +++---------
 elastalert/util.py | 4 ++--
 requirements-dev.txt | 1 +
 18 files changed, 63 insertions(+), 114 deletions(-)

diff --git a/elastalert/alerters/email.py b/elastalert/alerters/email.py
index c921373fa..896647982 100644
--- a/elastalert/alerters/email.py
+++ b/elastalert/alerters/email.py
@@ -1,9 +1,7 @@
 import os

-from ..alerts import Alerter
-from ..util import elastalert_logger
-from ..util import lookup_es_key
-from ..util import EAException
+from elastalert.alerts import Alerter
+from elastalert.util import elastalert_logger, lookup_es_key, EAException
 from email.mime.text import MIMEText
 from email.mime.multipart import MIMEMultipart
 from email.mime.image import MIMEImage
diff --git a/elastalert/alerters/jira.py b/elastalert/alerters/jira.py
index 4614ea40a..eaae11ee2 100644
--- a/elastalert/alerters/jira.py
+++ b/elastalert/alerters/jira.py
@@ -1,14 +1,14 @@
 import datetime
 import sys

-from ..alerts import Alerter
-from ..alerts import BasicMatchString
-from ..util import elastalert_logger
-from ..util import lookup_es_key
-from ..util import pretty_ts
-from ..util import ts_now
-from ..util import ts_to_dt
-from ..util import EAException
+from elastalert.alerts import Alerter
+from elastalert.alerts import BasicMatchString
+from elastalert.util import elastalert_logger
+from elastalert.util import lookup_es_key
+from elastalert.util import pretty_ts
+from elastalert.util import ts_now
+from elastalert.util import 
ts_to_dt +from elastalert.util import EAException from jira.client import JIRA from jira.exceptions import JIRAError diff --git a/elastalert/alerters/mattermost.py b/elastalert/alerters/mattermost.py index 56f4c860c..1ed7c56b5 100644 --- a/elastalert/alerters/mattermost.py +++ b/elastalert/alerters/mattermost.py @@ -3,11 +3,8 @@ import requests import warnings -from ..alerts import Alerter -from ..alerts import DateTimeEncoder -from ..util import elastalert_logger -from ..util import lookup_es_key -from ..util import EAException +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import elastalert_logger, lookup_es_key, EAException from requests import RequestException diff --git a/elastalert/alerters/opsgenie.py b/elastalert/alerters/opsgenie.py index 7b3ac7027..b916446df 100644 --- a/elastalert/alerters/opsgenie.py +++ b/elastalert/alerters/opsgenie.py @@ -3,11 +3,8 @@ import os.path import requests -from ..alerts import Alerter -from ..alerts import BasicMatchString -from ..util import EAException -from ..util import elastalert_logger -from ..util import lookup_es_key +from elastalert.alerts import Alerter, BasicMatchString +from elastalert.util import EAException, elastalert_logger, lookup_es_key class OpsGenieAlerter(Alerter): diff --git a/elastalert/alerters/rocketchat.py b/elastalert/alerters/rocketchat.py index 5b86ce5f1..00da58cce 100644 --- a/elastalert/alerters/rocketchat.py +++ b/elastalert/alerters/rocketchat.py @@ -5,10 +5,8 @@ from requests.exceptions import RequestException import warnings -from ..alerts import Alerter, DateTimeEncoder -from ..util import EAException -from ..util import elastalert_logger -from ..util import lookup_es_key +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import EAException, elastalert_logger, lookup_es_key class RocketChatAlerter(Alerter): diff --git a/elastalert/alerters/slack.py b/elastalert/alerters/slack.py index 578128e95..97902ac69 100644 --- a/elastalert/alerters/slack.py +++ b/elastalert/alerters/slack.py @@ -3,11 +3,8 @@ import requests import warnings -from ..alerts import Alerter -from ..alerts import DateTimeEncoder -from ..util import elastalert_logger -from ..util import lookup_es_key -from ..util import EAException +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import elastalert_logger, EAException, lookup_es_key from requests.exceptions import RequestException diff --git a/elastalert/alerters/sns.py b/elastalert/alerters/sns.py index 06bc7b835..e77eeb48e 100644 --- a/elastalert/alerters/sns.py +++ b/elastalert/alerters/sns.py @@ -1,7 +1,7 @@ import boto3 -from ..alerts import Alerter -from ..util import elastalert_logger +from elastalert.alerts import Alerter +from elastalert.util import elastalert_logger class SnsAlerter(Alerter): diff --git a/elastalert/alerters/teams.py b/elastalert/alerters/teams.py index c242982a9..201159bab 100644 --- a/elastalert/alerters/teams.py +++ b/elastalert/alerters/teams.py @@ -1,10 +1,8 @@ import json import requests -from ..alerts import Alerter -from ..alerts import DateTimeEncoder -from ..util import EAException -from ..util import elastalert_logger +from elastalert.alerts import Alerter, DateTimeEncoder +from elastalert.util import EAException, elastalert_logger from requests.exceptions import RequestException diff --git a/elastalert/alerters/zabbix.py b/elastalert/alerters/zabbix.py index 1ec9eab09..533ac7ef4 100644 --- a/elastalert/alerters/zabbix.py +++ b/elastalert/alerters/zabbix.py @@ -2,8 +2,8 @@ 
from pyzabbix import ZabbixSender, ZabbixMetric, ZabbixAPI -from ..alerts import Alerter -from ..util import elastalert_logger, EAException +from elastalert.alerts import Alerter +from elastalert.util import elastalert_logger, EAException class ZabbixClient(ZabbixAPI): diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 194667ef5..15bb5fdc1 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -5,9 +5,9 @@ from texttable import Texttable -from .util import EAException -from .util import lookup_es_key -from .yaml import read_yaml +from elastalert.util import EAException +from elastalert.util import lookup_es_key +from elastalert.yaml import read_yaml class DateTimeEncoder(json.JSONEncoder): diff --git a/elastalert/config.py b/elastalert/config.py index 1c1669448..de6ae77e1 100644 --- a/elastalert/config.py +++ b/elastalert/config.py @@ -5,10 +5,8 @@ from envparse import Env -from . import loaders -from .util import EAException -from .util import elastalert_logger -from .util import get_module +from elastalert import loaders +from elastalert.util import EAException, elastalert_logger, get_module from elastalert.yaml import read_yaml diff --git a/elastalert/create_index.py b/elastalert/create_index.py index 3664eb362..f8b4ecbbe 100644 --- a/elastalert/create_index.py +++ b/elastalert/create_index.py @@ -14,7 +14,7 @@ from elasticsearch.exceptions import NotFoundError from envparse import Env -from .auth import Auth +from elastalert.auth import Auth env = Env(ES_USE_SSL=bool) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index e30f991c1..74a3d9fef 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -28,36 +28,19 @@ from elasticsearch.exceptions import ElasticsearchException from elasticsearch.exceptions import NotFoundError from elasticsearch.exceptions import TransportError -from .prometheus_wrapper import PrometheusWrapper -from . 
import kibana +from elastalert import kibana from elastalert.alerters.debug import DebugAlerter -from .config import load_conf -from .enhancements import DropMatchException -from .kibana_discover import generate_kibana_discover_url -from .ruletypes import FlatlineRule -from .util import add_raw_postfix -from .util import cronite_datetime_to_timestamp -from .util import dt_to_ts -from .util import dt_to_unix -from .util import EAException -from .util import elastalert_logger -from .util import elasticsearch_client -from .util import format_index -from .util import lookup_es_key -from .util import parse_deadline -from .util import parse_duration -from .util import pretty_ts -from .util import replace_dots_in_field_names -from .util import seconds -from .util import set_es_key -from .util import should_scrolling_continue -from .util import total_seconds -from .util import ts_add -from .util import ts_now -from .util import ts_to_dt -from .util import unix_to_dt -from .util import ts_utc_to_tz +from elastalert.config import load_conf +from elastalert.enhancements import DropMatchException +from elastalert.kibana_discover import generate_kibana_discover_url +from elastalert.prometheus_wrapper import PrometheusWrapper +from elastalert.ruletypes import FlatlineRule +from elastalert.util import (add_raw_postfix, cronite_datetime_to_timestamp, dt_to_ts, dt_to_unix, EAException, + elastalert_logger, elasticsearch_client, format_index, lookup_es_key, parse_deadline, + parse_duration, pretty_ts, replace_dots_in_field_names, seconds, set_es_key, + should_scrolling_continue, total_seconds, ts_add, ts_now, ts_to_dt, unix_to_dt, + ts_utc_to_tz) class ElastAlerter(object): diff --git a/elastalert/enhancements.py b/elastalert/enhancements.py index 6cc1cdd57..bad17875d 100644 --- a/elastalert/enhancements.py +++ b/elastalert/enhancements.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -from .util import pretty_ts +from elastalert.util import pretty_ts class BaseEnhancement(object): diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 99c722604..081a0e5aa 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -34,30 +34,22 @@ import elastalert.alerters.thehive import elastalert.alerters.rocketchat -from . import alerts -from . import enhancements -from . 
import ruletypes -from .alerters.email import EmailAlerter -from .alerters.jira import JiraAlerter -from .alerters.mattermost import MattermostAlerter -from .alerters.opsgenie import OpsGenieAlerter +from elastalert import alerts +from elastalert import enhancements +from elastalert import ruletypes +from elastalert.alerters.email import EmailAlerter +from elastalert.alerters.jira import JiraAlerter +from elastalert.alerters.mattermost import MattermostAlerter +from elastalert.alerters.opsgenie import OpsGenieAlerter from elastalert.alerters.pagerduty import PagerDutyAlerter -from .alerters.teams import MsTeamsAlerter -from .alerters.slack import SlackAlerter -from .alerters.sns import SnsAlerter -from .alerters.zabbix import ZabbixAlerter -from .util import dt_to_ts -from .util import dt_to_ts_with_format -from .util import dt_to_unix -from .util import dt_to_unixms -from .util import EAException -from .util import elastalert_logger -from .util import get_module -from .util import ts_to_dt -from .util import ts_to_dt_with_format -from .util import unix_to_dt -from .util import unixms_to_dt -from .yaml import read_yaml +from elastalert.alerters.teams import MsTeamsAlerter +from elastalert.alerters.slack import SlackAlerter +from elastalert.alerters.sns import SnsAlerter +from elastalert.alerters.zabbix import ZabbixAlerter +from elastalert.util import dt_to_ts +from elastalert.util import (dt_to_ts_with_format, dt_to_unix, dt_to_unixms, EAException, elastalert_logger, get_module, + ts_to_dt, ts_to_dt_with_format, unix_to_dt, unixms_to_dt) +from elastalert.yaml import read_yaml class RulesLoader(object): diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 42fcc95b7..fce33f50b 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -5,19 +5,9 @@ from sortedcontainers import SortedKeyList as sortedlist -from .util import add_raw_postfix -from .util import dt_to_ts -from .util import EAException -from .util import elastalert_logger -from .util import elasticsearch_client -from .util import format_index -from .util import hashable -from .util import lookup_es_key -from .util import new_get_event_ts -from .util import pretty_ts -from .util import total_seconds -from .util import ts_now -from .util import ts_to_dt +from elastalert.util import (add_raw_postfix, dt_to_ts, EAException, elastalert_logger, elasticsearch_client, + format_index, hashable, lookup_es_key, new_get_event_ts, pretty_ts, total_seconds, + ts_now, ts_to_dt) class RuleType(object): diff --git a/elastalert/util.py b/elastalert/util.py index b512b092c..45d9cbb2a 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -10,8 +10,8 @@ import pytz from six import string_types -from . 
import ElasticSearchClient -from .auth import Auth +from elastalert import ElasticSearchClient +from elastalert.auth import Auth logging.basicConfig() elastalert_logger = logging.getLogger('elastalert') diff --git a/requirements-dev.txt b/requirements-dev.txt index 3da2884fd..f3d183c22 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,6 +2,7 @@ docutils<0.17 pytest-cov==2.12.0 flake8 +flake8-absolute-import m2r2 pluggy>=0.12.0 pre-commit From 3cda876df6450ebd77939eb491d16861f79b0522 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sat, 29 May 2021 17:42:29 +0100 Subject: [PATCH 0256/1065] Tidy a couple more imports --- elastalert/alerts.py | 3 +-- elastalert/loaders.py | 4 +--- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 15bb5fdc1..2525a0ce3 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -5,8 +5,7 @@ from texttable import Texttable -from elastalert.util import EAException -from elastalert.util import lookup_es_key +from elastalert.util import EAException, lookup_es_key from elastalert.yaml import read_yaml diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 081a0e5aa..132d3c1bb 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -34,9 +34,7 @@ import elastalert.alerters.thehive import elastalert.alerters.rocketchat -from elastalert import alerts -from elastalert import enhancements -from elastalert import ruletypes +from elastalert import alerts, enhancements, ruletypes from elastalert.alerters.email import EmailAlerter from elastalert.alerters.jira import JiraAlerter from elastalert.alerters.mattermost import MattermostAlerter From 27fd31d5a065f3197c312e6b1b7a66ba0fccd623 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sat, 29 May 2021 17:47:28 +0100 Subject: [PATCH 0257/1065] Tidy Jira alerter imports --- elastalert/alerters/jira.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/elastalert/alerters/jira.py b/elastalert/alerters/jira.py index eaae11ee2..d4659bd8c 100644 --- a/elastalert/alerters/jira.py +++ b/elastalert/alerters/jira.py @@ -3,12 +3,8 @@ from elastalert.alerts import Alerter from elastalert.alerts import BasicMatchString -from elastalert.util import elastalert_logger -from elastalert.util import lookup_es_key -from elastalert.util import pretty_ts -from elastalert.util import ts_now -from elastalert.util import ts_to_dt -from elastalert.util import EAException +from elastalert.util import (elastalert_logger, lookup_es_key, pretty_ts, ts_now, + ts_to_dt, EAException) from jira.client import JIRA from jira.exceptions import JIRAError From 553c4c60526724da019d9ff285bef9c7f7c7f5bb Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 31 May 2021 05:10:42 +0100 Subject: [PATCH 0258/1065] Fix Zabbix test file name Thanks to @nsano-rururu for spotting this --- tests/alerters/{zabbix_text.py => zabbix_test.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/alerters/{zabbix_text.py => zabbix_test.py} (100%) diff --git a/tests/alerters/zabbix_text.py b/tests/alerters/zabbix_test.py similarity index 100% rename from tests/alerters/zabbix_text.py rename to tests/alerters/zabbix_test.py From 58abd89813c64feaef3b5b437f4bbcec358742e2 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 31 May 2021 06:18:55 +0100 Subject: [PATCH 0259/1065] Fix minor overflow in documentation --- docs/source/running_elastalert.rst | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git 
a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst
index 443ac0516..268abfccf 100644
--- a/docs/source/running_elastalert.rst
+++ b/docs/source/running_elastalert.rst
@@ -182,13 +182,17 @@ Let's break down the response to see what's happening.

 ``Queried rule Example rule from 1-15 14:22 PST to 1-15 15:07 PST: 5 hits``

-ElastAlert periodically queries the most recent ``buffer_time`` (default 45 minutes) for data matching the filters. Here we see that it matched 5 hits.
+ElastAlert periodically queries the most recent ``buffer_time`` (default 45 minutes) for data matching the filters. Here we see that it matched 5 hits:

-``POST http://elasticsearch.example.com:14900/elastalert_status/elastalert_status?op_type=create [status:201 request:0.025s]``
+.. code-block::

-This line showing that ElastAlert uploaded a document to the elastalert_status index with information about the query it just made.
+    POST http://elasticsearch.example.com:14900/elastalert_status/elastalert_status?op_type=create [status:201 request:0.025s]

-``Ran Example rule from 1-15 14:22 PST to 1-15 15:07 PST: 5 query hits (0 already seen), 0 matches, 0 alerts sent``
+This line shows that ElastAlert uploaded a document to the elastalert_status index with information about the query it just made:
+
+.. code-block::
+
+    Ran Example rule from 1-15 14:22 PST to 1-15 15:07 PST: 5 query hits (0 already seen), 0 matches, 0 alerts sent

 The line means ElastAlert has finished processing the rule. For large time periods, sometimes multiple queries may be run, but their data will be processed together. ``query hits`` is the number of documents that are downloaded from Elasticsearch, ``already seen`` refers to documents that were already counted in a previous overlapping query and will be ignored, ``matches`` is the number of matches the rule type outputted, and ``alerts sent`` is the number of alerts actually sent. This may differ from ``matches`` because of options like ``realert`` and ``aggregation`` or because of an error.
@@ -215,8 +219,9 @@ The body of the email will contain something like::

 If an error occurred, such as an unreachable SMTP server, you may see:

+.. 
code-block:: -``ERROR:root:Error while running alert email: Error connecting to SMTP host: [Errno 61] Connection refused`` + ERROR:root:Error while running alert email: Error connecting to SMTP host: [Errno 61] Connection refused Note that if you stop ElastAlert and then run it again later, it will look up ``elastalert_status`` and begin querying From af8bd42a631765c5252c44b0326c8b9b1e2ccde1 Mon Sep 17 00:00:00 2001 From: LaZyDK Date: Mon, 31 May 2021 11:45:09 +0200 Subject: [PATCH 0260/1065] Force tags as strings As explained in issue #201 --- elastalert/alerters/thehive.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/elastalert/alerters/thehive.py b/elastalert/alerters/thehive.py index 3ad47be19..b4a75490a 100644 --- a/elastalert/alerters/thehive.py +++ b/elastalert/alerters/thehive.py @@ -66,9 +66,9 @@ def load_tags(self, tag_names: list, match: dict): tag_value = self.lookup_field(match, tag, tag) if isinstance(tag_value, list): for sub_tag in tag_value: - tag_values.add(sub_tag) + tag_values.add(str(sub_tag)) else: - tag_values.add(tag_value) + tag_values.add(str(tag_value)) return tag_values From ef8f626afe704e79d10c55be263bb99e005fdc52 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Mon, 31 May 2021 08:17:02 -0400 Subject: [PATCH 0261/1065] Updated thehive alerter unit test to prove the fix works --- tests/alerters/thehive_test.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/alerters/thehive_test.py b/tests/alerters/thehive_test.py index 56ccd2c67..0a61dc32a 100644 --- a/tests/alerters/thehive_test.py +++ b/tests/alerters/thehive_test.py @@ -18,7 +18,7 @@ def test_thehive_alerter(): 'severity': 2, 'source': 'elastalert', 'status': 'New', - 'tags': ['test.ip'], + 'tags': ['test.port'], 'tlp': 3, 'type': 'external'}, 'hive_connection': {'hive_apikey': '', @@ -33,7 +33,8 @@ def test_thehive_alerter(): alert = HiveAlerter(rule) match = { "test": { - "ip": "127.0.0.1" + "ip": "127.0.0.1", + "port": 9876 }, "@timestamp": "2021-05-09T14:43:30", } @@ -62,7 +63,7 @@ def test_thehive_alerter(): "source": "elastalert", "status": "New", "tags": [ - "127.0.0.1" + "9876" ], "title": "test-thehive", "tlp": 3, From 83504aec8977a83564a6c17ae1fdbad12043d2e7 Mon Sep 17 00:00:00 2001 From: Vincent Bisserie Date: Wed, 2 Jun 2021 11:17:23 +0200 Subject: [PATCH 0262/1065] Add ApiKey authentication Signed-off-by: Vincent Bisserie --- .pre-commit-config.yaml | 4 ++-- docs/source/elastalert.rst | 2 ++ docs/source/ruletypes.rst | 17 +++++++++++------ elastalert/config.py | 6 ++++-- elastalert/create_index.py | 5 +++++ elastalert/util.py | 17 +++++++++++++++-- 6 files changed, 39 insertions(+), 12 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cf58a0ac6..059f7314e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,5 @@ repos: -- repo: git://github.com/pre-commit/pre-commit-hooks +- repo: https://github.com/pre-commit/pre-commit-hooks sha: v1.1.1 hooks: - id: trailing-whitespace @@ -13,7 +13,7 @@ repos: - id: debug-statements - id: requirements-txt-fixer - id: name-tests-test -- repo: git://github.com/asottile/reorder_python_imports +- repo: https://github.com/asottile/reorder_python_imports sha: v0.3.5 hooks: - id: reorder-python-imports diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index f5884b972..69c662217 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -144,6 +144,8 @@ The environment variable ``ES_USE_SSL`` will override this field. 
``es_password``: Optional; basic-auth password for connecting to ``es_host``. The environment variable ``ES_PASSWORD`` will override this field. +``es_api_key``: Optional; Base64 api-key token for connecting to ``es_host``. The environment variable ``ES_API_KEY`` will override this field. + ``es_url_prefix``: Optional; URL prefix for the Elasticsearch endpoint. The environment variable ``ES_URL_PREFIX`` will override this field. ``es_send_get_body_as``: Optional; Method for querying Elasticsearch - ``GET``, ``POST`` or ``source``. The default is ``GET`` diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index b41387bfa..bc88fe83e 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -50,7 +50,7 @@ Rule Configuration Cheat Sheet +--------------------------------------------------------------+ | | ``es_send_get_body_as`` (string, default "GET") | | +--------------------------------------------------------------+ | -| ``aggregation`` (time, no default) | | +| ``aggregation`` (time, no default) | | +--------------------------------------------------------------+ | | ``limit_execution`` (string, no default) | | +--------------------------------------------------------------+ | @@ -301,6 +301,11 @@ es_bearer ``es_bearer``: bearer-token authorization for connecting to ``es_host``. (Optional, string, no default) The environment variable ``ES_BEARER`` will override this field. +es_api_key +^^^^^^^^^^^ + +``es_api_key``: api-key-token authorization for connecting to ``es_host``. (Optional, base64 string, no default) The environment variable ``ES_API_KEY`` will override this field. + es_url_prefix ^^^^^^^^^^^^^ @@ -735,7 +740,7 @@ Some rules and alerts require additional options, which also go in the top level query_timezone ^^^^^^^^^^^^^^ -``query_timezone``: Whether to convert UTC time to the specified time zone in rule queries. +``query_timezone``: Whether to convert UTC time to the specified time zone in rule queries. If not set, start and end time of query will be used UTC. (Optional, string, default empty string) Example value : query_timezone: "Europe/Istanbul" @@ -1577,7 +1582,7 @@ AWS SES requires one option: ``ses_email``: An address or list of addresses to sent the alert to. -``ses_from_addr``: This sets the From header in the email. +``ses_from_addr``: This sets the From header in the email. Optional: @@ -1589,7 +1594,7 @@ Optional: ``ses_aws_profile``: The AWS profile to use. If none specified, the default will be used. -``ses_email_reply_to``: This sets the Reply-To header in the email. +``ses_email_reply_to``: This sets the Reply-To header in the email. ``ses_cc``: This adds the CC emails to the list of recipients. By default, this is left empty. @@ -1655,7 +1660,7 @@ Example When not using aws_profile usage:: sns_aws_access_key_id: 'XXXXXXXXXXXXXXXXXX'' sns_aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY' sns_aws_region: 'us-east-1' # You must nest aws_region within your alert configuration so it is not used to sign AWS requests. - + Example When to use aws_profile usage:: # Create ~/.aws/credentials @@ -1742,7 +1747,7 @@ Example usage using new-style format:: Datadog ~~~~~~~ -This alert will create a [Datadog Event](https://docs.datadoghq.com/events/). Events are limited to 4000 characters. If an event is sent that contains +This alert will create a [Datadog Event](https://docs.datadoghq.com/events/). Events are limited to 4000 characters. 
If an event is sent that contains
 a message that is longer than 4000 characters, only its first 4000 characters will be displayed.

 This alert requires two additional options:
diff --git a/elastalert/config.py b/elastalert/config.py
index 1c1669448..b6c1fd45e 100644
--- a/elastalert/config.py
+++ b/elastalert/config.py
@@ -6,8 +6,9 @@
 from envparse import Env

-from elastalert import loaders
-from elastalert.util import EAException, elastalert_logger, get_module
-
+from elastalert import loaders
+from elastalert.util import EAException
+from elastalert.util import elastalert_logger
+from elastalert.util import get_module
 from elastalert.yaml import read_yaml

 # Required global (config.yaml) configuration options
@@ -18,6 +19,7 @@
     'ES_BEARER': 'es_bearer',
     'ES_PASSWORD': 'es_password',
     'ES_USERNAME': 'es_username',
+    'ES_API_KEY': 'es_api_key',
     'ES_HOST': 'es_host',
     'ES_PORT': 'es_port',
     'ES_URL_PREFIX': 'es_url_prefix',
diff --git a/elastalert/create_index.py b/elastalert/create_index.py
index f8b4ecbbe..d4dd08cb5 100644
--- a/elastalert/create_index.py
+++ b/elastalert/create_index.py
@@ -153,6 +153,7 @@ def main():
     parser.add_argument('--username', default=os.environ.get('ES_USERNAME', None), help='Elasticsearch username')
     parser.add_argument('--password', default=os.environ.get('ES_PASSWORD', None), help='Elasticsearch password')
     parser.add_argument('--bearer', default=os.environ.get('ES_BEARER', None), help='Elasticsearch bearer token')
+    parser.add_argument('--api-key', default=os.environ.get('ES_API_KEY', None), help='Elasticsearch api-key token')
     parser.add_argument('--url-prefix', help='Elasticsearch URL prefix')
     parser.add_argument('--no-auth', action='store_const', const=True, help='Suppress prompt for basic auth')
     parser.add_argument('--ssl', action='store_true', default=env('ES_USE_SSL', None), help='Use TLS')
@@ -198,6 +199,7 @@ def main():
         username = args.username if args.username else data.get('es_username')
         password = args.password if args.password else data.get('es_password')
         bearer = args.bearer if args.bearer else data.get('es_bearer')
+        api_key = args.api_key if args.api_key else data.get('es_api_key')
         url_prefix = args.url_prefix if args.url_prefix is not None else data.get('es_url_prefix', '')
         use_ssl = args.ssl if args.ssl is not None else data.get('use_ssl')
         verify_certs = args.verify_certs if args.verify_certs is not None else data.get('verify_certs') is not False
@@ -212,6 +214,7 @@ def main():
         username = args.username if args.username else None
         password = args.password if args.password else None
         bearer = args.bearer if args.bearer else None
+        api_key = args.api_key if args.api_key else None
         aws_region = args.aws_region
         host = args.host if args.host else input('Enter Elasticsearch host: ')
         port = args.port if args.port else int(input('Enter Elasticsearch port: '))
@@ -249,6 +252,8 @@ def main():
     headers = {}
     if bearer is not None:
         headers.update({'Authorization': f'Bearer {bearer}'})
+    if api_key is not None:
+        headers.update({'Authorization': f'ApiKey {api_key}'})

     es = Elasticsearch(
         host=host,
diff --git a/elastalert/util.py b/elastalert/util.py
index 45d9cbb2a..22d881205 100644
--- a/elastalert/util.py
+++ b/elastalert/util.py
@@ -323,13 +323,20 @@ def elasticsearch_client(conf):
     """ returns an :class:`ElasticSearchClient` instance configured using an es_conn_config """
     es_conn_conf = build_es_conn_config(conf)
     auth = Auth()
+    username = es_conn_conf['es_username']
+    password = es_conn_conf['es_password']
+    if es_conn_conf['es_bearer'] or es_conn_conf['es_api_key']:
+        username = None
+        password = 
None es_conn_conf['http_auth'] = auth(host=es_conn_conf['es_host'], - username=None if es_conn_conf['es_bearer'] else es_conn_conf['es_username'], - password=None if es_conn_conf['es_bearer'] else es_conn_conf['es_password'], + username=username, + password=password, aws_region=es_conn_conf['aws_region'], profile_name=es_conn_conf['profile']) if es_conn_conf['es_bearer']: es_conn_conf['headers'] = {"Authorization": "Bearer " + es_conn_conf['es_bearer']} + if es_conn_conf['es_api_key']: + es_conn_conf['headers'] = {"Authorization": "ApiKey " + es_conn_conf['es_api_key']} return ElasticSearchClient(es_conn_conf) @@ -348,6 +355,7 @@ def build_es_conn_config(conf): parsed_conf['http_auth'] = None parsed_conf['es_username'] = None parsed_conf['es_password'] = None + parsed_conf['es_api_key'] = None parsed_conf['es_bearer'] = None parsed_conf['aws_region'] = None parsed_conf['profile'] = None @@ -366,6 +374,11 @@ def build_es_conn_config(conf): parsed_conf['es_username'] = conf['es_username'] parsed_conf['es_password'] = conf['es_password'] + if os.environ.get('ES_API_KEY'): + parsed_conf['es_api_key'] = os.environ.get('ES_API_KEY') + elif 'es_api_key' in conf: + parsed_conf['es_api_key'] = conf['es_api_key'] + if os.environ.get('ES_BEARER'): parsed_conf['es_bearer'] = os.environ.get('ES_BEARER') elif 'es_bearer' in conf: From 0dc9f650c78987e3dd847204e2e18bfb108c20e8 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Wed, 2 Jun 2021 07:34:11 -0400 Subject: [PATCH 0263/1065] Clarify auth priorities --- docs/source/elastalert.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 69c662217..3cf1df206 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -144,7 +144,9 @@ The environment variable ``ES_USE_SSL`` will override this field. ``es_password``: Optional; basic-auth password for connecting to ``es_host``. The environment variable ``ES_PASSWORD`` will override this field. -``es_api_key``: Optional; Base64 api-key token for connecting to ``es_host``. The environment variable ``ES_API_KEY`` will override this field. +``es_bearer``: Optional; Bearer token for connecting to ``es_host``. The environment variable ``ES_BEARER`` will override this field. This authentication option will override the password authentication option. + +``es_api_key``: Optional; Base64 api-key token for connecting to ``es_host``. The environment variable ``ES_API_KEY`` will override this field. This authentication option will override both the bearer and the password authentication options. ``es_url_prefix``: Optional; URL prefix for the Elasticsearch endpoint. The environment variable ``ES_URL_PREFIX`` will override this field. 
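Taken together, the ApiKey patch and the documentation fix above give ``config.yaml`` three alternative credential mechanisms with a defined precedence. The fragment below is an illustrative sketch only: the host and credential values are placeholders, and required settings unrelated to authentication, such as ``rules_folder`` and ``run_every``, are omitted::

    es_host: elasticsearch.example.com
    es_port: 9200
    use_ssl: true

    # Basic auth; ignored when es_bearer or es_api_key is set
    es_username: "elastic"
    es_password: "changeme"

    # Bearer token; takes precedence over es_username/es_password
    #es_bearer: "PLACEHOLDER_TOKEN"

    # Base64 api-key token; takes precedence over both options above.
    # For Elasticsearch this is typically the Base64 encoding of "id:api_key".
    es_api_key: "UExBQ0VIT0xERVI6bm90LWEtcmVhbC1rZXk="

Each of these can instead come from the environment: ``ES_USERNAME``, ``ES_PASSWORD``, ``ES_BEARER``, and ``ES_API_KEY`` override their file counterparts, mirroring the environment-variable mapping added to ``config.py`` above.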
From d6862f1da6289e0e296a73d2d86cc2b9dd87172b Mon Sep 17 00:00:00 2001
From: Jason Ertel
Date: Wed, 2 Jun 2021 07:36:42 -0400
Subject: [PATCH 0264/1065] Clarify rule auth priorities

---
 docs/source/ruletypes.rst | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index bc88fe83e..a54e46144 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -42,6 +42,8 @@ Rule Configuration Cheat Sheet
 +--------------------------------------------------------------+ |
 | ``es_bearer`` (string, no default) | |
 +--------------------------------------------------------------+ |
+| ``es_api_key`` (string, no default) | |
++--------------------------------------------------------------+ |
 | ``es_url_prefix`` (string, no default) | |
 +--------------------------------------------------------------+ |
 | ``statsd_instance_tag`` (string, no default) | |
@@ -299,12 +301,12 @@ es_password

 es_bearer
 ^^^^^^^^^^^
-``es_bearer``: bearer-token authorization for connecting to ``es_host``. (Optional, string, no default) The environment variable ``ES_BEARER`` will override this field.
+``es_bearer``: bearer-token authorization for connecting to ``es_host``. (Optional, string, no default) The environment variable ``ES_BEARER`` will override this field. This authentication option will override the password authentication option.

 es_api_key
 ^^^^^^^^^^^
-``es_api_key``: api-key-token authorization for connecting to ``es_host``. (Optional, base64 string, no default) The environment variable ``ES_API_KEY`` will override this field.
+``es_api_key``: api-key-token authorization for connecting to ``es_host``. (Optional, base64 string, no default) The environment variable ``ES_API_KEY`` will override this field. This authentication option will override both the bearer and the password authentication options. 
es_url_prefix ^^^^^^^^^^^^^ From c912cee7f37677b041227d667cb1ae4da3a9c543 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Wed, 2 Jun 2021 22:05:49 +0900 Subject: [PATCH 0265/1065] Kibana Discover support kibana 7.13 --- docs/source/ruletypes.rst | 4 ++-- elastalert/kibana_discover.py | 2 +- elastalert/schema.yaml | 2 +- tests/kibana_discover_test.py | 27 +++++++++++++++++++++------ 4 files changed, 25 insertions(+), 10 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index a54e46144..2480e4fac 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -597,9 +597,9 @@ The currently supported versions of Kibana Discover are: - `5.6` - `6.0`, `6.1`, `6.2`, `6.3`, `6.4`, `6.5`, `6.6`, `6.7`, `6.8` -- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10`, `7.11`, `7.12` +- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10`, `7.11`, `7.12`, `7.13` -``kibana_discover_version: '7.12'`` +``kibana_discover_version: '7.13'`` kibana_discover_index_pattern_id ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index 58e3476f4..13d7961cf 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -15,7 +15,7 @@ kibana_default_timedelta = datetime.timedelta(minutes=10) kibana5_kibana6_versions = frozenset(['5.6', '6.0', '6.1', '6.2', '6.3', '6.4', '6.5', '6.6', '6.7', '6.8']) -kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11', '7.12']) +kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11', '7.12', '7.13']) def generate_kibana_discover_url(rule, match): ''' Creates a link for a kibana discover app. 
''' diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index db2f52486..bffcc1406 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -227,7 +227,7 @@ properties: ### Kibana Discover App Link generate_kibana_discover_url: {type: boolean} kibana_discover_app_url: {type: string, format: uri} - kibana_discover_version: {type: string, enum: ['7.12', '7.11', '7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} + kibana_discover_version: {type: string, enum: ['7.13', '7.12', '7.11', '7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} kibana_discover_index_pattern_id: {type: string, minLength: 1} kibana_discover_columns: {type: array, items: {type: string, minLength: 1}, minItems: 1} kibana_discover_from_timedelta: *timedelta diff --git a/tests/kibana_discover_test.py b/tests/kibana_discover_test.py index 0e796e480..c9fbbb31a 100644 --- a/tests/kibana_discover_test.py +++ b/tests/kibana_discover_test.py @@ -38,7 +38,22 @@ def test_generate_kibana_discover_url_with_kibana_5x_and_6x(kibana_version): assert url == expectedUrl -@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11', '7.12']) +@pytest.mark.parametrize("kibana_version", [ + '7.0', + '7.1', + '7.2', + '7.3', + '7.4', + '7.5', + '7.6', + '7.7', + '7.8', + '7.9', + '7.10', + '7.11', + '7.12', + '7.13' +]) def test_generate_kibana_discover_url_with_kibana_7x(kibana_version): url = generate_kibana_discover_url( rule={ @@ -171,7 +186,7 @@ def test_generate_kibana_discover_url_with_from_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.12', + 'kibana_discover_version': '7.13', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timestamp_field': 'timestamp' @@ -204,7 +219,7 @@ def test_generate_kibana_discover_url_with_from_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.12', + 'kibana_discover_version': '7.13', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_from_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -238,7 +253,7 @@ def test_generate_kibana_discover_url_with_to_timedelta(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.12', + 'kibana_discover_version': '7.13', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_to_timedelta': timedelta(hours=1), 'timestamp_field': 'timestamp' @@ -271,7 +286,7 @@ def test_generate_kibana_discover_url_with_to_timedelta_and_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.12', + 'kibana_discover_version': '7.13', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'kibana_discover_to_timedelta': timedelta(hours=1), 'timeframe': timedelta(minutes=20), @@ -305,7 +320,7 @@ def test_generate_kibana_discover_url_with_timeframe(): url = generate_kibana_discover_url( rule={ 'kibana_discover_app_url': 
'http://kibana:5601/#/discover', - 'kibana_discover_version': '7.12', + 'kibana_discover_version': '7.13', 'kibana_discover_index_pattern_id': 'd6cabfb6-aaef-44ea-89c5-600e9a76991a', 'timeframe': timedelta(minutes=20), 'timestamp_field': 'timestamp' From b06602e159448678c1af955f6d601915f446e7b5 Mon Sep 17 00:00:00 2001 From: Vincent Bisserie Date: Thu, 3 Jun 2021 14:59:20 +0200 Subject: [PATCH 0266/1065] Follow symlinks for rule files Signed-off-by: Vincent Bisserie --- elastalert/loaders.py | 33 +++++++++++++++++++++------------ 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 132d3c1bb..9a1c6bdae 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -8,45 +8,54 @@ import jsonschema import yaml import yaml.scanner -from jinja2 import Template from jinja2 import Environment from jinja2 import FileSystemLoader +from jinja2 import Template import elastalert.alerters.alerta import elastalert.alerters.chatwork import elastalert.alerters.command import elastalert.alerters.datadog import elastalert.alerters.debug +import elastalert.alerters.dingtalk import elastalert.alerters.discord +import elastalert.alerters.exotel import elastalert.alerters.gitter import elastalert.alerters.googlechat import elastalert.alerters.httppost import elastalert.alerters.line import elastalert.alerters.pagertree -import elastalert.alerters.exotel +import elastalert.alerters.rocketchat import elastalert.alerters.servicenow import elastalert.alerters.ses import elastalert.alerters.stomp import elastalert.alerters.telegram +import elastalert.alerters.thehive import elastalert.alerters.twilio import elastalert.alerters.victorops -import elastalert.alerters.dingtalk -import elastalert.alerters.thehive -import elastalert.alerters.rocketchat - -from elastalert import alerts, enhancements, ruletypes +from elastalert import alerts +from elastalert import enhancements +from elastalert import ruletypes from elastalert.alerters.email import EmailAlerter from elastalert.alerters.jira import JiraAlerter from elastalert.alerters.mattermost import MattermostAlerter from elastalert.alerters.opsgenie import OpsGenieAlerter from elastalert.alerters.pagerduty import PagerDutyAlerter -from elastalert.alerters.teams import MsTeamsAlerter from elastalert.alerters.slack import SlackAlerter from elastalert.alerters.sns import SnsAlerter +from elastalert.alerters.teams import MsTeamsAlerter from elastalert.alerters.zabbix import ZabbixAlerter from elastalert.util import dt_to_ts -from elastalert.util import (dt_to_ts_with_format, dt_to_unix, dt_to_unixms, EAException, elastalert_logger, get_module, - ts_to_dt, ts_to_dt_with_format, unix_to_dt, unixms_to_dt) +from elastalert.util import dt_to_ts_with_format +from elastalert.util import dt_to_unix +from elastalert.util import dt_to_unixms +from elastalert.util import EAException +from elastalert.util import elastalert_logger +from elastalert.util import get_module +from elastalert.util import ts_to_dt +from elastalert.util import ts_to_dt_with_format +from elastalert.util import unix_to_dt +from elastalert.util import unixms_to_dt from elastalert.yaml import read_yaml @@ -423,7 +432,7 @@ def _dt_to_ts_with_format(dt): elastalert_logger.warning('Did you mean to use %s in the index? 
' 'The index will be formatted like %s' % (token, datetime.datetime.now().strftime( - rule.get('index')))) + rule.get('index')))) if rule.get('scan_entire_timeframe') and not rule.get('timeframe'): raise EAException('scan_entire_timeframe can only be used if there is a timeframe specified') @@ -539,7 +548,7 @@ def get_names(self, conf, use_rule=None): rule_files = [] if 'scan_subdirectories' in conf and conf['scan_subdirectories']: for ruledir in rule_folders: - for root, folders, files in os.walk(ruledir): + for root, folders, files in os.walk(ruledir, followlinks=True): # Openshift/k8s configmap fix for ..data and ..2021_05..date directories that loop with os.walk() folders[:] = [d for d in folders if not d.startswith('..')] for filename in files: From b68c2ffec9ce08f9b1736e090335736f93706dde Mon Sep 17 00:00:00 2001 From: Vincent Bisserie Date: Thu, 3 Jun 2021 15:18:16 +0200 Subject: [PATCH 0267/1065] Encode json output before writing test data Signed-off-by: Vincent Bisserie --- elastalert/test_rule.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/test_rule.py b/elastalert/test_rule.py index 0797985dd..041fa04e7 100644 --- a/elastalert/test_rule.py +++ b/elastalert/test_rule.py @@ -434,7 +434,7 @@ def run_rule_test(self): with open(args.save, 'wb') as data_file: # Add _id to _source for dump [doc['_source'].update({'_id': doc['_id']}) for doc in hits] - data_file.write(json.dumps([doc['_source'] for doc in hits], indent=4)) + data_file.write(str.encode(json.dumps([doc['_source'] for doc in hits], indent=4))) if args.use_downloaded: if hits: args.json = args.save From 8e06619074c39bacef87029dc0e7bf18de846f48 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 4 Jun 2021 19:04:01 +0900 Subject: [PATCH 0268/1065] Fixed post_ignore_ssl_errors in http_post alerter --- elastalert/alerters/httppost.py | 6 +- tests/alerters/httppost_test.py | 100 +++++++++++++++++++++----------- 2 files changed, 69 insertions(+), 37 deletions(-) diff --git a/elastalert/alerters/httppost.py b/elastalert/alerters/httppost.py index 74f1635b0..d5f4aaff3 100644 --- a/elastalert/alerters/httppost.py +++ b/elastalert/alerters/httppost.py @@ -13,11 +13,11 @@ class HTTPPostAlerter(Alerter): def __init__(self, rule): super(HTTPPostAlerter, self).__init__(rule) - post_url = self.rule.get('http_post_url') + post_url = self.rule['http_post_url'] if isinstance(post_url, str): post_url = [post_url] self.post_url = post_url - self.post_proxy = self.rule.get('http_post_proxy') + self.post_proxy = self.rule.get('http_post_proxy', None) self.post_payload = self.rule.get('http_post_payload', {}) self.post_static_payload = self.rule.get('http_post_static_payload', {}) self.post_all_values = self.rule.get('http_post_all_values', not self.post_payload) @@ -41,6 +41,8 @@ def alert(self, matches): verify = self.post_ca_certs else: verify = not self.post_ignore_ssl_errors + if self.post_ignore_ssl_errors: + requests.packages.urllib3.disable_warnings() headers.update(self.post_http_headers) proxies = {'https': self.post_proxy} if self.post_proxy else None diff --git a/tests/alerters/httppost_test.py b/tests/alerters/httppost_test.py index bff4aa202..e84f45fef 100644 --- a/tests/alerters/httppost_test.py +++ b/tests/alerters/httppost_test.py @@ -213,49 +213,31 @@ def test_http_alerter_headers(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_http_alerter_post_ca_certs_true(): +@pytest.mark.parametrize('ca_certs, ignore_ssl_errors, excpet_verify', [ + ('', '', 
True), + ('', True, False), + ('', False, True), + (True, '', True), + (True, True, True), + (True, False, True), + (False, '', True), + (False, True, False), + (False, False, True) +]) +def test_http_alerter_post_ca_certs(ca_certs, ignore_ssl_errors, excpet_verify): rule = { 'name': 'Test HTTP Post Alerter Without Payload', 'type': 'any', 'http_post_url': 'http://test.webhook.url', 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_ca_certs': True, 'alert': [] } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = HTTPPostAlerter(rule) - match = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - expected_data = { - '@timestamp': '2017-01-01T00:00:00', - 'somefield': 'foobarbaz', - 'name': 'somestaticname' - } - mock_post_request.assert_called_once_with( - rule['http_post_url'], - data=mock.ANY, - headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, - proxies=None, - timeout=10, - verify=True - ) - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + if ca_certs != '': + rule['http_post_ca_certs'] = ca_certs + if ignore_ssl_errors != '': + rule['http_post_ignore_ssl_errors'] = ignore_ssl_errors -def test_http_alerter_post_ca_certs_false(): - rule = { - 'name': 'Test HTTP Post Alerter Without Payload', - 'type': 'any', - 'http_post_url': 'http://test.webhook.url', - 'http_post_static_payload': {'name': 'somestaticname'}, - 'http_post_ca_certs': False, - 'alert': [] - } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) alert = HTTPPostAlerter(rule) @@ -276,7 +258,7 @@ def test_http_alerter_post_ca_certs_false(): headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, timeout=10, - verify=True + verify=excpet_verify ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -303,3 +285,51 @@ def test_http_alerter_post_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_http_getinfo(): + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'http_post_url': 'http://test.webhook.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + + expected_data = { + 'type': 'http_post', + 'http_post_webhook_url': ['http://test.webhook.url'] + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('http_post_webhook_url, expected_data', [ + ('', True), + ('http://test.webhook.url', + { + 'type': 'http_post', + 'http_post_webhook_url': ['http://test.webhook.url'] + }), +]) +def test_http_key_error(http_post_webhook_url, expected_data): + try: + rule = { + 'name': 'Test HTTP Post Alerter Without Payload', + 'type': 'any', + 'alert': [] + } + + if http_post_webhook_url != '': + rule['http_post_webhook_url'] = http_post_webhook_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HTTPPostAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data From f1d46ab07799ed08a400f5a2cc735d10691225d1 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 4 Jun 2021 19:16:02 +0900 Subject: [PATCH 0269/1065] Bump pytest form 6.1.2 to 6.2.4 --- pytest.ini | 2 ++ requirements-dev.txt | 2 +- tox.ini | 2 +- 3 files changed, 4 insertions(+), 2 
deletions(-) diff --git a/pytest.ini b/pytest.ini index 0ad3341d9..259ba35a2 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,3 +1,5 @@ [pytest] markers = elasticsearch: mark a test as using elasticsearch. +filterwarnings = + ignore::pytest.PytestUnhandledThreadExceptionWarning \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index f3d183c22..09da6ae27 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -7,7 +7,7 @@ m2r2 pluggy>=0.12.0 pre-commit pylint<2.9 -pytest==6.1.2 +pytest==6.2.4 pytest-xdist==2.2.1 setuptools sphinx_rtd_theme diff --git a/tox.ini b/tox.ini index 32df3ef98..6f42fae25 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,7 @@ envlist = py39,docs [testenv] deps = -rrequirements-dev.txt commands = - pytest --cov=elastalert --cov-report=term-missing --cov-branch --strict tests/ -n 4 {posargs} + pytest --cov=elastalert --cov-report=term-missing --cov-branch --strict-markers tests/ -n 4 {posargs} flake8 . [testenv:lint] From 7809dce1031b8e51664cfb08c6af324ee1a5659c Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 4 Jun 2021 19:27:02 +0900 Subject: [PATCH 0270/1065] Fix chatwork alerter --- elastalert/alerters/chatwork.py | 4 +-- tests/alerters/chatwork_test.py | 54 +++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 2 deletions(-) diff --git a/elastalert/alerters/chatwork.py b/elastalert/alerters/chatwork.py index f7f7db3fd..4a6330bca 100644 --- a/elastalert/alerters/chatwork.py +++ b/elastalert/alerters/chatwork.py @@ -14,8 +14,8 @@ class ChatworkAlerter(Alerter): def __init__(self, rule): super(ChatworkAlerter, self).__init__(rule) - self.chatwork_apikey = self.rule.get('chatwork_apikey') - self.chatwork_room_id = self.rule.get('chatwork_room_id') + self.chatwork_apikey = self.rule['chatwork_apikey'] + self.chatwork_room_id = self.rule['chatwork_room_id'] self.url = 'https://api.chatwork.com/v2/rooms/%s/messages' % (self.chatwork_room_id) self.chatwork_proxy = self.rule.get('chatwork_proxy', None) self.chatwork_proxy_login = self.rule.get('chatwork_proxy_login', None) diff --git a/tests/alerters/chatwork_test.py b/tests/alerters/chatwork_test.py index 8d60a0f37..bc81ae34a 100644 --- a/tests/alerters/chatwork_test.py +++ b/tests/alerters/chatwork_test.py @@ -101,3 +101,57 @@ def test_chatwork_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_chatwork_getinfo(): + rule = { + 'name': 'Test Chatwork Rule', + 'type': 'any', + 'chatwork_apikey': 'xxxx1', + 'chatwork_room_id': 'xxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ChatworkAlerter(rule) + + expected_data = { + "type": "chatwork", + "chatwork_room_id": "xxxx2" + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('chatwork_apikey, chatwork_room_id, expected_data', [ + ('', '', True), + ('xxxx1', '', True), + ('', 'xxxx2', True), + ('xxxx1', 'xxxx2', + { + "type": "chatwork", + "chatwork_room_id": "xxxx2" + }), +]) +def test_chatwork_key_error(chatwork_apikey, chatwork_room_id, expected_data): + try: + rule = { + 'name': 'Test Chatwork Rule', + 'type': 'any', + 'alert': [] + } + + if chatwork_apikey != '': + rule['chatwork_apikey'] = chatwork_apikey + + if chatwork_room_id != '': + rule['chatwork_room_id'] = chatwork_room_id + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ChatworkAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: 
+ assert expected_data From 47c1ce7be9d19ec432df01f6ba1fdb7bf9d0a73c Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 4 Jun 2021 19:39:01 +0900 Subject: [PATCH 0271/1065] Fix datadog alerter --- elastalert/alerters/datadog.py | 6 ++-- tests/alerters/datadog_test.py | 53 ++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 3 deletions(-) diff --git a/elastalert/alerters/datadog.py b/elastalert/alerters/datadog.py index b5796e95d..3b71e4264 100644 --- a/elastalert/alerters/datadog.py +++ b/elastalert/alerters/datadog.py @@ -8,13 +8,13 @@ class DatadogAlerter(Alerter): - ''' Creates a Datadog Event for each alert ''' + """ Creates a Datadog Event for each alert """ required_options = frozenset(['datadog_api_key', 'datadog_app_key']) def __init__(self, rule): super(DatadogAlerter, self).__init__(rule) - self.dd_api_key = self.rule.get('datadog_api_key', None) - self.dd_app_key = self.rule.get('datadog_app_key', None) + self.dd_api_key = self.rule['datadog_api_key'] + self.dd_app_key = self.rule['datadog_app_key'] def alert(self, matches): url = 'https://api.datadoghq.com/api/v1/events' diff --git a/tests/alerters/datadog_test.py b/tests/alerters/datadog_test.py index 956396e9d..74b7c6547 100644 --- a/tests/alerters/datadog_test.py +++ b/tests/alerters/datadog_test.py @@ -67,3 +67,56 @@ def test_datadog_alerterea_exception(): alert.alert([match]) except EAException: assert True + + +def test_datadog_getinfo(): + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'datadog_api_key': 'test-api-key', + 'datadog_app_key': 'test-app-key', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + + expected_data = {'type': 'datadog'} + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('datadog_api_key, datadog_app_key, expected_data', [ + ('', '', True), + ('xxxx1', '', True), + ('', 'xxxx2', True), + ('xxxx1', 'xxxx2', + { + 'type': 'datadog' + }), +]) +def test_datadog_key_error(datadog_api_key, datadog_app_key, expected_data): + try: + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + + if datadog_api_key != '': + rule['datadog_api_key'] = datadog_api_key + + if datadog_app_key != '': + rule['datadog_app_key'] = datadog_app_key + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + + expected_data = {'type': 'datadog'} + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data From 4f026433a52bc1fc4e349a555de1676825f06bde Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 4 Jun 2021 19:50:28 +0900 Subject: [PATCH 0272/1065] Changed required items of dingtalk alerter --- docs/source/ruletypes.rst | 2 +- elastalert/alerters/dingtalk.py | 6 ++-- tests/alerters/dingtalk_test.py | 50 +++++++++++++++++++++++++++++++++ 3 files changed, 54 insertions(+), 4 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 2480e4fac..4e9fd5050 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1779,7 +1779,7 @@ Required: ``dingtalk_access_token``: Dingtalk access token. -``dingtalk_msgtype``: Dingtalk msgtype. ``text``, ``markdown``, ``single_action_card``, ``action_card``. +``dingtalk_msgtype``: Dingtalk msgtype, default to ``text``. 
Other options: ``markdown``, ``single_action_card``, ``action_card``.
 
 dingtalk_msgtype single_action_card
 Required:
 
diff --git a/elastalert/alerters/dingtalk.py b/elastalert/alerters/dingtalk.py
index 3c5282f15..e87eca6b3 100644
--- a/elastalert/alerters/dingtalk.py
+++ b/elastalert/alerters/dingtalk.py
@@ -11,13 +11,13 @@
 
 class DingTalkAlerter(Alerter):
     """ Creates a DingTalk room message for each alert """
-    required_options = frozenset(['dingtalk_access_token', 'dingtalk_msgtype'])
+    required_options = frozenset(['dingtalk_access_token'])
 
     def __init__(self, rule):
         super(DingTalkAlerter, self).__init__(rule)
-        self.dingtalk_access_token = self.rule.get('dingtalk_access_token')
+        self.dingtalk_access_token = self.rule['dingtalk_access_token']
         self.dingtalk_webhook_url = 'https://oapi.dingtalk.com/robot/send?access_token=%s' % (self.dingtalk_access_token)
-        self.dingtalk_msgtype = self.rule.get('dingtalk_msgtype')
+        self.dingtalk_msgtype = self.rule.get('dingtalk_msgtype', 'text')
         self.dingtalk_single_title = self.rule.get('dingtalk_single_title', 'elastalert')
         self.dingtalk_single_url = self.rule.get('dingtalk_single_url', '')
         self.dingtalk_btn_orientation = self.rule.get('dingtalk_btn_orientation', '')
diff --git a/tests/alerters/dingtalk_test.py b/tests/alerters/dingtalk_test.py
index 77f475568..5a4a59ae1 100644
--- a/tests/alerters/dingtalk_test.py
+++ b/tests/alerters/dingtalk_test.py
@@ -292,3 +292,53 @@ def test_dingtalk_ea_exception():
         alert.alert([match])
     except EAException:
         assert True
+
+
+def test_dingtalk_getinfo():
+    rule = {
+        'name': 'Test DingTalk Rule',
+        'type': 'any',
+        'dingtalk_access_token': 'xxxxxxx',
+        'alert': [],
+        'alert_subject': 'Test DingTalk'
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = DingTalkAlerter(rule)
+
+    expected_data = {
+        'type': 'dingtalk',
+        "dingtalk_webhook_url": 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx'
+    }
+    actual_data = alert.get_info()
+    assert expected_data == actual_data
+
+
+@pytest.mark.parametrize('dingtalk_access_token, expected_data', [
+    ('', True),
+    ('xxxxxxx',
+     {
+         'type': 'dingtalk',
+         "dingtalk_webhook_url": 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx'
+     }),
+])
+def test_dingtalk_key_error(dingtalk_access_token, expected_data):
+    try:
+        rule = {
+            'name': 'Test DingTalk Rule',
+            'type': 'any',
+            'alert': [],
+            'alert_subject': 'Test DingTalk'
+        }
+
+        if dingtalk_access_token != '':
+            rule['dingtalk_access_token'] = dingtalk_access_token
+
+        rules_loader = FileRulesLoader({})
+        rules_loader.load_modules(rule)
+        alert = DingTalkAlerter(rule)
+
+        actual_data = alert.get_info()
+        assert expected_data == actual_data
+    except KeyError:
+        assert expected_data
From 48bcc6f1f5cfac3b84279ac823f8ca652cbd4837 Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Fri, 4 Jun 2021 19:59:52 +0900
Subject: [PATCH 0273/1065] fix zabbix alerter

---
 docs/source/ruletypes.rst     |  4 +--
 elastalert/alerters/zabbix.py |  4 +--
 tests/alerters/zabbix_test.py | 57 +++++++++++++++++++++++++++++
 3 files changed, 61 insertions(+), 4 deletions(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 2480e4fac..3f588995a 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2768,9 +2768,9 @@ Zabbix will send notification to a Zabbix server. The item in the host specified
 
 Required:
 
-``zbx_sender_host``: The address where zabbix server is running.
+``zbx_sender_host``: The address where zabbix server is running, defaults to ``'localhost'``.
-``zbx_sender_port``: The port where zabbix server is listenning.
+``zbx_sender_port``: The port where zabbix server is listening, defaults to ``10051``.
 
 ``zbx_host``: This field setup the host in zabbix that receives the value sent by ElastAlert 2.
 
diff --git a/elastalert/alerters/zabbix.py b/elastalert/alerters/zabbix.py
index 533ac7ef4..da96f0ed3 100644
--- a/elastalert/alerters/zabbix.py
+++ b/elastalert/alerters/zabbix.py
@@ -53,8 +53,8 @@ def __init__(self, *args):
 
         self.zbx_sender_host = self.rule.get('zbx_sender_host', 'localhost')
         self.zbx_sender_port = self.rule.get('zbx_sender_port', 10051)
-        self.zbx_host = self.rule.get('zbx_host')
-        self.zbx_key = self.rule.get('zbx_key')
+        self.zbx_host = self.rule['zbx_host']
+        self.zbx_key = self.rule['zbx_key']
         self.timestamp_field = self.rule.get('timestamp_field', '@timestamp')
         self.timestamp_type = self.rule.get('timestamp_type', 'iso')
         self.timestamp_strptime = self.rule.get('timestamp_strptime', '%Y-%m-%dT%H:%M:%S.%fZ')
diff --git a/tests/alerters/zabbix_test.py b/tests/alerters/zabbix_test.py
index a25cae9cc..1cee2ec75 100644
--- a/tests/alerters/zabbix_test.py
+++ b/tests/alerters/zabbix_test.py
@@ -1,3 +1,4 @@
+import pytest
 import mock
 
 from elastalert.alerters.zabbix import ZabbixAlerter
@@ -32,3 +33,59 @@ def test_zabbix_basic():
         }
         alerter_args = mock_zbx_send.call_args.args
         assert vars(alerter_args[0][0]) == zabbix_metrics
+
+
+def test_zabbix_getinfo():
+    rule = {
+        'name': 'Basic Zabbix test',
+        'type': 'any',
+        'alert_text_type': 'alert_text_only',
+        'alert': [],
+        'alert_subject': 'Test Zabbix',
+        'zbx_host': 'example.com',
+        'zbx_key': 'example-key'
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = ZabbixAlerter(rule)
+
+    expected_data = {
+        'type': 'zabbix Alerter'
+    }
+    actual_data = alert.get_info()
+    assert expected_data == actual_data
+
+
+@pytest.mark.parametrize('zbx_host, zbx_key, expected_data', [
+    ('', '', True),
+    ('example.com', '', True),
+    ('', 'example-key', True),
+    ('example.com', 'example-key',
+     {
+         'type': 'zabbix Alerter'
+     })
+])
+def test_zabbix_key_error(zbx_host, zbx_key, expected_data):
+    try:
+        rule = {
+            'name': 'Basic Zabbix test',
+            'type': 'any',
+            'alert_text_type': 'alert_text_only',
+            'alert': [],
+            'alert_subject': 'Test Zabbix'
+        }
+
+        if zbx_host != '':
+            rule['zbx_host'] = zbx_host
+
+        if zbx_key != '':
+            rule['zbx_key'] = zbx_key
+
+        rules_loader = FileRulesLoader({})
+        rules_loader.load_modules(rule)
+        alert = ZabbixAlerter(rule)
+
+        actual_data = alert.get_info()
+        assert expected_data == actual_data
+    except KeyError:
+        assert expected_data
From 00b25e6ac223d6a6d0dbdbc20da7efae69b6b314 Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Fri, 4 Jun 2021 20:09:12 +0900
Subject: [PATCH 0274/1065] Changed required items of msteams alerter

---
 docs/source/ruletypes.rst    |  4 +--
 elastalert/alerters/teams.py |  2 +-
 tests/alerters/teams_test.py | 50 ++++++++++++++++++++++++++++++
 3 files changed, 53 insertions(+), 3 deletions(-)

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 2480e4fac..f82851c75 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2233,10 +2233,10 @@ The alerter requires the following options:
 
 ``ms_teams_webhook_url``: The webhook URL that includes your auth data and the ID of the channel you want to post to. Go to the Connectors menu in your channel and configure an Incoming Webhook, then copy the resulting URL. You can use a list of URLs to send to multiple channels.
-``ms_teams_alert_summary``: Summary should be configured according to `MS documentation `_, although it seems not displayed by Teams currently. - Optional: +``ms_teams_alert_summary``: Summary should be configured according to `MS documentation `_, although it seems not displayed by Teams currently, defaults to ``ElastAlert Message``. + ``ms_teams_theme_color``: By default the alert will be posted without any color line. To add color, set this attribute to a HTML color value e.g. ``#ff0000`` for red. ``ms_teams_proxy``: By default ElastAlert will not use a network proxy to send notifications to MS Teams. Set this option using ``hostname:port`` if you need to use a proxy. diff --git a/elastalert/alerters/teams.py b/elastalert/alerters/teams.py index 201159bab..fb33fe310 100644 --- a/elastalert/alerters/teams.py +++ b/elastalert/alerters/teams.py @@ -8,7 +8,7 @@ class MsTeamsAlerter(Alerter): """ Creates a Microsoft Teams Conversation Message for each alert """ - required_options = frozenset(['ms_teams_webhook_url', 'ms_teams_alert_summary']) + required_options = frozenset(['ms_teams_webhook_url']) def __init__(self, rule): super(MsTeamsAlerter, self).__init__(rule) diff --git a/tests/alerters/teams_test.py b/tests/alerters/teams_test.py index a55a402b3..72970427b 100644 --- a/tests/alerters/teams_test.py +++ b/tests/alerters/teams_test.py @@ -143,3 +143,53 @@ def test_ms_teams_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_ms_teams_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'ms_teams_webhook_url': 'http://test.webhook.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + + expected_data = { + 'type': 'ms_teams', + 'ms_teams_webhook_url': ['http://test.webhook.url'] + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('ms_teams_webhook_url, expected_data', [ + ('', True), + ('http://test.webhook.url', + { + 'type': 'ms_teams', + 'ms_teams_webhook_url': ['http://test.webhook.url'] + }) +]) +def test_ms_teams_key_error(ms_teams_webhook_url, expected_data): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [] + } + + if ms_teams_webhook_url != '': + rule['ms_teams_webhook_url'] = ms_teams_webhook_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MsTeamsAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data From 84e60ce2dac542100a9bc8c0e4c7ba11fe1440ed Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 4 Jun 2021 20:19:14 +0900 Subject: [PATCH 0275/1065] Fix sns and ses alerter --- docs/source/elastalert.rst | 4 +-- docs/source/ruletypes.rst | 8 ++--- elastalert/alerters/ses.py | 4 +-- elastalert/alerters/sns.py | 9 ++++-- tests/alerters/ses_test.py | 60 ++++++++++++++++++++++++++++++++++++++ tests/alerters/sns_test.py | 55 ++++++++++++++++++++++++++++++++++ 6 files changed, 129 insertions(+), 11 deletions(-) create mode 100644 tests/alerters/ses_test.py create mode 100644 tests/alerters/sns_test.py diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 3cf1df206..625ae2296 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -28,8 +28,8 @@ Several rule types with common monitoring paradigms are included with ElastAlert Currently, we have support built in for these alert 
types: - Alerta -- AWS SES -- AWS SNS +- AWS SES (Amazon Simple Email Service) +- AWS SNS (Amazon Simple Notification Service) - Chatwork - Command - Datadog diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 2480e4fac..ef190613a 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1574,8 +1574,8 @@ Example usage using new-style format:: alerta_attributes_values: ["{key}", "{logdate}", "{sender_ip}" ] alerta_text: "Probe {hostname} is UP at {logdate} GMT" -AWS SES -~~~~~~~ +AWS SES (Amazon Simple Email Service) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The AWS SES alerter is similar to Email alerter but uses AWS SES to send emails. The AWS SES alerter can use AWS credentials from the rule yaml, standard AWS config files or environment variables. @@ -1633,8 +1633,8 @@ Example When to use aws_profile usage:: ses_from_addr: "xxxx1@xxx.com" ses_email: "xxxx1@xxx.com" -AWS SNS -~~~~~~~ +AWS SNS (Amazon Simple Notification Service) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The AWS SNS alerter will send an AWS SNS notification. The body of the notification is formatted the same as with other alerters. The AWS SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or diff --git a/elastalert/alerters/ses.py b/elastalert/alerters/ses.py index 8a48abc6b..fdb118814 100644 --- a/elastalert/alerters/ses.py +++ b/elastalert/alerters/ses.py @@ -11,8 +11,8 @@ class SesAlerter(Alerter): def __init__(self, *args): super(SesAlerter, self).__init__(*args) - self.aws_access_key_id = self.rule.get('ses_aws_access_key_id') - self.aws_secret_access_key = self.rule.get('ses_aws_secret_access_key') + self.aws_access_key_id = self.rule['ses_aws_access_key_id'] + self.aws_secret_access_key = self.rule['ses_aws_secret_access_key'] self.aws_region = self.rule.get('ses_aws_region', 'us-east-1') self.aws_profile = self.rule.get('ses_aws_profile', '') diff --git a/elastalert/alerters/sns.py b/elastalert/alerters/sns.py index e77eeb48e..f3b96fbaf 100644 --- a/elastalert/alerters/sns.py +++ b/elastalert/alerters/sns.py @@ -10,9 +10,9 @@ class SnsAlerter(Alerter): def __init__(self, *args): super(SnsAlerter, self).__init__(*args) - self.sns_topic_arn = self.rule.get('sns_topic_arn', '') - self.sns_aws_access_key_id = self.rule.get('sns_aws_access_key_id') - self.sns_aws_secret_access_key = self.rule.get('sns_aws_secret_access_key') + self.sns_topic_arn = self.rule['sns_topic_arn'] + self.sns_aws_access_key_id = self.rule['sns_aws_access_key_id'] + self.sns_aws_secret_access_key = self.rule['sns_aws_secret_access_key'] self.sns_aws_region = self.rule.get('sns_aws_region', 'us-east-1') self.profile = self.rule.get('sns_aws_profile', None) @@ -39,3 +39,6 @@ def alert(self, matches): Subject=self.create_title(matches) ) elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn)) + + def get_info(self): + return {'type': 'sns'} diff --git a/tests/alerters/ses_test.py b/tests/alerters/ses_test.py new file mode 100644 index 000000000..b1a6b93f7 --- /dev/null +++ b/tests/alerters/ses_test.py @@ -0,0 +1,60 @@ +import pytest + +from elastalert.alerters.ses import SesAlerter +from elastalert.loaders import FileRulesLoader + + +def test_ses_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'ses_email': 'test@aaa.com', + 'ses_aws_access_key_id': 'access key id', + 'ses_aws_secret_access_key': 'secret access key', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + 
rules_loader.load_modules(rule) + alert = SesAlerter(rule) + + expected_data = { + 'type': 'ses', + 'recipients': ['test@aaa.com'] + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('ses_email, ses_from_addr, expected_data', [ + ('', '', True), + ('test@aaa.com', '', True), + ('', 'xxxxx2', True), + ('test@aaa.com', 'xxxxx2', + { + 'type': 'ses', + 'recipients': ['test@aaa.com'] + }), +]) +def test_ses_key_error(ses_email, ses_from_addr, expected_data): + try: + rule = { + 'name': 'Test Telegram Rule', + 'type': 'any', + 'alert': [] + } + + if ses_email != '': + rule['ses_email'] = ses_email + + if ses_from_addr != '': + rule['ses_from_addr'] = ses_from_addr + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SesAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data diff --git a/tests/alerters/sns_test.py b/tests/alerters/sns_test.py new file mode 100644 index 000000000..c9f81519b --- /dev/null +++ b/tests/alerters/sns_test.py @@ -0,0 +1,55 @@ +import pytest + +from elastalert.alerters.sns import SnsAlerter +from elastalert.loaders import FileRulesLoader + + +def test_sns_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'ses_email': 'test@aaa.com', + 'sns_topic_arn': 'topic arn', + 'sns_aws_access_key_id': 'access key id', + 'sns_aws_secret_access_key': 'secret access key', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SnsAlerter(rule) + + expected_data = { + 'type': 'sns' + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('sns_topic_arn, expected_data', [ + ('', True), + ('xxxx', + { + 'type': 'sns' + }) +]) +def test_sns_key_error(sns_topic_arn, expected_data): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [] + } + + if sns_topic_arn != '': + rule['sns_topic_arn'] = sns_topic_arn + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SnsAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data From 2bf9f60c9885b3f560936ae9d31e85ca30efcfe7 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 4 Jun 2021 20:54:01 +0900 Subject: [PATCH 0276/1065] fix gitter --- elastalert/alerters/gitter.py | 5 +- tests/alerters/gitter_test.py | 136 +++++++++++++++------------------- 2 files changed, 65 insertions(+), 76 deletions(-) diff --git a/elastalert/alerters/gitter.py b/elastalert/alerters/gitter.py index 326d86eb0..b6d14aa69 100644 --- a/elastalert/alerters/gitter.py +++ b/elastalert/alerters/gitter.py @@ -30,7 +30,10 @@ def alert(self, matches): } try: - response = requests.post(self.gitter_webhook_url, json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies) + response = requests.post(self.gitter_webhook_url, + data=json.dumps(payload, cls=DateTimeEncoder), + headers=headers, + proxies=proxies) response.raise_for_status() except RequestException as e: raise EAException("Error posting to Gitter: %s" % e) diff --git a/tests/alerters/gitter_test.py b/tests/alerters/gitter_test.py index 5bf0e7ee7..696e88599 100644 --- a/tests/alerters/gitter_test.py +++ b/tests/alerters/gitter_test.py @@ -9,48 +9,22 @@ from elastalert.util import EAException -def test_gitter_msg_level_default(): +@pytest.mark.parametrize('msg_level, 
except_msg_level', [ + ('', 'error'), + ('error', 'error'), + ('info', 'info') +]) +def test_gitter_msg_level(msg_level, except_msg_level): rule = { 'name': 'Test Gitter Rule', 'type': 'any', 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', 'alert': [] } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = GitterAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - expected_data = { - 'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', - 'level': 'error' - } - - mock_post_request.assert_called_once_with( - rule['gitter_webhook_url'], - mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][0][1]) - assert expected_data == actual_data - assert 'error' in actual_data['level'] + if msg_level != '': + rule['gitter_msg_level'] = msg_level -def test_gitter_msg_level_info(): - rule = { - 'name': 'Test Gitter Rule', - 'type': 'any', - 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', - 'gitter_msg_level': 'info', - 'alert': [] - } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) alert = GitterAlerter(rule) @@ -63,53 +37,18 @@ def test_gitter_msg_level_info(): expected_data = { 'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', - 'level': 'info' + 'level': except_msg_level } mock_post_request.assert_called_once_with( rule['gitter_webhook_url'], - mock.ANY, + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None ) - actual_data = json.loads(mock_post_request.call_args_list[0][0][1]) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data - assert 'info' in actual_data['level'] - - -def test_gitter_msg_level_error(): - rule = { - 'name': 'Test Gitter Rule', - 'type': 'any', - 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', - 'gitter_msg_level': 'error', - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = GitterAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - expected_data = { - 'message': 'Test Gitter Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', - 'level': 'error' - } - - mock_post_request.assert_called_once_with( - rule['gitter_webhook_url'], - mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None - ) - - actual_data = json.loads(mock_post_request.call_args_list[0][0][1]) - assert expected_data == actual_data - assert 'error' in actual_data['level'] def test_gitter_proxy(): @@ -137,14 +76,13 @@ def test_gitter_proxy(): mock_post_request.assert_called_once_with( rule['gitter_webhook_url'], - mock.ANY, + data=mock.ANY, headers={'content-type': 'application/json'}, proxies={'https': 'http://proxy.url'} ) - actual_data = json.loads(mock_post_request.call_args_list[0][0][1]) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data - assert 'error' in actual_data['level'] def test_gitter_ea_exception(): @@ -169,3 +107,51 @@ def test_gitter_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_gitter_getinfo(): + rule = { + 'name': 'Test Gitter Rule', + 'type': 'any', + 
'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GitterAlerter(rule) + + expected_data = { + 'type': 'gitter', + 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx' + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('gitter_webhook_url, expected_data', [ + ('', True), + ('https://webhooks.gitter.im/e/xxxxx', + { + 'type': 'gitter', + 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx' + }) +]) +def test_gitter_key_error(gitter_webhook_url, expected_data): + try: + rule = { + 'name': 'Test Gitter Rule', + 'type': 'any', + 'alert': [] + } + + if gitter_webhook_url != '': + rule['gitter_webhook_url'] = gitter_webhook_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GitterAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data From 414fc9bf0e447c2b094078e1decb362a692511a5 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 4 Jun 2021 21:54:39 +0900 Subject: [PATCH 0277/1065] add Alerter testcode --- elastalert/alerters/command.py | 1 + elastalert/alerters/exotel.py | 1 + elastalert/alerters/rocketchat.py | 3 +- elastalert/alerters/twilio.py | 4 +- tests/alerters/alerta_test.py | 54 +++++++ tests/alerters/command_test.py | 44 +++++- tests/alerters/debug_test.py | 20 +++ tests/alerters/discord_test.py | 54 +++++++ tests/alerters/email_test.py | 239 +++++++++++++++++++++++++----- tests/alerters/exotel_test.py | 25 ++++ tests/alerters/googlechat_test.py | 54 ++++++- tests/alerters/jira_test.py | 36 +++++ tests/alerters/line_test.py | 48 ++++++ tests/alerters/mattermost_test.py | 130 ++++++++++++++++ tests/alerters/opsgenie_test.py | 225 +++++++--------------------- tests/alerters/pagerduty_test.py | 123 +++++++++++++++ tests/alerters/pagertree_test.py | 52 +++++++ tests/alerters/rocketchat_test.py | 113 ++++++++++---- tests/alerters/servicenow_test.py | 29 ++++ tests/alerters/slack_test.py | 153 ++++++++++++------- tests/alerters/stomp_test.py | 24 +++ tests/alerters/telegram_test.py | 56 ++++++- tests/alerters/thehive_test.py | 75 ++++++++++ tests/alerters/twilio_test.py | 109 ++++++++++++++ tests/alerters/victorops_test.py | 153 ++++++++++++++++++- 25 files changed, 1518 insertions(+), 307 deletions(-) create mode 100644 tests/alerters/debug_test.py create mode 100644 tests/alerters/exotel_test.py create mode 100644 tests/alerters/stomp_test.py create mode 100644 tests/alerters/twilio_test.py diff --git a/elastalert/alerters/command.py b/elastalert/alerters/command.py index ed7edc046..dc79afbb0 100644 --- a/elastalert/alerters/command.py +++ b/elastalert/alerters/command.py @@ -6,6 +6,7 @@ class CommandAlerter(Alerter): + """ Sends an command alert """ required_options = set(['command']) def __init__(self, *args): diff --git a/elastalert/alerters/exotel.py b/elastalert/alerters/exotel.py index 6c1cef688..e7c63ba2a 100644 --- a/elastalert/alerters/exotel.py +++ b/elastalert/alerters/exotel.py @@ -8,6 +8,7 @@ class ExotelAlerter(Alerter): + """ Sends an exotel alert """ required_options = frozenset(['exotel_account_sid', 'exotel_auth_token', 'exotel_to_number', 'exotel_from_number']) def __init__(self, rule): diff --git a/elastalert/alerters/rocketchat.py b/elastalert/alerters/rocketchat.py index 00da58cce..6d5c4ebc6 100644 --- a/elastalert/alerters/rocketchat.py +++ b/elastalert/alerters/rocketchat.py @@ -93,4 
+93,5 @@ def alert(self, matches): def get_info(self): return {'type': 'rocketchat', - 'rocket_chat_username_override': self.rocket_chat_username_override} + 'rocket_chat_username_override': self.rocket_chat_username_override, + 'rocket_chat_webhook_url': self.rocket_chat_webhook_url} diff --git a/elastalert/alerters/twilio.py b/elastalert/alerters/twilio.py index f603a06c3..3878633a7 100644 --- a/elastalert/alerters/twilio.py +++ b/elastalert/alerters/twilio.py @@ -13,8 +13,8 @@ def __init__(self, rule): self.twilio_account_sid = self.rule['twilio_account_sid'] self.twilio_auth_token = self.rule['twilio_auth_token'] self.twilio_to_number = self.rule['twilio_to_number'] - self.twilio_from_number = self.rule.get('twilio_from_number') - self.twilio_message_service_sid = self.rule.get('twilio_message_service_sid') + self.twilio_from_number = self.rule.get('twilio_from_number', None) + self.twilio_message_service_sid = self.rule.get('twilio_message_service_sid', None) self.twilio_use_copilot = self.rule.get('twilio_use_copilot', False) def alert(self, matches): diff --git a/tests/alerters/alerta_test.py b/tests/alerters/alerta_test.py index adc537cd0..a9d827797 100644 --- a/tests/alerters/alerta_test.py +++ b/tests/alerters/alerta_test.py @@ -651,3 +651,57 @@ def test_alerta_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_alerta_getinfo(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'type': 'any', + 'alert': 'alerta' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + + expected_data = { + 'type': 'alerta', + 'alerta_url': 'http://elastalerthost:8080/api/alert' + } + actual_data = alert.get_info() + + assert expected_data == actual_data + + +@pytest.mark.parametrize('alerta_api_url, expected_data', [ + ('', True), + ('http://elastalerthost:8080/api/alert', + { + 'type': 'alerta', + 'alerta_url': 'http://elastalerthost:8080/api/alert' + }), +]) +def test_alerta_key_error(alerta_api_url, expected_data): + try: + rule = { + 'name': 'Test Alerta rule!', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'type': 'any', + 'alert': 'alerta' + } + + if alerta_api_url != '': + rule['alerta_api_url'] = alerta_api_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception: + assert expected_data diff --git a/tests/alerters/command_test.py b/tests/alerters/command_test.py index 95279e539..73efb7344 100644 --- a/tests/alerters/command_test.py +++ b/tests/alerters/command_test.py @@ -10,7 +10,7 @@ from tests.alerts_test import mock_rule -def test_command(): +def test_command_getinfo(): # Test command as list with a formatted arg rule = {'command': ['/bin/test/', '--arg', '%(somefield)s']} alert = CommandAlerter(rule) @@ -20,22 +20,39 @@ def test_command(): with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: alert.alert([match]) assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) + expected_data = { + 'type': 'command', + 'command': '/bin/test/ --arg foobarbaz' + } + actual_data = alert.get_info() + assert expected_data == actual_data + +def test_command_old_style_string_format1(): # Test command as string with formatted arg (old-style 
string format) rule = {'command': '/bin/test/ --arg %(somefield)s'} + match = {'@timestamp': '2014-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'nested': {'field': 1}} alert = CommandAlerter(rule) with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: alert.alert([match]) assert mock_popen.called_with('/bin/test --arg foobarbaz', stdin=subprocess.PIPE, shell=False) + +def test_command_old_style_string_format2(): # Test command as string without formatted arg (old-style string format) rule = {'command': '/bin/test/foo.sh'} + match = {'@timestamp': '2014-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'nested': {'field': 1}} alert = CommandAlerter(rule) with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: alert.alert([match]) assert mock_popen.called_with('/bin/test/foo.sh', stdin=subprocess.PIPE, shell=True) - # Test command with pipe_match_json + +def test_command_pipe_match_json(): rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], 'pipe_match_json': True} alert = CommandAlerter(rule) @@ -49,7 +66,8 @@ def test_command(): assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) assert mock_subprocess.communicate.called_with(input=json.dumps(match)) - # Test command with pipe_alert_text + +def test_command_pipe_alert_text(): rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], 'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'} alert = CommandAlerter(rule) @@ -64,7 +82,8 @@ def test_command(): assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) assert mock_subprocess.communicate.called_with(input=alert_text.encode()) - # Test command with fail_on_non_zero_exit + +def test_command_fail_on_non_zero_exit(): rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], 'fail_on_non_zero_exit': True} alert = CommandAlerter(rule) @@ -79,14 +98,14 @@ def test_command(): assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False) assert "Non-zero exit code while running command" in str(exception) - # Test OSError + +def test_command_os_error(): try: rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], 'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'} alert = CommandAlerter(rule) match = {'@timestamp': '2014-01-01T00:00:00', 'somefield': 'foobarbaz'} - alert_text = str(BasicMatchString(rule, match)) mock_run = mock.MagicMock(side_effect=OSError) with mock.patch("elastalert.alerters.command.subprocess.Popen", mock_run), pytest.raises(OSError) as mock_popen: mock_subprocess = mock.Mock() @@ -95,3 +114,16 @@ def test_command(): alert.alert([match]) except EAException: assert True + + +def test_command_key_error(): + try: + rule = {} + alert = CommandAlerter(rule) + match = {'@timestamp': '2014-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'nested': {'field': 1}} + with mock.patch("elastalert.alerters.command.subprocess.Popen"): + alert.alert([match]) + except KeyError: + assert True diff --git a/tests/alerters/debug_test.py b/tests/alerters/debug_test.py new file mode 100644 index 000000000..be09cbc2f --- /dev/null +++ b/tests/alerters/debug_test.py @@ -0,0 +1,20 @@ +from elastalert.alerters.debug import DebugAlerter +from elastalert.loaders import FileRulesLoader + + +def test_debug_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) 
+ alert = DebugAlerter(rule) + + expected_data = { + 'type': 'debug' + } + actual_data = alert.get_info() + assert expected_data == actual_data diff --git a/tests/alerters/discord_test.py b/tests/alerters/discord_test.py index fea064774..693ff9be0 100644 --- a/tests/alerters/discord_test.py +++ b/tests/alerters/discord_test.py @@ -207,3 +207,57 @@ def test_discord_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_discord_getinfo(): + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + + expected_data = { + 'type': 'discord', + 'discord_webhook_url': 'http://xxxxxxx' + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('discord_webhook_url, expected_data', [ + ('', True), + ('http://xxxxxxx', + { + 'type': 'discord', + 'discord_webhook_url': 'http://xxxxxxx' + }), +]) +def test_discord_key_error(discord_webhook_url, expected_data): + try: + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'alert': [], + 'alert_subject': 'Test Discord' + } + + if discord_webhook_url != '': + rule['discord_webhook_url'] = discord_webhook_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + + expected_data = { + 'type': 'discord', + 'discord_webhook_url': 'http://xxxxxxx' + } + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data diff --git a/tests/alerters/email_test.py b/tests/alerters/email_test.py index ec8bdf2a3..6862cb6a4 100644 --- a/tests/alerters/email_test.py +++ b/tests/alerters/email_test.py @@ -1,15 +1,25 @@ import base64 import mock +import pytest from elastalert.alerters.email import EmailAlerter from tests.alerts_test import mock_rule def test_email(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'owner': 'owner_value', - 'alert_subject': 'Test alert for {0}, owned by {1}', 'alert_subject_args': ['test_term', 'owner'], 'snowman': '☃'} + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'owner': 'owner_value', + 'alert_subject': 'Test alert for {0}, owned by {1}', + 'alert_subject_args': ['test_term', 'owner'], + 'snowman': '☃' + } with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() @@ -19,7 +29,14 @@ def test_email(): mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY), + mock.call().sendmail( + mock.ANY, + [ + 'testing@test.test', + 'test@test.test' + ], + mock.ANY + ), mock.call().quit()] assert mock_smtp.mock_calls == expected @@ -32,8 +49,15 @@ def test_email(): def test_email_from_field(): - rule = {'name': 'test alert', 'email': ['testing@test.test'], 'email_add_domain': 'example.com', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_from_field': 'data.user', 'owner': 'owner_value'} + rule = { + 'name': 'test alert', + 
'email': ['testing@test.test'], + 'email_add_domain': 'example.com', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_from_field': 'data.user', + 'owner': 'owner_value' + } # Found, without @ with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() @@ -72,9 +96,18 @@ def test_email_from_field(): def test_email_with_unicode_strings(): - rule = {'name': 'test alert', 'email': 'testing@test.test', 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', 'owner': 'owner_value', - 'alert_subject': 'Test alert for {0}, owned by {1}', 'alert_subject_args': ['test_term', 'owner'], 'snowman': '☃'} + rule = { + 'name': 'test alert', + 'email': 'testing@test.test', + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'owner': 'owner_value', + 'alert_subject': 'Test alert for {0}, owned by {1}', + 'alert_subject_args': ['test_term', 'owner'], + 'snowman': '☃' + } with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() @@ -97,10 +130,18 @@ def test_email_with_unicode_strings(): def test_email_with_auth(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'alert_subject': 'Test alert for {0}', 'alert_subject_args': ['test_term'], 'smtp_auth_file': 'file.txt', - 'rule_file': '/tmp/foo.yaml'} + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'alert_subject': 'Test alert for {0}', + 'alert_subject_args': ['test_term'], + 'smtp_auth_file': 'file.txt', + 'rule_file': '/tmp/foo.yaml' + } with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: with mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} @@ -113,16 +154,33 @@ def test_email_with_auth(): mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), mock.call().login('someone', 'hunter2'), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY), + mock.call().sendmail( + mock.ANY, + [ + 'testing@test.test', + 'test@test.test' + ], + mock.ANY + ), mock.call().quit()] assert mock_smtp.mock_calls == expected def test_email_with_cert_key(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'alert_subject': 'Test alert for {0}', 'alert_subject_args': ['test_term'], 'smtp_auth_file': 'file.txt', - 'smtp_cert_file': 'dummy/cert.crt', 'smtp_key_file': 'dummy/client.key', 'rule_file': '/tmp/foo.yaml'} + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'alert_subject': 'Test alert for {0}', + 'alert_subject_args': ['test_term'], + 'smtp_auth_file': 'file.txt', + 'smtp_cert_file': 'dummy/cert.crt', + 'smtp_key_file': 'dummy/client.key', + 'rule_file': '/tmp/foo.yaml' + } with 
mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: with mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} @@ -135,15 +193,28 @@ def test_email_with_cert_key(): mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile='dummy/cert.crt', keyfile='dummy/client.key'), mock.call().login('someone', 'hunter2'), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY), + mock.call().sendmail( + mock.ANY, + [ + 'testing@test.test', + 'test@test.test' + ], + mock.ANY + ), mock.call().quit()] assert mock_smtp.mock_calls == expected def test_email_with_cc(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'cc': 'tester@testing.testing'} + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'cc': 'tester@testing.testing' + } with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() @@ -153,7 +224,15 @@ def test_email_with_cc(): mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test', 'tester@testing.testing'], mock.ANY), + mock.call().sendmail( + mock.ANY, + [ + 'testing@test.test', + 'test@test.test', + 'tester@testing.testing' + ], + mock.ANY + ), mock.call().quit()] assert mock_smtp.mock_calls == expected @@ -166,9 +245,15 @@ def test_email_with_cc(): def test_email_with_bcc(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'bcc': 'tester@testing.testing'} + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'bcc': 'tester@testing.testing' + } with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() @@ -178,7 +263,15 @@ def test_email_with_bcc(): mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test', 'tester@testing.testing'], mock.ANY), + mock.call().sendmail( + mock.ANY, + [ + 'testing@test.test', + 'test@test.test', + 'tester@testing.testing' + ], + mock.ANY + ), mock.call().quit()] assert mock_smtp.mock_calls == expected @@ -191,9 +284,16 @@ def test_email_with_bcc(): def test_email_with_cc_and_bcc(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], 'from_addr': 'testfrom@test.test', - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'cc': ['test1@test.com', 'test2@test.com'], 'bcc': 'tester@testing.testing'} + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'cc': ['test1@test.com', 'test2@test.com'], + 'bcc': 'tester@testing.testing' + } with 
mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() @@ -213,8 +313,8 @@ def test_email_with_cc_and_bcc(): 'tester@testing.testing' ], mock.ANY - ), - mock.call().quit()] + ), + mock.call().quit()] assert mock_smtp.mock_calls == expected body = mock_smtp.mock_calls[4][1][2] @@ -248,7 +348,14 @@ def test_email_with_args(): mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), - mock.call().sendmail(mock.ANY, ['testing@test.test', 'test@test.test'], mock.ANY), + mock.call().sendmail( + mock.ANY, + [ + 'testing@test.test', + 'test@test.test' + ], + mock.ANY + ), mock.call().quit()] assert mock_smtp.mock_calls == expected @@ -267,9 +374,14 @@ def test_email_with_args(): def test_email_query_key_in_subject(): - rule = {'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], - 'type': mock_rule(), 'timestamp_field': '@timestamp', 'email_reply_to': 'test@example.com', - 'query_key': 'username'} + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'query_key': 'username' + } with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() @@ -284,3 +396,58 @@ def test_email_query_key_in_subject(): assert 'werbenjagermanjensen' in line found_subject = True assert found_subject + + +def test_email_getinfo(): + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'owner': 'owner_value', + 'alert_subject': 'Test alert for {0}, owned by {1}', + 'alert_subject_args': ['test_term', 'owner'], + 'snowman': '☃' + } + alert = EmailAlerter(rule) + + expected_data = { + 'type': 'email', + 'recipients': ['testing@test.test', 'test@test.test']} + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('email, expected_data', [ + (['testing@test.test', 'test@test.test'], True), + (['testing@test.test', 'test@test.test'], + { + 'type': 'email', + 'recipients': ['testing@test.test', 'test@test.test'] + }), +]) +def test_email_key_error(email, expected_data): + try: + rule = { + 'name': 'test alert', + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'owner': 'owner_value', + 'alert_subject': 'Test alert for {0}, owned by {1}', + 'alert_subject_args': ['test_term', 'owner'], + 'snowman': '☃' + } + + if email != '': + rule['email'] = email + + alert = EmailAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception: + assert expected_data diff --git a/tests/alerters/exotel_test.py b/tests/alerters/exotel_test.py new file mode 100644 index 000000000..4559c8443 --- /dev/null +++ b/tests/alerters/exotel_test.py @@ -0,0 +1,25 @@ +from elastalert.alerters.exotel import ExotelAlerter +from elastalert.loaders import FileRulesLoader + + +def test_exotel_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'exotel_account_sid': 'xxxxx1', + 'exotel_auth_token': 'xxxxx2', + 'exotel_to_number': 'xxxxx3', + 'exotel_from_number': 'xxxxx4', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ExotelAlerter(rule) + + expected_data 
= { + 'type': 'exotel', + 'exotel_account': 'xxxxx1' + } + actual_data = alert.get_info() + assert expected_data == actual_data diff --git a/tests/alerters/googlechat_test.py b/tests/alerters/googlechat_test.py index c830cc78a..b60317e40 100644 --- a/tests/alerters/googlechat_test.py +++ b/tests/alerters/googlechat_test.py @@ -14,7 +14,6 @@ def test_google_chat_basic(): 'name': 'Test GoogleChat Rule', 'type': 'any', 'googlechat_webhook_url': 'http://xxxxxxx', - 'googlechat_format': 'basic', 'alert': [] } rules_loader = FileRulesLoader({}) @@ -112,11 +111,6 @@ def test_google_chat_ea_exception(): 'name': 'Test GoogleChat Rule', 'type': 'any', 'googlechat_webhook_url': 'http://xxxxxxx', - 'googlechat_format': 'card', - 'googlechat_header_title': 'xxxx1', - 'googlechat_header_subtitle': 'xxxx2', - 'googlechat_header_image': 'http://xxxx/image.png', - 'googlechat_footer_kibanalink': 'http://xxxxx/kibana', 'alert': [] } rules_loader = FileRulesLoader({}) @@ -131,3 +125,51 @@ def test_google_chat_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_google_chat_getinfo(): + rule = { + 'name': 'Test GoogleChat Rule', + 'type': 'any', + 'googlechat_webhook_url': 'http://xxxxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GoogleChatAlerter(rule) + + expected_data = { + 'type': 'googlechat', + 'googlechat_webhook_url': ['http://xxxxxxx'] + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('googlechat_webhook_url, expected_data', [ + ('', True), + ('http://xxxxxxx', + { + 'type': 'googlechat', + 'googlechat_webhook_url': ['http://xxxxxxx'] + }), +]) +def test_google_chat_key_error(googlechat_webhook_url, expected_data): + try: + rule = { + 'name': 'Test GoogleChat Rule', + 'type': 'any', + 'alert': [] + } + + if googlechat_webhook_url != '': + rule['googlechat_webhook_url'] = googlechat_webhook_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GoogleChatAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data diff --git a/tests/alerters/jira_test.py b/tests/alerters/jira_test.py index 255f0d444..f7f3a807e 100644 --- a/tests/alerters/jira_test.py +++ b/tests/alerters/jira_test.py @@ -321,3 +321,39 @@ def test_jira_arbitrary_field_support(): alert = JiraAlerter(rule) alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) assert "Exception encountered when trying to add 'invalid_watcher' as a watcher. Does the user exist?" in str(exception) + + +def test_jira_getinfo(): + description_txt = "Description stuff goes here like a runbook link." 
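+    # The JIRA client and the account file are stubbed out below:
+    # priorities() and fields() are given canned return values so that
+    # JiraAlerter.__init__ can run without a reachable Jira server.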
+ rule = { + 'name': 'test alert', + 'jira_account_file': 'jirafile', + 'type': mock_rule(), + 'jira_project': 'testproject', + 'jira_priority': 0, + 'jira_issuetype': 'testtype', + 'jira_server': 'jiraserver', + 'jira_label': 'testlabel', + 'jira_component': 'testcomponent', + 'jira_description': description_txt, + 'jira_watchers': ['testwatcher1', 'testwatcher2'], + 'timestamp_field': '@timestamp', + 'alert_subject': 'Issue {0} occurred at {1}', + 'alert_subject_args': ['test_term', '@timestamp'], + 'rule_file': '/tmp/foo.yaml' + } + + mock_priority = mock.Mock(id='5') + + with mock.patch('elastalert.alerters.jira.JIRA') as mock_jira, \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + mock_jira.return_value.priorities.return_value = [mock_priority] + mock_jira.return_value.fields.return_value = [] + alert = JiraAlerter(rule) + + expected_data = { + 'type': 'jira' + } + actual_data = alert.get_info() + assert expected_data == actual_data diff --git a/tests/alerters/line_test.py b/tests/alerters/line_test.py index ff5dc30f1..31340eb23 100644 --- a/tests/alerters/line_test.py +++ b/tests/alerters/line_test.py @@ -61,3 +61,51 @@ def test_line_notify_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_line_getinfo(): + rule = { + 'name': 'Test LineNotify Rule', + 'type': 'any', + 'linenotify_access_token': 'xxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = LineNotifyAlerter(rule) + + expected_data = { + "type": "linenotify", + "linenotify_access_token": 'xxxxx' + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('linenotify_access_token, expected_data', [ + ('', True), + ('xxxxx', + { + "type": "linenotify", + "linenotify_access_token": 'xxxxx' + }), +]) +def test_line_key_error(linenotify_access_token, expected_data): + try: + rule = { + 'name': 'Test LineNotify Rule', + 'type': 'any', + 'alert': [] + } + + if linenotify_access_token != '': + rule['linenotify_access_token'] = linenotify_access_token + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = LineNotifyAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data diff --git a/tests/alerters/mattermost_test.py b/tests/alerters/mattermost_test.py index 95897ce9a..1c99e45fc 100644 --- a/tests/alerters/mattermost_test.py +++ b/tests/alerters/mattermost_test.py @@ -774,3 +774,133 @@ def test_mattermost_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_mattermost_get_aggregation_summary_text__maximum_width(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_author_icon': 'http://author.icon.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + assert 75 == alert.get_aggregation_summary_text__maximum_width() + + +@pytest.mark.parametrize('msg_color, except_msg_color', [ + ('', 'danger'), + ('danger', 'danger'), + ('good', 'good'), + ('warning', 'warning') +]) +def test_mattermost_msg_color(msg_color, except_msg_color): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 
'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_author_icon': 'http://author.icon.url', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + + if msg_color != '': + rule['mattermost_msg_color'] = msg_color + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': except_msg_color, + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n', + 'author_icon': 'http://author.icon.url' + } + ], + 'username': 'elastalert' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_mattermost_getinfo(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + + expected_data = { + 'type': 'mattermost', + 'mattermost_username_override': 'elastalert', + 'mattermost_webhook_url': ['http://xxxxx'] + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('mattermost_webhook_url, expected_data', [ + ('', True), + ('http://xxxxx', + { + 'type': 'mattermost', + 'mattermost_username_override': 'elastalert', + 'mattermost_webhook_url': ['http://xxxxx'] + }), +]) +def test_mattermost_key_error(mattermost_webhook_url, expected_data): + try: + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + + if mattermost_webhook_url != '': + rule['mattermost_webhook_url'] = mattermost_webhook_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data diff --git a/tests/alerters/opsgenie_test.py b/tests/alerters/opsgenie_test.py index 4f68016da..8e0c8ad77 100644 --- a/tests/alerters/opsgenie_test.py +++ b/tests/alerters/opsgenie_test.py @@ -1,5 +1,8 @@ import mock +import pytest +from requests import RequestException +from elastalert.util import EAException from elastalert.alerters.opsgenie import OpsGenieAlerter from elastalert.alerts import BasicMatchString from tests.alerts_test import mock_rule @@ -566,7 +569,14 @@ def test_opsgenie_subject_args(): assert expected_json == actual_json -def test_opsgenie_priority_p1(): +@pytest.mark.parametrize('opsgenie_priority', [ + ('P1'), + ('P2'), + ('P3'), + ('P4'), + ('P5') +]) +def test_opsgenie_priority(opsgenie_priority): rule = { 'name': 'Opsgenie Details', 'type': mock_rule(), @@ -576,7 +586,7 @@ def test_opsgenie_priority_p1(): 'Message': {'field': 'message'}, 'Missing': {'field': 'missing'} }, - 'opsgenie_priority': 'P1' + 'opsgenie_priority': opsgenie_priority } match = { 'message': 'Testing', @@ -601,7 +611,7 @@ def 
test_opsgenie_priority_p1(): 'description': BasicMatchString(rule, match).__str__(), 'details': {'Message': 'Testing'}, 'message': 'ElastAlert: Opsgenie Details', - 'priority': 'P1', + 'priority': opsgenie_priority, 'source': 'ElastAlert', 'tags': ['ElastAlert', 'Opsgenie Details'], 'user': 'genies' @@ -610,95 +620,7 @@ def test_opsgenie_priority_p1(): assert expected_json == actual_json -def test_opsgenie_priority_p2(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_priority': 'P2' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': 'P2', - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_priority_p3(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_priority': 'P3' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': 'P3', - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json - - -def test_opsgenie_priority_p4(): +def test_opsgenie_priority_none(): rule = { 'name': 'Opsgenie Details', 'type': mock_rule(), @@ -708,7 +630,7 @@ def test_opsgenie_priority_p4(): 'Message': {'field': 'message'}, 'Missing': {'field': 'missing'} }, - 'opsgenie_priority': 'P4' + 'opsgenie_priority': 'abc' } match = { 'message': 'Testing', @@ -733,7 +655,6 @@ def test_opsgenie_priority_p4(): 'description': BasicMatchString(rule, match).__str__(), 'details': {'Message': 'Testing'}, 'message': 'ElastAlert: Opsgenie Details', - 'priority': 'P4', 'source': 'ElastAlert', 'tags': ['ElastAlert', 'Opsgenie Details'], 'user': 'genies' @@ -742,7 +663,7 @@ def test_opsgenie_priority_p4(): assert expected_json == actual_json -def test_opsgenie_priority_p5(): +def test_opsgenie_proxy(): rule = { 'name': 'Opsgenie Details', 'type': mock_rule(), @@ -752,7 +673,7 @@ def test_opsgenie_priority_p5(): 'Message': {'field': 'message'}, 'Missing': {'field': 'missing'} }, - 'opsgenie_priority': 'P5' + 'opsgenie_proxy': 
'https://proxy.url' } match = { 'message': 'Testing', @@ -770,14 +691,14 @@ def test_opsgenie_priority_p5(): 'Authorization': 'GenieKey ogkey' }, json=mock.ANY, - proxies=None + proxies={'https': 'https://proxy.url'} ) expected_json = { 'description': BasicMatchString(rule, match).__str__(), 'details': {'Message': 'Testing'}, 'message': 'ElastAlert: Opsgenie Details', - 'priority': 'P5', + 'priority': None, 'source': 'ElastAlert', 'tags': ['ElastAlert', 'Opsgenie Details'], 'user': 'genies' @@ -786,50 +707,36 @@ def test_opsgenie_priority_p5(): assert expected_json == actual_json -def test_opsgenie_priority_none(): - rule = { - 'name': 'Opsgenie Details', - 'type': mock_rule(), - 'opsgenie_account': 'genies', - 'opsgenie_key': 'ogkey', - 'opsgenie_details': { - 'Message': {'field': 'message'}, - 'Missing': {'field': 'missing'} - }, - 'opsgenie_priority': 'abc' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' - } - alert = OpsGenieAlerter(rule) - - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies=None - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' - } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json +def test_opsgenie_ea_exception(): + try: + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': { + 'Message': {'field': 'message'}, + 'Missing': {'field': 'missing'} + }, + 'opsgenie_proxy': 'https://proxy.url' + } + match = { + 'message': 'Testing', + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True -def test_opsgenie_proxy(): +def test_opsgenie_getinfo(): rule = { 'name': 'Opsgenie Details', 'type': mock_rule(), @@ -839,35 +746,17 @@ def test_opsgenie_proxy(): 'Message': {'field': 'message'}, 'Missing': {'field': 'missing'} }, - 'opsgenie_proxy': 'https://proxy.url' - } - match = { - 'message': 'Testing', - '@timestamp': '2014-10-31T00:00:00' + 'opsgenie_proxy': 'https://proxy.url', + 'opsgenie_teams': ['{TEAM_PREFIX}-Team'], + 'opsgenie_recipients': ['lytics'] } alert = OpsGenieAlerter(rule) - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - 'https://api.opsgenie.com/v2/alerts', - headers={ - 'Content-Type': 'application/json', - 'Authorization': 'GenieKey ogkey' - }, - json=mock.ANY, - proxies={'https': 'https://proxy.url'} - ) - - expected_json = { - 'description': BasicMatchString(rule, match).__str__(), - 'details': {'Message': 'Testing'}, - 'message': 'ElastAlert: Opsgenie Details', - 'priority': None, - 'source': 'ElastAlert', - 'tags': ['ElastAlert', 'Opsgenie Details'], - 'user': 'genies' + expected_data = { + 'type': 'opsgenie', + 'recipients': ['lytics'], + 'account': 'genies', + 'teams': 
['{TEAM_PREFIX}-Team'] } - actual_json = mock_post_request.call_args_list[0][1]['json'] - assert expected_json == actual_json + actual_data = alert.get_info() + assert expected_data == actual_data diff --git a/tests/alerters/pagerduty_test.py b/tests/alerters/pagerduty_test.py index 6b268602b..e85333078 100644 --- a/tests/alerters/pagerduty_test.py +++ b/tests/alerters/pagerduty_test.py @@ -608,3 +608,126 @@ def test_pagerduty_ea_exception(): alert.alert([match]) except EAException: assert True + + +@pytest.mark.parametrize('severity, except_severity', [ + ('', 'critical'), + ('critical', 'critical'), + ('error', 'error'), + ('warning', 'warning'), + ('info', 'info') +]) +def test_pagerduty_alerter_v2_payload_severity(severity, except_severity): + rule = {} + if severity == '': + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'alert': [] + } + else: + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'pagerduty_api_version': 'v2', + 'pagerduty_v2_payload_class': 'ping failure', + 'pagerduty_v2_payload_component': 'mysql', + 'pagerduty_v2_payload_group': 'app-stack', + 'pagerduty_v2_payload_severity': severity, + 'pagerduty_v2_payload_source': 'mysql.host.name', + 'alert': [] + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'client': 'ponies inc.', + 'payload': { + 'class': 'ping failure', + 'component': 'mysql', + 'group': 'app-stack', + 'severity': except_severity, + 'source': 'mysql.host.name', + 'summary': 'Test PD Rule', + 'custom_details': { + 'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: foobarbaz\n' + }, + 'timestamp': '2017-01-01T00:00:00' + }, + 'event_action': 'trigger', + 'dedup_key': '', + 'routing_key': 'magicalbadgers', + } + mock_post_request.assert_called_once_with('https://events.pagerduty.com/v2/enqueue', + data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_pagerduty_getinfo(): + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'pagerduty_service_key': 'magicalbadgers', + 'pagerduty_client_name': 'ponies inc.', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + + expected_data = { + 'type': 'pagerduty', + 'pagerduty_client_name': 'ponies inc.' 
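+        # note: get_info() is expected to surface only the client name,
+        # not the service key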
+ } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('pagerduty_service_key, pagerduty_client_name, expected_data', [ + ('', '', True), + ('xxxxx1', '', True), + ('', 'xxxxx2', True), + ('xxxxx1', 'xxxxx2', + { + 'type': 'pagerduty', + 'pagerduty_client_name': 'xxxxx2' + }), +]) +def test_pagerduty_key_error(pagerduty_service_key, pagerduty_client_name, expected_data): + try: + rule = { + 'name': 'Test PD Rule', + 'type': 'any', + 'alert': [] + } + + if pagerduty_service_key != '': + rule['pagerduty_service_key'] = pagerduty_service_key + + if pagerduty_client_name != '': + rule['pagerduty_client_name'] = pagerduty_client_name + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerDutyAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data diff --git a/tests/alerters/pagertree_test.py b/tests/alerters/pagertree_test.py index 99844e56f..1d61169ad 100644 --- a/tests/alerters/pagertree_test.py +++ b/tests/alerters/pagertree_test.py @@ -113,3 +113,55 @@ def test_pagertree_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_pagertree_getinfo(): + rule = { + 'name': 'Test PagerTree Rule', + 'type': 'any', + 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerTreeAlerter(rule) + + expected_data = { + 'type': 'pagertree', + 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx' + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('pagertree_integration_url, expected_data', [ + ('', True), + ('https://api.pagertree.com/integration/xxxxx', + { + 'type': 'pagertree', + 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx' + }), +]) +def test_pagertree_key_error(pagertree_integration_url, expected_data): + try: + rule = { + 'name': 'Test PagerTree Rule', + 'type': 'any', + 'alert': [] + } + + if pagertree_integration_url != '': + rule['pagertree_integration_url'] = pagertree_integration_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = PagerTreeAlerter(rule) + + expected_data = { + 'type': 'pagertree', + 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx' + } + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data diff --git a/tests/alerters/rocketchat_test.py b/tests/alerters/rocketchat_test.py index 81335090d..ea20fb5a0 100644 --- a/tests/alerters/rocketchat_test.py +++ b/tests/alerters/rocketchat_test.py @@ -271,14 +271,12 @@ def test_rocketchat_emoji_override(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_rocketchat_msg_color_good(): +def test_rocketchat_emoji_override_blank(): rule = { 'name': 'Test Rule', 'type': 'any', - 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', - 'rocket_chat_username_override': 'elastalert2', - 'rocket_chat_msg_color': 'good', - 'alert_subject': 'Cool subject', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'rocket_chat_emoji_override': '', 'alert': [] } rules_loader = FileRulesLoader({}) @@ -294,11 +292,10 @@ def test_rocketchat_msg_color_good(): expected_data = { 'username': 'elastalert2', 'channel': '', - 'emoji': ':ghost:', 'attachments': [ { - 'color': 
'good', - 'title': rule['alert_subject'], + 'color': 'danger', + 'title': rule['name'], 'text': BasicMatchString(rule, match).__str__(), 'fields': [] } @@ -306,7 +303,7 @@ def test_rocketchat_msg_color_good(): 'text': '' } mock_post_request.assert_called_once_with( - rule['rocket_chat_webhook_url'], + rule['rocket_chat_webhook_url'][0], data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None @@ -314,16 +311,25 @@ def test_rocketchat_msg_color_good(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_rocketchat_msg_color_warning(): +@pytest.mark.parametrize('msg_color, except_msg_color', [ + ('', 'danger'), + ('danger', 'danger'), + ('good', 'good'), + ('warning', 'warning') +]) +def test_rocketchat_msg_color(msg_color, except_msg_color): rule = { 'name': 'Test Rule', 'type': 'any', 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', 'rocket_chat_username_override': 'elastalert2', - 'rocket_chat_msg_color': 'warning', 'alert_subject': 'Cool subject', 'alert': [] } + + if msg_color != '': + rule['rocket_chat_msg_color'] = msg_color + rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) alert = RocketChatAlerter(rule) @@ -340,7 +346,7 @@ def test_rocketchat_msg_color_warning(): 'emoji': ':ghost:', 'attachments': [ { - 'color': 'warning', + 'color': except_msg_color, 'title': rule['alert_subject'], 'text': BasicMatchString(rule, match).__str__(), 'fields': [] @@ -508,11 +514,13 @@ def test_rocketchat_alert_fields(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_rocketchat_required_options_key_error(): +def test_rocketchat_msg_color_key_error(): try: rule = { 'name': 'Test Rule', 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_msg_color': 'abc', 'alert_subject': 'Cool subject', 'alert': [] } @@ -529,13 +537,14 @@ def test_rocketchat_required_options_key_error(): assert True -def test_rocketchat_msg_color_key_error(): +def test_rocketchat_ea_exception(): try: rule = { 'name': 'Test Rule', 'type': 'any', 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', - 'rocket_chat_msg_color': 'abc', + 'rocket_chat_username_override': 'elastalert2', + 'rocket_chat_msg_pretext': 'pretext value', 'alert_subject': 'Cool subject', 'alert': [] } @@ -546,32 +555,76 @@ def test_rocketchat_msg_color_key_error(): '@timestamp': '2021-01-01T00:00:00', 'somefield': 'foobarbaz' } - with mock.patch('requests.post'): + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except KeyError: + except EAException: assert True -def test_rocketchat_ea_exception(): +def test_rocketchat_get_aggregation_summary_text__maximum_width(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert2', + 'rocket_chat_msg_pretext': 'pretext value', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + assert 75 == alert.get_aggregation_summary_text__maximum_width() + + +def test_rocketchat_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + 
rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + + expected_data = { + 'type': 'rocketchat', + 'rocket_chat_username_override': 'elastalert2', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'] + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('rocket_chat_webhook_url, expected_data', [ + ('', True), + ('http://please.dontgohere.rocketchat', + { + 'type': 'rocketchat', + 'rocket_chat_username_override': 'elastalert2', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'] + }) +]) +def test_rocketchat_key_error(rocket_chat_webhook_url, expected_data): try: rule = { 'name': 'Test Rule', 'type': 'any', - 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', - 'rocket_chat_username_override': 'elastalert2', - 'rocket_chat_msg_pretext': 'pretext value', 'alert_subject': 'Cool subject', 'alert': [] } + + if rocket_chat_webhook_url != '': + rule['rocket_chat_webhook_url'] = rocket_chat_webhook_url + rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) alert = RocketChatAlerter(rule) - match = { - '@timestamp': '2021-01-01T00:00:00', - 'somefield': 'foobarbaz' - } - mock_run = mock.MagicMock(side_effect=RequestException) - with mock.patch('requests.post', mock_run), pytest.raises(RequestException): - alert.alert([match]) - except EAException: - assert True + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data diff --git a/tests/alerters/servicenow_test.py b/tests/alerters/servicenow_test.py index 2b0d5bbe9..2869bf656 100644 --- a/tests/alerters/servicenow_test.py +++ b/tests/alerters/servicenow_test.py @@ -144,3 +144,32 @@ def test_service_now_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_servicenow_getinfo(): + rule = { + 'name': 'Test ServiceNow Rule', + 'type': 'any', + 'username': 'ServiceNow username', + 'password': 'ServiceNow password', + 'servicenow_rest_url': 'https://xxxxxxxxxx', + 'short_description': 'ServiceNow short_description', + 'comments': 'ServiceNow comments', + 'assignment_group': 'ServiceNow assignment_group', + 'category': 'ServiceNow category', + 'subcategory': 'ServiceNow subcategory', + 'cmdb_ci': 'ServiceNow cmdb_ci', + 'caller_id': 'ServiceNow caller_id', + 'servicenow_proxy': 'http://proxy.url', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ServiceNowAlerter(rule) + + expected_data = { + 'type': 'ServiceNow', + 'self.servicenow_rest_url': 'https://xxxxxxxxxx' + } + actual_data = alert.get_info() + assert expected_data == actual_data diff --git a/tests/alerters/slack_test.py b/tests/alerters/slack_test.py index 5d6080ab7..7b660310d 100644 --- a/tests/alerters/slack_test.py +++ b/tests/alerters/slack_test.py @@ -452,51 +452,6 @@ def test_slack_kibana_discover_color(): assert expected_data == actual_data -def test_slack_ignore_ssl_errors(): - rule = { - 'name': 'Test Rule', - 'type': 'any', - 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_ignore_ssl_errors': True, - 'alert': [] - } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = SlackAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00' - } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) - - mock_post_request.assert_called_once_with( - rule['slack_webhook_url'], - data=mock.ANY, - headers={'content-type': 'application/json'}, - proxies=None, - 
verify=False, - timeout=10 - ) - - expected_data = { - 'username': 'elastalert', - 'channel': '', - 'icon_emoji': ':ghost:', - 'attachments': [ - { - 'color': 'danger', - 'title': 'Test Rule', - 'text': BasicMatchString(rule, match).__str__(), - 'mrkdwn_in': ['text', 'pretext'], - 'fields': [] - } - ], - 'text': '', - 'parse': 'none' - } - assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) - - def test_slack_proxy(): rule = { 'name': 'Test Rule', @@ -731,16 +686,25 @@ def test_slack_icon_url_override(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_msg_color(): +@pytest.mark.parametrize('msg_color, except_msg_color', [ + ('', 'danger'), + ('danger', 'danger'), + ('good', 'good'), + ('warning', 'warning') +]) +def test_slack_msg_color(msg_color, except_msg_color): rule = { 'name': 'Test Rule', 'type': 'any', 'slack_webhook_url': 'http://please.dontgohere.slack', 'slack_username_override': 'elastalert', - 'slack_msg_color': 'good', 'alert_subject': 'Cool subject', 'alert': [] } + + if msg_color != '': + rule['slack_msg_color'] = msg_color + rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) alert = SlackAlerter(rule) @@ -757,7 +721,7 @@ def test_slack_msg_color(): 'icon_emoji': ':ghost:', 'attachments': [ { - 'color': 'good', + 'color': except_msg_color, 'title': rule['alert_subject'], 'text': BasicMatchString(rule, match).__str__(), 'mrkdwn_in': ['text', 'pretext'], @@ -942,21 +906,36 @@ def test_slack_alert_fields(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_slack_ca_certs(): +@pytest.mark.parametrize('ca_certs, ignore_ssl_errors, excpet_verify', [ + ('', '', True), + ('', True, False), + ('', False, True), + (True, '', True), + (True, True, True), + (True, False, True), + (False, '', True), + (False, True, False), + (False, False, True) +]) +def test_slack_ca_certs(ca_certs, ignore_ssl_errors, excpet_verify): rule = { 'name': 'Test Rule', 'type': 'any', 'slack_webhook_url': 'http://please.dontgohere.slack', - 'slack_username_override': 'elastalert', - 'slack_ca_certs': True, 'alert_subject': 'Cool subject', 'alert': [] } + if ca_certs != '': + rule['slack_ca_certs'] = ca_certs + + if ignore_ssl_errors != '': + rule['slack_ignore_ssl_errors'] = ignore_ssl_errors + rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) alert = SlackAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2017-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: @@ -972,7 +951,7 @@ def test_slack_ca_certs(): 'title': rule['alert_subject'], 'text': BasicMatchString(rule, match).__str__(), 'mrkdwn_in': ['text', 'pretext'], - 'fields': [], + 'fields': [] } ], 'text': '', @@ -983,7 +962,7 @@ def test_slack_ca_certs(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=True, + verify=excpet_verify, timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1350,3 +1329,69 @@ def test_slack_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_slack_get_aggregation_summary_text__maximum_width(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_username_override': 'elastalert', + 'slack_msg_pretext': 'pretext value', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + 
rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + assert 75 == alert.get_aggregation_summary_text__maximum_width() + + +def test_slack_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + + expected_data = { + 'type': 'slack', + 'slack_username_override': 'elastalert' + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('slack_webhook_url, expected_data', [ + ('', True), + ('http://please.dontgohere.slack', + { + 'type': 'slack', + 'slack_username_override': 'elastalert' + }), +]) +def test_slack_key_error(slack_webhook_url, expected_data): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [] + } + + if slack_webhook_url != '': + rule['slack_webhook_url'] = slack_webhook_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data diff --git a/tests/alerters/stomp_test.py b/tests/alerters/stomp_test.py new file mode 100644 index 000000000..ebad391c2 --- /dev/null +++ b/tests/alerters/stomp_test.py @@ -0,0 +1,24 @@ +from elastalert.alerters.stomp import StompAlerter +from elastalert.loaders import FileRulesLoader + + +def test_stomp_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'stomp_hostname': 'localhost', + 'stomp_hostport': '61613', + 'stomp_login': 'admin', + 'stomp_password': 'admin', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = StompAlerter(rule) + + expected_data = { + 'type': 'stomp' + } + actual_data = alert.get_info() + assert expected_data == actual_data diff --git a/tests/alerters/telegram_test.py b/tests/alerters/telegram_test.py index a7c9b38f6..95a8d02d9 100644 --- a/tests/alerters/telegram_test.py +++ b/tests/alerters/telegram_test.py @@ -125,7 +125,7 @@ def test_telegram_text_maxlength(): def test_telegram_ea_exception(): try: rule = { - 'name': 'Test Telegram Rule' + ('a' * 3985), + 'name': 'Test Telegram Rule', 'type': 'any', 'telegram_bot_token': 'xxxxx1', 'telegram_room_id': 'xxxxx2', @@ -143,3 +143,57 @@ def test_telegram_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_telegram_getinfo(): + rule = { + 'name': 'Test Telegram Rule', + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + + expected_data = { + 'type': 'telegram', + 'telegram_room_id': 'xxxxx2' + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('telegram_bot_token, telegram_room_id, expected_data', [ + ('', '', True), + ('xxxxx1', '', True), + ('', 'xxxxx2', True), + ('xxxxx1', 'xxxxx2', + { + 'type': 'telegram', + 'telegram_room_id': 'xxxxx2' + }), +]) +def test_telegram_key_error(telegram_bot_token, telegram_room_id, expected_data): + try: + rule = { + 'name': 'Test Telegram Rule', + 'type': 'any', + 'alert': [] + } + + if telegram_bot_token != '': + rule['telegram_bot_token'] = telegram_bot_token + + if telegram_room_id != '': + rule['telegram_room_id'] = telegram_room_id 
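+        # with either required setting missing, constructing TelegramAlerter
+        # is expected to raise KeyError, which the except branch below treats
+        # as the passing outcome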
+ + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data diff --git a/tests/alerters/thehive_test.py b/tests/alerters/thehive_test.py index 0a61dc32a..d61f4c50b 100644 --- a/tests/alerters/thehive_test.py +++ b/tests/alerters/thehive_test.py @@ -1,7 +1,10 @@ import json import mock +import pytest +from requests import RequestException +from elastalert.util import EAException from elastalert.loaders import FileRulesLoader from elastalert.alerters.thehive import HiveAlerter @@ -87,3 +90,75 @@ def test_thehive_alerter(): del actual_data['sourceRef'] assert expected_data == actual_data + + +def test_thehive_ea_exception(): + try: + rule = {'alert': [], + 'alert_text': '', + 'alert_text_type': 'alert_text_only', + 'description': 'test', + 'hive_alert_config': {'customFields': [{'name': 'test', + 'type': 'string', + 'value': 'test.ip'}], + 'follow': True, + 'severity': 2, + 'source': 'elastalert', + 'status': 'New', + 'tags': ['test.ip'], + 'tlp': 3, + 'type': 'external'}, + 'hive_connection': {'hive_apikey': '', + 'hive_host': 'https://localhost', + 'hive_port': 9000}, + 'hive_observable_data_mapping': [{'ip': 'test.ip'}], + 'name': 'test-thehive', + 'tags': ['a', 'b'], + 'type': 'any'} + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HiveAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + except EAException: + assert True + + +@pytest.mark.parametrize('hive_host, expect', [ + ('https://localhost', {'type': 'hivealerter', 'hive_host': 'https://localhost'}), + ('', {'type': 'hivealerter', 'hive_host': ''}) +]) +def test_thehive_getinfo(hive_host, expect): + rule = {'alert': [], + 'alert_text': '', + 'alert_text_type': 'alert_text_only', + 'description': 'test', + 'hive_alert_config': {'customFields': [{'name': 'test', + 'type': 'string', + 'value': 'test.ip'}], + 'follow': True, + 'severity': 2, + 'source': 'elastalert', + 'status': 'New', + 'tags': ['test.ip'], + 'tlp': 3, + 'type': 'external'}, + 'hive_connection': {'hive_apikey': '', + 'hive_host': hive_host, + 'hive_port': 9000}, + 'hive_observable_data_mapping': [{'ip': 'test.ip'}], + 'name': 'test-thehive', + 'tags': ['a', 'b'], + 'type': 'any'} + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HiveAlerter(rule) + + expected_data = expect + actual_data = alert.get_info() + assert expected_data == actual_data diff --git a/tests/alerters/twilio_test.py b/tests/alerters/twilio_test.py new file mode 100644 index 000000000..8bd04b858 --- /dev/null +++ b/tests/alerters/twilio_test.py @@ -0,0 +1,109 @@ +import pytest + +from elastalert.alerters.twilio import TwilioAlerter +from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException + + +def test_twilio_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'twilio_from_number': 'xxxxx4', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TwilioAlerter(rule) + + expected_data = { + 'type': 'twilio', + 'twilio_client_name': 'xxxxx4' + } + actual_data = 
alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('twilio_account_sid, twilio_auth_token, twilio_to_number, expected_data', [ + ('', '', '', True), + ('xxxx1', '', '', True), + ('', 'xxxx2', '', True), + ('', '', 'INFO', True), + ('xxxx1', 'xxxx2', '', True), + ('xxxx1', '', 'INFO', True), + ('', 'xxxx2', 'INFO', True), + ('xxxx1', 'xxxx2', 'INFO', + { + 'type': 'twilio', + 'twilio_client_name': 'xxxxx4' + }), +]) +def test_twilio_key_error(twilio_account_sid, twilio_auth_token, twilio_to_number, expected_data): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_from_number': 'xxxxx4', + 'alert': [] + } + + if twilio_account_sid != '': + rule['twilio_account_sid'] = twilio_account_sid + + if twilio_auth_token != '': + rule['twilio_auth_token'] = twilio_auth_token + + if twilio_to_number != '': + rule['twilio_to_number'] = twilio_to_number + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TwilioAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data + + +@pytest.mark.parametrize('twilio_use_copilot, twilio_message_service_sid, twilio_from_number, expected_data', [ + (True, None, 'test', True), + (False, 'test', None, True), +]) +def test_twilio_use_copilot(twilio_use_copilot, twilio_message_service_sid, twilio_from_number, expected_data): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'alert': [] + } + + if twilio_use_copilot != '': + rule['twilio_use_copilot'] = twilio_use_copilot + + if twilio_message_service_sid != '': + rule['twilio_message_service_sid'] = twilio_message_service_sid + + if twilio_from_number != '': + rule['twilio_from_number'] = twilio_from_number + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TwilioAlerter(rule) + + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + alert.alert([match]) + except EAException: + assert expected_data diff --git a/tests/alerters/victorops_test.py b/tests/alerters/victorops_test.py index ec7895840..649f9aa32 100644 --- a/tests/alerters/victorops_test.py +++ b/tests/alerters/victorops_test.py @@ -9,7 +9,7 @@ from elastalert.util import EAException -def test_victor_ops(): +def test_victorops(): rule = { 'name': 'Test VictorOps Rule', 'type': 'any', @@ -47,7 +47,7 @@ def test_victor_ops(): assert expected_data == actual_data -def test_victor_ops_proxy(): +def test_victorops_proxy(): rule = { 'name': 'Test VictorOps Rule', 'type': 'any', @@ -86,7 +86,7 @@ def test_victor_ops_proxy(): assert expected_data == actual_data -def test_victor_ops_ea_exception(): +def test_victorops_ea_exception(): try: rule = { 'name': 'Test VictorOps Rule', @@ -110,3 +110,150 @@ def test_victor_ops_ea_exception(): alert.alert([match]) except EAException: assert True + + +def test_victorops_entity_id(): + rule = { + 'name': 'Test VictorOps Rule', + 'type': 'any', + 'victorops_api_key': 'xxxx1', + 'victorops_routing_key': 'xxxx2', + 'victorops_message_type': 'INFO', + 'victorops_entity_display_name': 'no entity display name', + 'victorops_entity_id': '12345', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = VictorOpsAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with 
mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        'message_type': rule['victorops_message_type'],
+        'entity_display_name': rule['victorops_entity_display_name'],
+        'monitoring_tool': 'ElastAlert',
+        'state_message': 'Test VictorOps Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n',
+        'entity_id': '12345'
+    }
+
+    mock_post_request.assert_called_once_with(
+        'https://alert.victorops.com/integrations/generic/20131114/alert/xxxx1/xxxx2',
+        data=mock.ANY,
+        headers={'content-type': 'application/json'},
+        proxies=None
+    )
+
+    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
+    assert expected_data == actual_data
+
+
+@pytest.mark.parametrize('message_type, expected_message_type', [
+    ('INFO', 'INFO'),
+    ('WARNING', 'WARNING'),
+    ('ACKNOWLEDGEMENT', 'ACKNOWLEDGEMENT'),
+    ('CRITICAL', 'CRITICAL'),
+    ('RECOVERY', 'RECOVERY')
+])
+def test_victorops_message_type(message_type, expected_message_type):
+    rule = {
+        'name': 'Test VictorOps Rule',
+        'type': 'any',
+        'victorops_api_key': 'xxxx1',
+        'victorops_routing_key': 'xxxx2',
+        'victorops_message_type': message_type,
+        'victorops_entity_display_name': 'no entity display name',
+        'alert': []
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = VictorOpsAlerter(rule)
+    match = {
+        '@timestamp': '2021-01-01T00:00:00',
+        'somefield': 'foobarbaz'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        'message_type': expected_message_type,
+        'entity_display_name': rule['victorops_entity_display_name'],
+        'monitoring_tool': 'ElastAlert',
+        'state_message': 'Test VictorOps Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n'
+    }
+
+    mock_post_request.assert_called_once_with(
+        'https://alert.victorops.com/integrations/generic/20131114/alert/xxxx1/xxxx2',
+        data=mock.ANY,
+        headers={'content-type': 'application/json'},
+        proxies=None
+    )
+
+    actual_data = json.loads(mock_post_request.call_args_list[0][1]['data'])
+    assert expected_data == actual_data
+
+
+def test_victorops_getinfo():
+    rule = {
+        'name': 'Test VictorOps Rule',
+        'type': 'any',
+        'victorops_api_key': 'xxxx1',
+        'victorops_routing_key': 'xxxx2',
+        'victorops_message_type': 'INFO',
+        'alert': []
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = VictorOpsAlerter(rule)
+
+    expected_data = {
+        'type': 'victorops',
+        'victorops_routing_key': 'xxxx2'
+    }
+    actual_data = alert.get_info()
+    assert expected_data == actual_data
+
+
+@pytest.mark.parametrize('victorops_api_key, victorops_routing_key, victorops_message_type, expected_data', [
+    ('',      '',      '',     True),
+    ('xxxx1', '',      '',     True),
+    ('',      'xxxx2', '',     True),
+    ('',      '',      'INFO', True),
+    ('xxxx1', 'xxxx2', '',     True),
+    ('xxxx1', '',      'INFO', True),
+    ('',      'xxxx2', 'INFO', True),
+    ('xxxx1', 'xxxx2', 'INFO',
+     {
+         'type': 'victorops',
+         'victorops_routing_key': 'xxxx2'
+     }),
+])
+def test_victorops_key_error(victorops_api_key, victorops_routing_key, victorops_message_type, expected_data):
+    try:
+        rule = {
+            'name': 'Test VictorOps Rule',
+            'type': 'any',
+            'alert': []
+        }
+
+        if victorops_api_key != '':
+            rule['victorops_api_key'] = victorops_api_key
+
+        if victorops_routing_key != '':
+            rule['victorops_routing_key'] = victorops_routing_key
+
+        if victorops_message_type != '':
+            rule['victorops_message_type'] = victorops_message_type
+
+        rules_loader = FileRulesLoader({})
+        rules_loader.load_modules(rule)
+        alert = VictorOpsAlerter(rule)
+
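+        # for rows where a required key was left unset, the construction
+        # above is expected to raise KeyError, caught below and asserted
+        # against the parametrized expectation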
+ actual_data = alert.get_info() + assert expected_data == actual_data + except KeyError: + assert expected_data From c6291900066ee3905a80aeb41b98ee0da1a130a5 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 4 Jun 2021 22:03:37 +0900 Subject: [PATCH 0278/1065] Bump pytest-cov from 2.12.0 to 2.12.1 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 09da6ae27..d7b7b09f9 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,6 +1,6 @@ -r requirements.txt docutils<0.17 -pytest-cov==2.12.0 +pytest-cov==2.12.1 flake8 flake8-absolute-import m2r2 From b305a8dd40368c7f501873953a78b2e78fd5be2a Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 4 Jun 2021 23:12:50 +0900 Subject: [PATCH 0279/1065] Restore the mistaken correction --- elastalert/alerters/ses.py | 6 +++--- elastalert/alerters/sns.py | 4 ++-- tests/alerters/ses_test.py | 9 +++++---- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/elastalert/alerters/ses.py b/elastalert/alerters/ses.py index fdb118814..65a7501dc 100644 --- a/elastalert/alerters/ses.py +++ b/elastalert/alerters/ses.py @@ -11,12 +11,12 @@ class SesAlerter(Alerter): def __init__(self, *args): super(SesAlerter, self).__init__(*args) - self.aws_access_key_id = self.rule['ses_aws_access_key_id'] - self.aws_secret_access_key = self.rule['ses_aws_secret_access_key'] + self.aws_access_key_id = self.rule.get('ses_aws_access_key_id') + self.aws_secret_access_key = self.rule.get('ses_aws_secret_access_key') self.aws_region = self.rule.get('ses_aws_region', 'us-east-1') self.aws_profile = self.rule.get('ses_aws_profile', '') - self.from_addr = self.rule.get('ses_from_addr') + self.from_addr = self.rule['ses_from_addr'] # Convert email to a list if it isn't already if isinstance(self.rule['ses_email'], str): diff --git a/elastalert/alerters/sns.py b/elastalert/alerters/sns.py index f3b96fbaf..2dde4a619 100644 --- a/elastalert/alerters/sns.py +++ b/elastalert/alerters/sns.py @@ -11,8 +11,8 @@ class SnsAlerter(Alerter): def __init__(self, *args): super(SnsAlerter, self).__init__(*args) self.sns_topic_arn = self.rule['sns_topic_arn'] - self.sns_aws_access_key_id = self.rule['sns_aws_access_key_id'] - self.sns_aws_secret_access_key = self.rule['sns_aws_secret_access_key'] + self.sns_aws_access_key_id = self.rule.get('sns_aws_access_key_id') + self.sns_aws_secret_access_key = self.rule.get('sns_aws_secret_access_key') self.sns_aws_region = self.rule.get('sns_aws_region', 'us-east-1') self.profile = self.rule.get('sns_aws_profile', None) diff --git a/tests/alerters/ses_test.py b/tests/alerters/ses_test.py index b1a6b93f7..1a8ad3b9e 100644 --- a/tests/alerters/ses_test.py +++ b/tests/alerters/ses_test.py @@ -9,6 +9,7 @@ def test_ses_getinfo(): 'name': 'Test Rule', 'type': 'any', 'alert_subject': 'Cool subject', + 'ses_from_addr': 'test2@aaa.com', 'ses_email': 'test@aaa.com', 'ses_aws_access_key_id': 'access key id', 'ses_aws_secret_access_key': 'secret access key', @@ -27,10 +28,10 @@ def test_ses_getinfo(): @pytest.mark.parametrize('ses_email, ses_from_addr, expected_data', [ - ('', '', True), - ('test@aaa.com', '', True), - ('', 'xxxxx2', True), - ('test@aaa.com', 'xxxxx2', + ('', '', True), + ('test@aaa.com', '', True), + ('', 'test2@aaa.com', True), + ('test@aaa.com', 'test2@aaa.com', { 'type': 'ses', 'recipients': ['test@aaa.com'] From bdb83605edcc163994e0392502a6bd07b654e607 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Fri, 4 Jun 2021 10:59:16 -0400 Subject: 
[PATCH 0280/1065] Synchronize CHANGELOG with all PRs through 6/4/2021 11am EDT --- CHANGELOG.md | 30 ++++++++++++++++++++++++++---- 1 file changed, 26 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cf3e0ed19..db4660351 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,12 +15,34 @@ - None ## New features -- Expose rule scheduler properties as configurable settings - [#192](https://github.com/jertel/elastalert2/pull/192) - #jertel +- Add support for RocketChat - [#182](https://github.com/jertel/elastalert2/pull/182) - @nsano-rururu +- Expose rule scheduler properties as configurable settings - [#192](https://github.com/jertel/elastalert2/pull/192) - @jertel +- Exclude empty observables from TheHive requests - [#193](https://github.com/jertel/elastalert2/pull/193) - @LaZyDK +- Ensure TheHive tags are converted to strings before submitting TheHive request - [#206](https://github.com/jertel/elastalert2/pull/206) - @LaZyDK +- Add support for Elasticsearch API key authentication - [#208](https://github.com/jertel/elastalert2/pull/208) - @vbisserie +- Add support for Elasticsearch 7.13 for building Kibana Discover URLs - [#212](https://github.com/jertel/elastalert2/pull/212) - @nsano-rururu +- Follow symbolic links when traversing rules folder for rule files - [#214](https://github.com/jertel/elastalert2/pull/214) - @vbisserie +- Support optional suppression of SSL log warnings when http-posting alerts - [#222](https://github.com/jertel/elastalert2/pull/222/files) - @nsano-rururu ## Other changes -- Speed up unit tests by adding default parallelism - [164](https://github.com/jertel/elastalert2/pull/164) - @ferozsalam -- Remove unused writeback_alias and fix --patience argument [167](https://github.com/jertel/elastalert2/pull/167) - @mrfroggg. 
-- Fix Bearer token auth in initialisation script - [169](https://github.com/jertel/elastalert2/pull/169) - @ferozsalam +- Speed up unit tests by adding default parallelism - [#164](https://github.com/jertel/elastalert2/pull/164) - @ferozsalam +- Remove unused writeback_alias and fix --patience argument - [#167](https://github.com/jertel/elastalert2/pull/167) - @mrfroggg +- Fix Bearer token auth in initialisation script - [#169](https://github.com/jertel/elastalert2/pull/169) - @ferozsalam +- Finish refactoring alerters and tests into individual files - [#175, et al](https://github.com/jertel/elastalert2/pull/175) - @ferozsalam +- Improve HTTP POST alert documentation - [#178](https://github.com/jertel/elastalert2/pull/178) - @nsano-rururu +- Upgrade Sphinx from 3.5.4 to 4.0.2 - [#179](https://github.com/jertel/elastalert2/pull/179) - @nsano-rururu +- Fix Sphinx dependency version - [#181](https://github.com/jertel/elastalert2/pull/181) - @ferozsalam +- Switch to absolute imports - [#198](https://github.com/jertel/elastalert2/pull/198) - @ferozsalam +- Encode JSON output before writing test data - [#215](https://github.com/jertel/elastalert2/pull/215) - @vbisserie +- Update pytest from 6.0.0 to 6.2.4 - [#223](https://github.com/jertel/elastalert2/pull/223/files) - @nsano-rururu +- Ensure ChatWork alerter fails to initialize if missing required args - [#224](https://github.com/jertel/elastalert2/pull/224) - @nsano-rururu +- Ensure DataDog alerter fails to initialize if missing required args - [#225](https://github.com/jertel/elastalert2/pull/225) - @nsano-rururu +- Ensure DingTalk alerter fails to initialize if missing required args - [#226](https://github.com/jertel/elastalert2/pull/226) - @nsano-rururu +- Ensure Zabbix alerter fails to initialize if missing required args - [#227](https://github.com/jertel/elastalert2/pull/227) - @nsano-rururu +- MS Teams alerter no longer requires ms_teams_alert_summary arg - [#228](https://github.com/jertel/elastalert2/pull/228) - @nsano-rururu +- Improve Gitter alerter by explicitly specifying arg names - [#230](https://github.com/jertel/elastalert2/pull/230) - @nsano-rururu +- Add more alerter test code coverage - [#231](https://github.com/jertel/elastalert2/pull/231) - @nsano-rururu +- Upgrade pytest-cov from 2.12.0 to 2.12.1 - [#232](https://github.com/jertel/elastalert2/pull/232) - @nsano-rururu # 2.1.0 From 0aeefbddae707469b8773cf0f99b1d8ff23c0e02 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 5 Jun 2021 00:04:57 +0900 Subject: [PATCH 0281/1065] change mock to build in unittest.mock --- elastalert/test_rule.py | 2 +- requirements.txt | 1 - setup.py | 1 - tests/alerters/alerta_test.py | 2 +- tests/alerters/chatwork_test.py | 2 +- tests/alerters/command_test.py | 2 +- tests/alerters/datadog_test.py | 2 +- tests/alerters/dingtalk_test.py | 2 +- tests/alerters/discord_test.py | 2 +- tests/alerters/email_test.py | 2 +- tests/alerters/gitter_test.py | 2 +- tests/alerters/googlechat_test.py | 2 +- tests/alerters/httppost_test.py | 2 +- tests/alerters/jira_test.py | 2 +- tests/alerters/line_test.py | 2 +- tests/alerters/mattermost_test.py | 2 +- tests/alerters/opsgenie_test.py | 2 +- tests/alerters/pagerduty_test.py | 2 +- tests/alerters/pagertree_test.py | 2 +- tests/alerters/rocketchat_test.py | 2 +- tests/alerters/servicenow_test.py | 2 +- tests/alerters/slack_test.py | 2 +- tests/alerters/teams_test.py | 2 +- tests/alerters/telegram_test.py | 2 +- tests/alerters/thehive_test.py | 2 +- tests/alerters/victorops_test.py | 2 +- 
tests/alerters/zabbix_test.py | 2 +- tests/alerts_test.py | 2 +- tests/base_test.py | 2 +- tests/config_test.py | 2 +- tests/conftest.py | 2 +- tests/loaders_test.py | 2 +- tests/rules_test.py | 2 +- tests/util_test.py | 2 +- 34 files changed, 32 insertions(+), 34 deletions(-) diff --git a/elastalert/test_rule.py b/elastalert/test_rule.py index 041fa04e7..15ddbfdd4 100644 --- a/elastalert/test_rule.py +++ b/elastalert/test_rule.py @@ -10,7 +10,7 @@ import string import sys -import mock +from unittest import mock from elastalert.config import load_conf from elastalert.elastalert import ElastAlerter diff --git a/requirements.txt b/requirements.txt index c6ad77062..797f19871 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,6 @@ exotel>=0.1.3 Jinja2==2.11.3 jira>=2.0.0 jsonschema>=3.0.2 -mock>=2.0.0 prison>=0.1.2 prometheus_client>=0.10.1 py-zabbix>=1.1.3 diff --git a/setup.py b/setup.py index da89708c9..67052553b 100644 --- a/setup.py +++ b/setup.py @@ -36,7 +36,6 @@ 'jira>=2.0.0', 'Jinja2==2.11.3', 'jsonschema>=3.0.2', - 'mock>=2.0.0', 'prison>=0.1.2', 'prometheus_client>=0.10.1', 'py-zabbix>=1.1.3', diff --git a/tests/alerters/alerta_test.py b/tests/alerters/alerta_test.py index a9d827797..dc248d145 100644 --- a/tests/alerters/alerta_test.py +++ b/tests/alerters/alerta_test.py @@ -1,7 +1,7 @@ import datetime import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/chatwork_test.py b/tests/alerters/chatwork_test.py index bc81ae34a..3f9f42b48 100644 --- a/tests/alerters/chatwork_test.py +++ b/tests/alerters/chatwork_test.py @@ -1,4 +1,4 @@ -import mock +from unittest import mock import pytest from requests import RequestException from requests.auth import HTTPProxyAuth diff --git a/tests/alerters/command_test.py b/tests/alerters/command_test.py index 73efb7344..8ea408fdb 100644 --- a/tests/alerters/command_test.py +++ b/tests/alerters/command_test.py @@ -1,7 +1,7 @@ import json import subprocess -import mock +from unittest import mock import pytest from elastalert.alerters.command import CommandAlerter diff --git a/tests/alerters/datadog_test.py b/tests/alerters/datadog_test.py index 74b7c6547..2d030be01 100644 --- a/tests/alerters/datadog_test.py +++ b/tests/alerters/datadog_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/dingtalk_test.py b/tests/alerters/dingtalk_test.py index 5a4a59ae1..02fcf8b04 100644 --- a/tests/alerters/dingtalk_test.py +++ b/tests/alerters/dingtalk_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException from requests.auth import HTTPProxyAuth diff --git a/tests/alerters/discord_test.py b/tests/alerters/discord_test.py index 693ff9be0..aae0598d4 100644 --- a/tests/alerters/discord_test.py +++ b/tests/alerters/discord_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException from requests.auth import HTTPProxyAuth diff --git a/tests/alerters/email_test.py b/tests/alerters/email_test.py index 6862cb6a4..85a643cbb 100644 --- a/tests/alerters/email_test.py +++ b/tests/alerters/email_test.py @@ -1,6 +1,6 @@ import base64 -import mock +from unittest import mock import pytest from elastalert.alerters.email import EmailAlerter diff --git a/tests/alerters/gitter_test.py b/tests/alerters/gitter_test.py index 696e88599..ee61538fa 100644 --- 
a/tests/alerters/gitter_test.py +++ b/tests/alerters/gitter_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/googlechat_test.py b/tests/alerters/googlechat_test.py index b60317e40..0af9ce7e3 100644 --- a/tests/alerters/googlechat_test.py +++ b/tests/alerters/googlechat_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/httppost_test.py b/tests/alerters/httppost_test.py index e84f45fef..428f55bf5 100644 --- a/tests/alerters/httppost_test.py +++ b/tests/alerters/httppost_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/jira_test.py b/tests/alerters/jira_test.py index f7f3a807e..b38472de3 100644 --- a/tests/alerters/jira_test.py +++ b/tests/alerters/jira_test.py @@ -1,6 +1,6 @@ import datetime -import mock +from unittest import mock import pytest from jira import JIRAError diff --git a/tests/alerters/line_test.py b/tests/alerters/line_test.py index 31340eb23..f647af26a 100644 --- a/tests/alerters/line_test.py +++ b/tests/alerters/line_test.py @@ -1,4 +1,4 @@ -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/mattermost_test.py b/tests/alerters/mattermost_test.py index 1c99e45fc..b2df662d4 100644 --- a/tests/alerters/mattermost_test.py +++ b/tests/alerters/mattermost_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/opsgenie_test.py b/tests/alerters/opsgenie_test.py index 8e0c8ad77..0547d7e61 100644 --- a/tests/alerters/opsgenie_test.py +++ b/tests/alerters/opsgenie_test.py @@ -1,4 +1,4 @@ -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/pagerduty_test.py b/tests/alerters/pagerduty_test.py index e85333078..82f824d7d 100644 --- a/tests/alerters/pagerduty_test.py +++ b/tests/alerters/pagerduty_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/pagertree_test.py b/tests/alerters/pagertree_test.py index 1d61169ad..308994a43 100644 --- a/tests/alerters/pagertree_test.py +++ b/tests/alerters/pagertree_test.py @@ -2,7 +2,7 @@ import re import uuid -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/rocketchat_test.py b/tests/alerters/rocketchat_test.py index ea20fb5a0..6cd5ef95a 100644 --- a/tests/alerters/rocketchat_test.py +++ b/tests/alerters/rocketchat_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/servicenow_test.py b/tests/alerters/servicenow_test.py index 2869bf656..065697009 100644 --- a/tests/alerters/servicenow_test.py +++ b/tests/alerters/servicenow_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/slack_test.py b/tests/alerters/slack_test.py index 7b660310d..084688d36 100644 --- a/tests/alerters/slack_test.py +++ b/tests/alerters/slack_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import 
RequestException diff --git a/tests/alerters/teams_test.py b/tests/alerters/teams_test.py index 72970427b..b0f0c3c46 100644 --- a/tests/alerters/teams_test.py +++ b/tests/alerters/teams_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/telegram_test.py b/tests/alerters/telegram_test.py index 95a8d02d9..bd97d4e2d 100644 --- a/tests/alerters/telegram_test.py +++ b/tests/alerters/telegram_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException from requests.auth import HTTPProxyAuth diff --git a/tests/alerters/thehive_test.py b/tests/alerters/thehive_test.py index d61f4c50b..40458d3f4 100644 --- a/tests/alerters/thehive_test.py +++ b/tests/alerters/thehive_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/victorops_test.py b/tests/alerters/victorops_test.py index 649f9aa32..88c1051e7 100644 --- a/tests/alerters/victorops_test.py +++ b/tests/alerters/victorops_test.py @@ -1,6 +1,6 @@ import json -import mock +from unittest import mock import pytest from requests import RequestException diff --git a/tests/alerters/zabbix_test.py b/tests/alerters/zabbix_test.py index 1cee2ec75..0a1d2787e 100644 --- a/tests/alerters/zabbix_test.py +++ b/tests/alerters/zabbix_test.py @@ -1,5 +1,5 @@ import pytest -import mock +from unittest import mock from elastalert.alerters.zabbix import ZabbixAlerter from elastalert.loaders import FileRulesLoader diff --git a/tests/alerts_test.py b/tests/alerts_test.py index fab081b8e..6bcbf6163 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -2,7 +2,7 @@ import datetime import json -import mock +from unittest import mock from elastalert.alerts import Alerter from elastalert.alerts import BasicMatchString diff --git a/tests/base_test.py b/tests/base_test.py index 3ff375118..ff17e3ce1 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -5,7 +5,7 @@ import threading import elasticsearch -import mock +from unittest import mock import pytest from elasticsearch.exceptions import ConnectionError from elasticsearch.exceptions import ElasticsearchException diff --git a/tests/config_test.py b/tests/config_test.py index 6c03f185e..36266b91a 100644 --- a/tests/config_test.py +++ b/tests/config_test.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- import os -import mock +from unittest import mock import datetime from elastalert.config import load_conf diff --git a/tests/conftest.py b/tests/conftest.py index 0bb0b5325..07aef53eb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,7 @@ import logging import os -import mock +from unittest import mock import pytest import elastalert.elastalert diff --git a/tests/loaders_test.py b/tests/loaders_test.py index 1bd570659..262b01253 100644 --- a/tests/loaders_test.py +++ b/tests/loaders_test.py @@ -3,7 +3,7 @@ import datetime import os -import mock +from unittest import mock import pytest import elastalert.alerts diff --git a/tests/rules_test.py b/tests/rules_test.py index 5f6421d87..e9b275089 100644 --- a/tests/rules_test.py +++ b/tests/rules_test.py @@ -2,7 +2,7 @@ import copy import datetime -import mock +from unittest import mock import pytest from elastalert.ruletypes import AnyRule diff --git a/tests/util_test.py b/tests/util_test.py index 2a24446d3..893278b06 100644 --- a/tests/util_test.py +++ b/tests/util_test.py @@ -2,7 +2,7 @@ 
from datetime import datetime from datetime import timedelta -import mock +from unittest import mock import pytest from dateutil.parser import parse as dt From 7af64f6f12868657ab6ba882539f39953812356e Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Fri, 4 Jun 2021 12:56:24 -0400 Subject: [PATCH 0282/1065] Updated changelog with new PR --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index db4660351..9ec008969 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -43,6 +43,7 @@ - Improve Gitter alerter by explicitly specifying arg names - [#230](https://github.com/jertel/elastalert2/pull/230) - @nsano-rururu - Add more alerter test code coverage - [#231](https://github.com/jertel/elastalert2/pull/231) - @nsano-rururu - Upgrade pytest-cov from 2.12.0 to 2.12.1 - [#232](https://github.com/jertel/elastalert2/pull/232) - @nsano-rururu +- Migrate away from external test mock dependency - [#233](https://github.com/jertel/elastalert2/pull/233) - @nsano-rururu # 2.1.0 From 801885acd9ac4449d3198a48599a0e9826ae3806 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sat, 5 Jun 2021 08:06:14 +0100 Subject: [PATCH 0283/1065] Refactor running instructions Centralise all the documentation on how to run ElastAlert in one page. - Move the ElastAlert invocation arguments to the 'Running ElastAlert' page - Move the Docker and Helm chart instructions to the 'Running ElastAlert' page - Tidy the 'Running ElastAlert' page --- README.md | 28 +-- docs/source/elastalert.rst | 47 ----- docs/source/running_elastalert.rst | 271 ++++++++++++++++++++++++----- 3 files changed, 230 insertions(+), 116 deletions(-) diff --git a/README.md b/README.md index de2856600..cd9971afc 100644 --- a/README.md +++ b/README.md @@ -17,27 +17,12 @@ The full list of platforms that ElastAlert 2 can fire alerts into can be found [ Please see our [contributing guidelines][6]. -## Docker +## Docker and Kubernetes -If you're interested in a pre-built Docker image check out the [elastalert2][2] project on Docker Hub. - -Be aware that the `latest` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need utilize a versioned tag, such as `2.1.0` instead, or `2` if you are comfortable with always using the latest released version of ElastAlert 2. - -A properly configured config.yaml file must be mounted into the container during startup of the container. Use the [example file][1] provided as a template, and once saved locally to a file such as `/tmp/elastalert.yaml`, run the container as follows: - -```bash -docker run -d -v /tmp/elastalert.yaml:/opt/elastalert/config.yaml jertel/elastalert2 -``` - -To build the image locally run the following command: - -```bash -docker build . -t elastalert2 -``` - -## Kubernetes - -See the Helm chart [README.md][7] for information on installing this application into an existing Kubernetes cluster. +ElastAlert 2 is well-suited to being run as a microservice, and is available +as a [Docker container][2]. A [Helm chart ][7] is also maintained for easy +configuration as a Kubernetes deployment. For more instructions on how to +configure and run ElastAlert 2 in this way, see [here][8]. ## License @@ -50,4 +35,5 @@ ElastAlert 2 is licensed under the [Apache License, Version 2.0][5]. 
 [4]: https://elastalert2.readthedocs.io/en/latest/ruletypes.html#alerts
 [5]: https://www.apache.org/licenses/LICENSE-2.0
 [6]: https://github.com/jertel/elastalert2/blob/master/CONTRIBUTING.md
-[7]: https://github.com/jertel/elastalert2/tree/master/chart/elastalert2
\ No newline at end of file
+[7]: https://github.com/jertel/elastalert2/tree/master/chart/elastalert2
+[8]: https://elastalert2.readthedocs.io/en/latest/running_elastalert.html
diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst
index 625ae2296..bb942be16 100755
--- a/docs/source/elastalert.rst
+++ b/docs/source/elastalert.rst
@@ -244,50 +244,3 @@ adjust the logging format.
 
 For details, see the end of ``config.yaml.example`` where you can find an example logging
 configuration.
-
-
-.. _runningelastalert:
-
-Running ElastAlert
-==================
-
-``$ python elastalert/elastalert.py``
-
-Several arguments are available when running ElastAlert:
-
-``--config`` will specify the configuration file to use. The default is ``config.yaml``.
-
-``--debug`` will run ElastAlert in debug mode. This will increase the logging verboseness, change
-all alerts to ``DebugAlerter``, which prints alerts and suppresses their normal action, and skips writing
-search and alert metadata back to Elasticsearch. Not compatible with `--verbose`.
-
-``--verbose`` will increase the logging verboseness, which allows you to see information about the state
-of queries. Not compatible with `--debug`.
-
-``--start <timestamp>`` will force ElastAlert to begin querying from the given time, instead of the default,
-querying from the present. The timestamp should be ISO8601, e.g. ``YYYY-MM-DDTHH:MM:SS`` (UTC) or with timezone
-``YYYY-MM-DDTHH:MM:SS-08:00`` (PST). Note that if querying over a large date range, no alerts will be
-sent until that rule has finished querying over the entire time period. To force querying from the current time, use "NOW".
-
-``--end <timestamp>`` will cause ElastAlert to stop querying at the specified timestamp. By default, ElastAlert
-will periodically query until the present indefinitely.
-
-``--rule <rule.yaml>`` will only run the given rule. The rule file may be a complete file path or a filename in ``rules_folder``
-or its subdirectories.
-
-``--silence <unit>=<number>`` will silence the alerts for a given rule for a period of time. The rule must be specified using
-``--rule``. <unit> is one of days, weeks, hours, minutes or seconds. <number> is an integer. For example,
-``--rule noisy_rule.yaml --silence hours=4`` will stop noisy_rule from generating any alerts for 4 hours.
-
-``--es_debug`` will enable logging for all queries made to Elasticsearch.
-
-``--es_debug_trace <trace.log>`` will enable logging curl commands for all queries made to Elasticsearch to the
-specified log file. ``--es_debug_trace`` is passed through to `elasticsearch.py
-<http://elasticsearch-py.readthedocs.io/en/master/index.html#logging>`_ which logs `localhost:9200`
-instead of the actual ``es_host``:``es_port``.
-
-``--end <timestamp>`` will force ElastAlert to stop querying after the given time, instead of the default,
-querying to the present time. This really only makes sense when running standalone. The timestamp is formatted
-as ``YYYY-MM-DDTHH:MM:SS`` (UTC) or with timezone ``YYYY-MM-DDTHH:MM:SS-XX:00`` (UTC-XX).
-
-``--pin_rules`` will stop ElastAlert from loading, reloading or removing rules based on changes to their config files.
diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst
index 268abfccf..f20154063 100644
--- a/docs/source/running_elastalert.rst
+++ b/docs/source/running_elastalert.rst
@@ -1,7 +1,114 @@
 .. _tutorial:
 
-Running ElastAlert for the First Time
-=====================================
+Running ElastAlert 2
+********************
+
+ElastAlert 2 can easily be run as :ref:`a Docker container <docker-instructions>`
+or directly on your machine as :ref:`a Python package <python-instructions>`.
+If you are not interested in modifying the internals of ElastAlert, the Docker
+container is recommended for ease of use.
+
+Configuration flags
+===================
+
+However you choose to run ElastAlert, the ElastAlert process is started by invoking
+``python elastalert/elastalert.py``.
+
+This command accepts several configuration flags:
+
+``--config`` will specify the configuration file to use. The default is
+``config.yaml``.
+
+``--debug`` will run ElastAlert in debug mode. This will increase the logging
+verboseness, change all alerts to ``DebugAlerter``, which prints alerts and
+suppresses their normal action, and skips writing search and alert metadata back
+to Elasticsearch. Not compatible with `--verbose`.
+
+``--verbose`` will increase the logging verboseness, which allows you to see
+information about the state of queries. Not compatible with `--debug`.
+
+``--start <timestamp>`` will force ElastAlert to begin querying from the given
+time, instead of the default, querying from the present. The timestamp should be
+ISO8601, e.g. ``YYYY-MM-DDTHH:MM:SS`` (UTC) or with timezone
+``YYYY-MM-DDTHH:MM:SS-08:00`` (PST). Note that if querying over a large date
+range, no alerts will be sent until that rule has finished querying over the
+entire time period. To force querying from the current time, use "NOW".
+
+``--end <timestamp>`` will cause ElastAlert to stop querying at the specified
+timestamp. By default, ElastAlert will periodically query until the present
+indefinitely.
+
+``--rule <rule.yaml>`` will only run the given rule. The rule file may be a
+complete file path or a filename in ``rules_folder`` or its subdirectories.
+
+``--silence <unit>=<number>`` will silence the alerts for a given rule for a
+period of time. The rule must be specified using ``--rule``. <unit> is one of
+days, weeks, hours, minutes or seconds. <number> is an integer. For example,
+``--rule noisy_rule.yaml --silence hours=4`` will stop noisy_rule from
+generating any alerts for 4 hours.
+
+``--es_debug`` will enable logging for all queries made to Elasticsearch.
+
+``--es_debug_trace <trace.log>`` will enable logging curl commands for all
+queries made to Elasticsearch to the specified log file. ``--es_debug_trace`` is
+passed through to `elasticsearch.py
+<http://elasticsearch-py.readthedocs.io/en/master/index.html#logging>`_ which
+logs `localhost:9200` instead of the actual ``es_host``:``es_port``.
+
+``--end <timestamp>`` will force ElastAlert to stop querying after the given
+time, instead of the default, querying to the present time. This really only
+makes sense when running standalone. The timestamp is formatted as
+``YYYY-MM-DDTHH:MM:SS`` (UTC) or with timezone ``YYYY-MM-DDTHH:MM:SS-XX:00``
+(UTC-XX).
+
+``--pin_rules`` will stop ElastAlert from loading, reloading or removing rules
+based on changes to their config files.
+
+.. _docker-instructions:
+
+As a Docker container
+=====================
+
+If you're interested in a pre-built Docker image check out the
+`elastalert2 <https://hub.docker.com/r/jertel/elastalert2>`_ project on Docker Hub.
+
+Be aware that the ``latest`` tag of the image represents the latest commit into
+the master branch. If you prefer to upgrade more slowly you will need to utilize
+a versioned tag, such as ``2.1.0`` instead, or ``2`` if you are comfortable with
+always using the latest released version of ElastAlert 2.
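The ``--silence <unit>=<number>`` flag documented in the hunk above maps naturally onto the keyword arguments of Python's ``datetime.timedelta``. The following standalone sketch illustrates that mapping only; it is not ElastAlert's actual argument-parsing code, and the function name is invented for illustration:

    from datetime import timedelta

    def parse_silence(expression):
        # Split '<unit>=<number>' (e.g. 'hours=4') into its two halves.
        unit, _, number = expression.partition('=')
        # The documented units are exactly the keyword arguments that
        # timedelta accepts, so the expression can be applied directly.
        if unit not in ('days', 'weeks', 'hours', 'minutes', 'seconds'):
            raise ValueError('unsupported unit: %s' % unit)
        return timedelta(**{unit: int(number)})

    print(parse_silence('hours=4'))  # prints 4:00:00, as in "--silence hours=4"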
+ +A properly configured config.yaml file must be mounted into the container during +startup of the container. Use the `example file +`_ +provided as a template, and once saved locally to a file such as +``/tmp/elastalert.yaml``, run the container as follows: + +.. code-block:: + + docker run -d -v /tmp/elastalert.yaml:/opt/elastalert/config.yaml jertel/elastalert2 + +To build the image locally run the following command: + +.. code-block:: + + docker build . -t elastalert2 + +.. _kubernetes-instructions: + +As a Kubernetes deployment +========================== + +The Docker container for ElastAlert 2 can be used directly as a Kubernetes +deployment, but for convenience, a Helm chart is also available. See the +instructions provided `on Github +`_ for more information on +how to install, configure, and run the chart. + +.. _python-instructions: + +As a Python package +=================== Requirements ------------ @@ -31,45 +138,57 @@ Install the module:: $ pip install "setuptools>=11.3" $ python setup.py install -Depending on the version of Elasticsearch, you may need to manually install the correct version of elasticsearch-py. +Depending on the version of Elasticsearch, you may need to manually install the +correct version of elasticsearch-py. Elasticsearch 5.0+:: $ pip install "elasticsearch>=5.0.0" -Elasticsearch 2.X:: - - $ pip install "elasticsearch<3.0.0" - -Next, open up config.yaml.example. In it, you will find several configuration options. ElastAlert may be run without changing any of these settings. +Next, open up config.yaml.example. In it, you will find several configuration +options. ElastAlert may be run without changing any of these settings. -``rules_folder`` is where ElastAlert will load rule configuration files from. It will attempt to load every .yaml file in the folder. Without any valid rules, ElastAlert will not start. ElastAlert will also load new rules, stop running missing rules, and restart modified rules as the files in this folder change. For this tutorial, we will use the example_rules folder. +``rules_folder`` is where ElastAlert will load rule configuration files from. It +will attempt to load every .yaml file in the folder. Without any valid rules, +ElastAlert will not start. ElastAlert will also load new rules, stop running +missing rules, and restart modified rules as the files in this folder change. +For this tutorial, we will use the example_rules folder. ``run_every`` is how often ElastAlert will query Elasticsearch. -``buffer_time`` is the size of the query window, stretching backwards from the time each query is run. This value is ignored for rules where ``use_count_query`` or ``use_terms_query`` is set to true. +``buffer_time`` is the size of the query window, stretching backwards from the +time each query is run. This value is ignored for rules where +``use_count_query`` or ``use_terms_query`` is set to true. -``es_host`` is the address of an Elasticsearch cluster where ElastAlert will store data about its state, queries run, alerts, and errors. Each rule may also use a different Elasticsearch host to query against. +``es_host`` is the address of an Elasticsearch cluster where ElastAlert will +store data about its state, queries run, alerts, and errors. Each rule may also +use a different Elasticsearch host to query against. ``es_port`` is the port corresponding to ``es_host``. -``use_ssl``: Optional; whether or not to connect to ``es_host`` using TLS; set to ``True`` or ``False``. 
+``use_ssl``: Optional; whether or not to connect to ``es_host`` using TLS; set +to ``True`` or ``False``. -``verify_certs``: Optional; whether or not to verify TLS certificates; set to ``True`` or ``False``. The default is ``True`` +``verify_certs``: Optional; whether or not to verify TLS certificates; set to +``True`` or ``False``. The default is ``True`` -``ssl_show_warn``: Optional; suppress TLS and certificate related warnings; set to ``True`` or ``False``. The default is ``True``. +``ssl_show_warn``: Optional; suppress TLS and certificate related warnings; set +to ``True`` or ``False``. The default is ``True``. -``client_cert``: Optional; path to a PEM certificate to use as the client certificate +``client_cert``: Optional; path to a PEM certificate to use as the client +certificate ``client_key``: Optional; path to a private key file to use as the client key -``ca_certs``: Optional; path to a CA cert bundle to use to verify SSL connections +``ca_certs``: Optional; path to a CA cert bundle to use to verify SSL +connections ``es_username``: Optional; basic-auth username for connecting to ``es_host``. ``es_password``: Optional; basic-auth password for connecting to ``es_host``. -``es_bearer``: Optional; bearer token authorization for connecting to ``es_host``. If bearer token is specified, login and password are ignored. +``es_bearer``: Optional; bearer token authorization for connecting to +``es_host``. If bearer token is specified, login and password are ignored. ``es_url_prefix``: Optional; URL prefix for the Elasticsearch endpoint. @@ -77,9 +196,11 @@ Next, open up config.yaml.example. In it, you will find several configuration op ``statsd_host``: Optional; statsd host. -``es_send_get_body_as``: Optional; Method for querying Elasticsearch - ``GET``, ``POST`` or ``source``. The default is ``GET`` +``es_send_get_body_as``: Optional; Method for querying Elasticsearch - ``GET``, +``POST`` or ``source``. The default is ``GET`` -``writeback_index`` is the name of the index in which ElastAlert will store data. We will create this index later. +``writeback_index`` is the name of the index in which ElastAlert will store +data. We will create this index later. ``alert_time_limit`` is the retry window for failed alerts. @@ -88,9 +209,13 @@ Save the file as ``config.yaml`` Setting Up Elasticsearch ------------------------ -ElastAlert saves information and metadata about its queries and its alerts back to Elasticsearch. This is useful for auditing, debugging, and it allows ElastAlert to restart and resume exactly where it left off. This is not required for ElastAlert to run, but highly recommended. +ElastAlert saves information and metadata about its queries and its alerts back +to Elasticsearch. This is useful for auditing, debugging, and it allows +ElastAlert to restart and resume exactly where it left off. This is not required +for ElastAlert to run, but highly recommended. -First, we need to create an index for ElastAlert to write to by running ``elastalert-create-index`` and following the instructions:: +First, we need to create an index for ElastAlert to write to by running +``elastalert-create-index`` and following the instructions:: $ elastalert-create-index New index name (Default elastalert_status) @@ -98,12 +223,15 @@ First, we need to create an index for ElastAlert to write to by running ``elasta New index elastalert_status created Done! -For information about what data will go here, see :ref:`ElastAlert Metadata Index `. 
+For information about what data will go here, see :ref:`ElastAlert Metadata
+Index <metadata>`.
 
 Creating a Rule
 ---------------
 
-Each rule defines a query to perform, parameters on what triggers a match, and a list of alerts to fire for each match. We are going to use ``example_rules/example_frequency.yaml`` as a template::
+Each rule defines a query to perform, parameters on what triggers a match, and a
+list of alerts to fire for each match. We are going to use
+``example_rules/example_frequency.yaml`` as a template::
 
     # From example_rules/example_frequency.yaml
     es_host: elasticsearch.example.com
     es_port: 14900
     name: Example frequency rule
     type: frequency
     index: logstash-*
     num_events: 50
     timeframe:
-        hours: 4
+      hours: 4
     filter:
     - term:
         some_field: "some_value"
     alert:
     - email
     email:
     - "elastalert@example.com"
 
-``es_host`` and ``es_port`` should point to the Elasticsearch cluster we want to query.
+``es_host`` and ``es_port`` should point to the Elasticsearch cluster we want to
+query.
 
-``name`` is the unique name for this rule. ElastAlert will not start if two rules share the same name.
+``name`` is the unique name for this rule. ElastAlert will not start if two
+rules share the same name.
 
-``type``: Each rule has a different type which may take different parameters. The ``frequency`` type means "Alert when more than ``num_events`` occur within ``timeframe``." For information other types, see :ref:`Rule types <ruletypes>`.
+``type``: Each rule has a different type which may take different parameters.
+The ``frequency`` type means "Alert when more than ``num_events`` occur within
+``timeframe``." For information on other types, see :ref:`Rule types <ruletypes>`.
 
-``index``: The name of the index(es) to query. If you are using Logstash, by default the indexes will match ``"logstash-*"``.
+``index``: The name of the index(es) to query. If you are using Logstash, by
+default the indexes will match ``"logstash-*"``.
 
-``num_events``: This parameter is specific to ``frequency`` type and is the threshold for when an alert is triggered.
+``num_events``: This parameter is specific to ``frequency`` type and is the
+threshold for when an alert is triggered.
 
 ``timeframe`` is the time period in which ``num_events`` must occur.
 
-``filter`` is a list of Elasticsearch filters that are used to filter results. Here we have a single term filter for documents with ``some_field`` matching ``some_value``. See :ref:`Writing Filters For Rules <writingfilters>` for more information. If no filters are desired, it should be specified as an empty list: ``filter: []``
+``filter`` is a list of Elasticsearch filters that are used to filter results.
+Here we have a single term filter for documents with ``some_field`` matching
+``some_value``. See :ref:`Writing Filters For Rules <writingfilters>` for more
+information. If no filters are desired, it should be specified as an empty list:
+``filter: []``
 
-``alert`` is a list of alerts to run on each match. For more information on alert types, see :ref:`Alerts <alerts>`. The email alert requires an SMTP server for sending mail. By default, it will attempt to use localhost. This can be changed with the ``smtp_host`` option.
+``alert`` is a list of alerts to run on each match. For more information on
+alert types, see :ref:`Alerts <alerts>`. The email alert requires an SMTP server
+for sending mail. By default, it will attempt to use localhost. This can be
+changed with the ``smtp_host`` option.
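As context for the ``filter`` description above: each entry in the list is passed to Elasticsearch essentially verbatim. The sketch below shows roughly what the single ``term`` entry from this example looks like inside a request body; the surrounding ``bool``/``range`` envelope is an assumption for illustration and is not copied from ElastAlert's query builder:

    import json

    # The one entry from the example rule's `filter` list, as a Python dict.
    rule_filters = [{'term': {'some_field': 'some_value'}}]

    # Hypothetical envelope: filters are ANDed together along with a time
    # window covering the buffer being queried.
    query = {
        'query': {
            'bool': {
                'filter': rule_filters + [
                    {'range': {'@timestamp': {'gt': '2021-01-15T14:22:00-08:00',
                                              'lte': '2021-01-15T15:07:00-08:00'}}}
                ]
            }
        }
    }

    print(json.dumps(query, indent=2))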
 ``email`` is a list of addresses to which alerts will be sent.
 
-There are many other optional configuration options, see :ref:`Common configuration options <commonconfig>`.
+There are many other optional configuration options, see :ref:`Common
+configuration options <commonconfig>`.
 
-All documents must have a timestamp field. ElastAlert will try to use ``@timestamp`` by default, but this can be changed with the ``timestamp_field`` option. By default, ElastAlert uses ISO8601 timestamps, though unix timestamps are supported by setting ``timestamp_type``.
+All documents must have a timestamp field. ElastAlert will try to use
+``@timestamp`` by default, but this can be changed with the ``timestamp_field``
+option. By default, ElastAlert uses ISO8601 timestamps, though unix timestamps
+are supported by setting ``timestamp_type``.
 
-As is, this rule means "Send an email to elastalert@example.com when there are more than 50 documents with ``some_field == some_value`` within a 4 hour period."
+As is, this rule means "Send an email to elastalert@example.com when there are
+more than 50 documents with ``some_field == some_value`` within a 4 hour
+period."
 
 Testing Your Rule
 -----------------
 
-Running the ``elastalert-test-rule`` tool will test that your config file successfully loads and run it in debug mode over the last 24 hours::
+Running the ``elastalert-test-rule`` tool will test that your config file
+successfully loads and run it in debug mode over the last 24 hours::
 
     $ elastalert-test-rule example_rules/example_frequency.yaml
 
-If you want to specify a configuration file to use, you can run it with the config flag::
+If you want to specify a configuration file to use, you can run it with the
+config flag::
 
     $ elastalert-test-rule --config example_rules/example_frequency.yaml
@@ -167,7 +316,9 @@ See :ref:`the testing section for more details <testing>`
 
 Running ElastAlert
 ------------------
 
-There are two ways of invoking ElastAlert. As a daemon, through Supervisor (http://supervisord.org/), or directly with Python. For easier debugging purposes in this tutorial, we will invoke it directly::
+There are two ways of invoking ElastAlert. As a daemon, through Supervisor
+(http://supervisord.org/), or directly with Python. For easier debugging
+purposes in this tutorial, we will invoke it directly::
 
     $ python -m elastalert.elastalert --verbose --rule example_frequency.yaml  # or use the entry point: elastalert --verbose --rule ...
     No handlers could be found for logger "Elasticsearch"
     ...
     INFO:root:Ran Example rule from 1-15 14:22 PST to 1-15 15:07 PST: 5 query hits (0 already seen), 0 matches, 0 alerts sent
     INFO:root:Sleeping for 297 seconds
 
-ElastAlert uses the python logging system and ``--verbose`` sets it to display INFO level messages. ``--rule example_frequency.yaml`` specifies the rule to run, otherwise ElastAlert will attempt to load the other rules in the example_rules folder.
+ElastAlert uses the python logging system and ``--verbose`` sets it to display
+INFO level messages. ``--rule example_frequency.yaml`` specifies the rule to
+run, otherwise ElastAlert will attempt to load the other rules in the
+example_rules folder.
 
 Let's break down the response to see what's happening.
 
 ``Queried rule Example rule from 1-15 14:22 PST to 1-15 15:07 PST: 5 hits``
 
-ElastAlert periodically queries the most recent ``buffer_time`` (default 45 minutes) for data matching the filters. Here we see that it matched 5 hits:
+ElastAlert periodically queries the most recent ``buffer_time`` (default 45
+minutes) for data matching the filters. Here we see that it matched 5 hits:
 
 .. code-block::
 
     POST http://elasticsearch.example.com:14900/elastalert_status/elastalert_status?op_type=create [status:201 request:0.025s]
 
-This line showing that ElastAlert uploaded a document to the elastalert_status index with information about the query it just made:
+This line shows that ElastAlert uploaded a document to the elastalert_status
+index with information about the query it just made:
 
 .. code-block::
 
     Ran Example rule from 1-15 14:22 PST to 1-15 15:07 PST: 5 query hits (0 already seen), 0 matches, 0 alerts sent
 
-The line means ElastAlert has finished processing the rule. For large time periods, sometimes multiple queries may be run, but their data will be processed together. ``query hits`` is the number of documents that are downloaded from Elasticsearch, ``already seen`` refers to documents that were already counted in a previous overlapping query and will be ignored, ``matches`` is the number of matches the rule type outputted, and ``alerts sent`` is the number of alerts actually sent. This may differ from ``matches`` because of options like ``realert`` and ``aggregation`` or because of an error.
+The line means ElastAlert has finished processing the rule. For large time
+periods, sometimes multiple queries may be run, but their data will be processed
+together. ``query hits`` is the number of documents that are downloaded from
+Elasticsearch, ``already seen`` refers to documents that were already counted in
+a previous overlapping query and will be ignored, ``matches`` is the number of
+matches the rule type outputted, and ``alerts sent`` is the number of alerts
+actually sent. This may differ from ``matches`` because of options like
+``realert`` and ``aggregation`` or because of an error.
 
 ``Sleeping for 297 seconds``
 
-The default ``run_every`` is 5 minutes, meaning ElastAlert will sleep until 5 minutes have elapsed from the last cycle before running queries for each rule again with time ranges shifted forward 5 minutes.
+The default ``run_every`` is 5 minutes, meaning ElastAlert will sleep until 5
+minutes have elapsed from the last cycle before running queries for each rule
+again with time ranges shifted forward 5 minutes.
 
-Say, over the next 297 seconds, 46 more matching documents were added to Elasticsearch::
+Say, over the next 297 seconds, 46 more matching documents were added to
+Elasticsearch::
 
     INFO:root:Queried rule Example rule from 1-15 14:27 PST to 1-15 15:12 PST: 51 hits
@@ -224,15 +390,24 @@ If an error occurred, such as an unreachable SMTP server, you may see:
 
     ERROR:root:Error while running alert email: Error connecting to SMTP host: [Errno 61] Connection refused
 
-Note that if you stop ElastAlert and then run it again later, it will look up ``elastalert_status`` and begin querying
-at the end time of the last query. This is to prevent duplication or skipping of alerts if ElastAlert is restarted.
+Note that if you stop ElastAlert and then run it again later, it will look up
+``elastalert_status`` and begin querying at the end time of the last query. This
+is to prevent duplication or skipping of alerts if ElastAlert is restarted.
 
-By using the ``--debug`` flag instead of ``--verbose``, the body of email will instead be logged and the email will not be sent. In addition, the queries will not be saved to ``elastalert_status``.
+By using the ``--debug`` flag instead of ``--verbose``, the body of email will
+By using the ``--debug`` flag instead of ``--verbose``, the body of email will +instead be logged and the email will not be sent. In addition, the queries will +not be saved to ``elastalert_status``. Disabling a Rule ---------------- -To stop a rule from executing, add or adjust the `is_enabled` option inside the rule's YAML file to `false`. When ElastAlert reloads the rules it will detect that the rule has been disabled and prevent it from executing. The rule reload interval defaults to 5 minutes but can be adjusted via the `run_every` configuration option. +To stop a rule from executing, add or adjust the `is_enabled` option inside the +rule's YAML file to `false`. When ElastAlert reloads the rules it will detect +that the rule has been disabled and prevent it from executing. The rule reload +interval defaults to 5 minutes but can be adjusted via the `run_every` +configuration option. -Optionally, once a rule has been disabled it is safe to remove the rule file, if there is no intention of re-activating the rule. However, be aware that removing a rule file without first disabling it will _not_ disable the rule! +Optionally, once a rule has been disabled it is safe to remove the rule file, if +there is no intention of re-activating the rule. However, be aware that removing +a rule file without first disabling it will _not_ disable the rule! From b92be422864205cae275957477760502abb5b06a Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sat, 5 Jun 2021 08:12:52 +0100 Subject: [PATCH 0284/1065] Add explanatory link to --config documentation --- docs/source/running_elastalert.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index f20154063..e1c58a7be 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -17,7 +17,8 @@ However you choose to run ElastAlert, the ElastAlert process is started by invok This command accepts several configuration flags: ``--config`` will specify the configuration file to use. The default is -``config.yaml``. +``config.yaml``. See :ref:`here` to understand what behaviour +can be configured in this file. ``--debug`` will run ElastAlert in debug mode. This will increase the logging verboseness, change all alerts to ``DebugAlerter``, which prints alerts and From 6c981f8aa19ce21a5bbf68031dc882f2fc707c5a Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sat, 5 Jun 2021 08:15:19 +0100 Subject: [PATCH 0285/1065] Make Docker and k8s instructions more prominent --- README.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index cd9971afc..f4156f5f7 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,13 @@ ElastAlert 2 is backwards compatible with the original ElastAlert rules. ![CI Workflow](https://github.com/jertel/elastalert/workflows/master_build_test/badge.svg) +## Docker and Kubernetes + +ElastAlert 2 is well-suited to being run as a microservice, and is available +as a [Docker container][2]. A [Helm chart ][7] is also maintained for easy +configuration as a Kubernetes deployment. For more instructions on how to +configure and run ElastAlert 2 in this way, see [here][8]. + ## Documentation Documentation, including an FAQ, for ElastAlert 2 can be found on [readthedocs.com][3]. This is the place to start if you're not familiar with ElastAlert 2 at all. 
@@ -17,13 +24,6 @@ The full list of platforms that ElastAlert 2 can fire alerts into can be found [
 
 Please see our [contributing guidelines][6].
 
-## Docker and Kubernetes
-
-ElastAlert 2 is well-suited to being run as a microservice, and is available
-as a [Docker container][2]. A [Helm chart ][7] is also maintained for easy
-configuration as a Kubernetes deployment. For more instructions on how to
-configure and run ElastAlert 2 in this way, see [here][8].
-
 ## License
 
 ElastAlert 2 is licensed under the [Apache License, Version 2.0][5].

From 8bc734fc5fde2bb7f3bcc8bd3955c2ab69758e73 Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Sat, 5 Jun 2021 08:38:24 +0100
Subject: [PATCH 0286/1065] Fix missing reference

---
 docs/source/elastalert_status.rst  | 2 +-
 docs/source/running_elastalert.rst | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/docs/source/elastalert_status.rst b/docs/source/elastalert_status.rst
index 99f26101d..a5685624d 100644
--- a/docs/source/elastalert_status.rst
+++ b/docs/source/elastalert_status.rst
@@ -67,4 +67,4 @@ an alert with ``realert`` is triggered, a ``silence`` record will be written wit
 be 0 unless ``exponential_realert`` is set.
 
 Whenever an alert is triggered, ElastAlert will check for a matching ``silence`` document, and if the ``until`` timestamp is in the future, it will ignore
-the alert completely. See the :ref:`Running ElastAlert <runningelastalert>` section for information on how to silence an alert.
+the alert completely. See the :ref:`Running ElastAlert <elastalert-arguments>` section for information on how to silence an alert.
diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst
index e1c58a7be..784c4123d 100644
--- a/docs/source/running_elastalert.rst
+++ b/docs/source/running_elastalert.rst
@@ -8,6 +8,8 @@ or directly on your machine as :ref:`a Python package <python-instructions>`.
 If you are not interested in modifying the internals of ElastAlert, the Docker
 container is recommended for ease of use.
 
+.. _elastalert-arguments:
+
 Configuration flags
 ===================

From 1fc21d467d08a635fd36312de49f9f627508fe77 Mon Sep 17 00:00:00 2001
From: Feroz Salam
Date: Sat, 5 Jun 2021 08:43:54 +0100
Subject: [PATCH 0287/1065] Fix hyperlinked whitespace

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index f4156f5f7..2b6de51b1 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@ ElastAlert 2 is backwards compatible with the original ElastAlert rules.
 ## Docker and Kubernetes
 
 ElastAlert 2 is well-suited to being run as a microservice, and is available
-as a [Docker container][2]. A [Helm chart ][7] is also maintained for easy
+as a [Docker container][2]. A [Helm chart][7] is also maintained for easy
 configuration as a Kubernetes deployment. For more instructions on how to
 configure and run ElastAlert 2 in this way, see [here][8].
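The patch that follows standardizes how alerters distinguish required settings from optional ones: required keys stay as plain ``self.rule[...]`` lookups so a missing key raises ``KeyError`` at load time (which the ``test_*_key_error`` tests earlier in this series assert on), while optional keys move to ``self.rule.get(...)`` with a default. A minimal sketch of that convention, with invented class and key names:

    class ExampleAlerter(object):
        def __init__(self, rule):
            self.rule = rule
            # Required: plain indexing raises KeyError when the key is absent.
            self.api_key = self.rule['example_api_key']
            # Optional: .get() falls back to a default instead of raising.
            self.proxy = self.rule.get('example_proxy', None)

    try:
        ExampleAlerter({'example_proxy': 'http://localhost:3128'})
    except KeyError as e:
        print('missing required setting:', e)  # -> 'example_api_key'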
From 0865d8c580f711a4b638f86f48fa71ceb1c4e9ec Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 5 Jun 2021 21:52:53 +0900 Subject: [PATCH 0288/1065] Fix alerters required test code --- elastalert/alerters/chatwork.py | 4 +- elastalert/alerters/datadog.py | 4 +- elastalert/alerters/dingtalk.py | 2 +- elastalert/alerters/discord.py | 2 +- elastalert/alerters/exotel.py | 8 ++-- elastalert/alerters/gitter.py | 2 +- elastalert/alerters/googlechat.py | 2 +- elastalert/alerters/httppost.py | 2 +- elastalert/alerters/line.py | 2 +- elastalert/alerters/mattermost.py | 2 +- elastalert/alerters/pagerduty.py | 4 +- elastalert/alerters/pagertree.py | 2 +- elastalert/alerters/rocketchat.py | 2 +- elastalert/alerters/servicenow.py | 2 +- elastalert/alerters/ses.py | 11 ++--- elastalert/alerters/slack.py | 2 +- elastalert/alerters/sns.py | 2 +- elastalert/alerters/teams.py | 2 +- elastalert/alerters/telegram.py | 4 +- elastalert/alerters/twilio.py | 6 +-- elastalert/alerters/victorops.py | 6 +-- elastalert/alerters/zabbix.py | 6 +-- tests/alerters/alerta_test.py | 8 ++-- tests/alerters/chatwork_test.py | 12 +++--- tests/alerters/datadog_test.py | 13 +++--- tests/alerters/dingtalk_test.py | 10 ++--- tests/alerters/discord_test.py | 12 ++---- tests/alerters/exotel_test.py | 67 +++++++++++++++++++++++++++++++ tests/alerters/gitter_test.py | 8 ++-- tests/alerters/googlechat_test.py | 8 ++-- tests/alerters/httppost_test.py | 14 +++---- tests/alerters/line_test.py | 8 ++-- tests/alerters/mattermost_test.py | 8 ++-- tests/alerters/pagerduty_test.py | 12 +++--- tests/alerters/pagertree_test.py | 12 ++---- tests/alerters/rocketchat_test.py | 10 ++--- tests/alerters/ses_test.py | 12 +++--- tests/alerters/slack_test.py | 8 ++-- tests/alerters/sns_test.py | 8 ++-- tests/alerters/teams_test.py | 8 ++-- tests/alerters/telegram_test.py | 12 +++--- tests/alerters/twilio_test.py | 20 ++++----- tests/alerters/victorops_test.py | 20 ++++----- tests/alerters/zabbix_test.py | 12 +++--- 44 files changed, 220 insertions(+), 161 deletions(-) diff --git a/elastalert/alerters/chatwork.py b/elastalert/alerters/chatwork.py index 4a6330bca..08bf62d57 100644 --- a/elastalert/alerters/chatwork.py +++ b/elastalert/alerters/chatwork.py @@ -14,8 +14,8 @@ class ChatworkAlerter(Alerter): def __init__(self, rule): super(ChatworkAlerter, self).__init__(rule) - self.chatwork_apikey = self.rule['chatwork_apikey'] - self.chatwork_room_id = self.rule['chatwork_room_id'] + self.chatwork_apikey = self.rule.get('chatwork_apikey', None) + self.chatwork_room_id = self.rule.get('chatwork_room_id', None) self.url = 'https://api.chatwork.com/v2/rooms/%s/messages' % (self.chatwork_room_id) self.chatwork_proxy = self.rule.get('chatwork_proxy', None) self.chatwork_proxy_login = self.rule.get('chatwork_proxy_login', None) diff --git a/elastalert/alerters/datadog.py b/elastalert/alerters/datadog.py index 3b71e4264..6ee8d9b3b 100644 --- a/elastalert/alerters/datadog.py +++ b/elastalert/alerters/datadog.py @@ -13,8 +13,8 @@ class DatadogAlerter(Alerter): def __init__(self, rule): super(DatadogAlerter, self).__init__(rule) - self.dd_api_key = self.rule['datadog_api_key'] - self.dd_app_key = self.rule['datadog_app_key'] + self.dd_api_key = self.rule.get('datadog_api_key', None) + self.dd_app_key = self.rule.get('datadog_app_key', None) def alert(self, matches): url = 'https://api.datadoghq.com/api/v1/events' diff --git a/elastalert/alerters/dingtalk.py b/elastalert/alerters/dingtalk.py index e87eca6b3..2e023b2a1 100644 --- a/elastalert/alerters/dingtalk.py 
+++ b/elastalert/alerters/dingtalk.py @@ -15,7 +15,7 @@ class DingTalkAlerter(Alerter): def __init__(self, rule): super(DingTalkAlerter, self).__init__(rule) - self.dingtalk_access_token = self.rule['dingtalk_access_token'] + self.dingtalk_access_token = self.rule.get('dingtalk_access_token', None) self.dingtalk_webhook_url = 'https://oapi.dingtalk.com/robot/send?access_token=%s' % (self.dingtalk_access_token) self.dingtalk_msgtype = self.rule.get('dingtalk_msgtype', 'text') self.dingtalk_single_title = self.rule.get('dingtalk_single_title', 'elastalert') diff --git a/elastalert/alerters/discord.py b/elastalert/alerters/discord.py index 3cbde63db..829653bdf 100644 --- a/elastalert/alerters/discord.py +++ b/elastalert/alerters/discord.py @@ -15,7 +15,7 @@ class DiscordAlerter(Alerter): def __init__(self, rule): super(DiscordAlerter, self).__init__(rule) - self.discord_webhook_url = self.rule['discord_webhook_url'] + self.discord_webhook_url = self.rule.get('discord_webhook_url', None) self.discord_emoji_title = self.rule.get('discord_emoji_title', ':warning:') self.discord_proxy = self.rule.get('discord_proxy', None) self.discord_proxy_login = self.rule.get('discord_proxy_login', None) diff --git a/elastalert/alerters/exotel.py b/elastalert/alerters/exotel.py index e7c63ba2a..f7a95fd9a 100644 --- a/elastalert/alerters/exotel.py +++ b/elastalert/alerters/exotel.py @@ -13,10 +13,10 @@ class ExotelAlerter(Alerter): def __init__(self, rule): super(ExotelAlerter, self).__init__(rule) - self.exotel_account_sid = self.rule['exotel_account_sid'] - self.exotel_auth_token = self.rule['exotel_auth_token'] - self.exotel_to_number = self.rule['exotel_to_number'] - self.exotel_from_number = self.rule['exotel_from_number'] + self.exotel_account_sid = self.rule.get('exotel_account_sid', None) + self.exotel_auth_token = self.rule.get('exotel_auth_token', None) + self.exotel_to_number = self.rule.get('exotel_to_number', None) + self.exotel_from_number = self.rule.get('exotel_from_number', None) self.sms_body = self.rule.get('exotel_message_body', '') def alert(self, matches): diff --git a/elastalert/alerters/gitter.py b/elastalert/alerters/gitter.py index b6d14aa69..7149a2038 100644 --- a/elastalert/alerters/gitter.py +++ b/elastalert/alerters/gitter.py @@ -13,7 +13,7 @@ class GitterAlerter(Alerter): def __init__(self, rule): super(GitterAlerter, self).__init__(rule) - self.gitter_webhook_url = self.rule['gitter_webhook_url'] + self.gitter_webhook_url = self.rule.get('gitter_webhook_url', None) self.gitter_proxy = self.rule.get('gitter_proxy', None) self.gitter_msg_level = self.rule.get('gitter_msg_level', 'error') diff --git a/elastalert/alerters/googlechat.py b/elastalert/alerters/googlechat.py index 9de439de1..1e18f89f5 100644 --- a/elastalert/alerters/googlechat.py +++ b/elastalert/alerters/googlechat.py @@ -13,7 +13,7 @@ class GoogleChatAlerter(Alerter): def __init__(self, rule): super(GoogleChatAlerter, self).__init__(rule) - self.googlechat_webhook_url = self.rule['googlechat_webhook_url'] + self.googlechat_webhook_url = self.rule.get('googlechat_webhook_url', None) if isinstance(self.googlechat_webhook_url, str): self.googlechat_webhook_url = [self.googlechat_webhook_url] self.googlechat_format = self.rule.get('googlechat_format', 'basic') diff --git a/elastalert/alerters/httppost.py b/elastalert/alerters/httppost.py index d5f4aaff3..59e9a806b 100644 --- a/elastalert/alerters/httppost.py +++ b/elastalert/alerters/httppost.py @@ -13,7 +13,7 @@ class HTTPPostAlerter(Alerter): def __init__(self, rule): 
super(HTTPPostAlerter, self).__init__(rule) - post_url = self.rule['http_post_url'] + post_url = self.rule.get('http_post_url', None) if isinstance(post_url, str): post_url = [post_url] self.post_url = post_url diff --git a/elastalert/alerters/line.py b/elastalert/alerters/line.py index 7d79b5558..41d1d4c92 100644 --- a/elastalert/alerters/line.py +++ b/elastalert/alerters/line.py @@ -11,7 +11,7 @@ class LineNotifyAlerter(Alerter): def __init__(self, rule): super(LineNotifyAlerter, self).__init__(rule) - self.linenotify_access_token = self.rule["linenotify_access_token"] + self.linenotify_access_token = self.rule.get("linenotify_access_token", None) def alert(self, matches): body = self.create_alert_body(matches) diff --git a/elastalert/alerters/mattermost.py b/elastalert/alerters/mattermost.py index 1ed7c56b5..2fbab05eb 100644 --- a/elastalert/alerters/mattermost.py +++ b/elastalert/alerters/mattermost.py @@ -16,7 +16,7 @@ def __init__(self, rule): super(MattermostAlerter, self).__init__(rule) # HTTP config - self.mattermost_webhook_url = self.rule['mattermost_webhook_url'] + self.mattermost_webhook_url = self.rule.get('mattermost_webhook_url', None) if isinstance(self.mattermost_webhook_url, str): self.mattermost_webhook_url = [self.mattermost_webhook_url] self.mattermost_proxy = self.rule.get('mattermost_proxy', None) diff --git a/elastalert/alerters/pagerduty.py b/elastalert/alerters/pagerduty.py index 69ebafb3d..b804b26ca 100644 --- a/elastalert/alerters/pagerduty.py +++ b/elastalert/alerters/pagerduty.py @@ -12,8 +12,8 @@ class PagerDutyAlerter(Alerter): def __init__(self, rule): super(PagerDutyAlerter, self).__init__(rule) - self.pagerduty_service_key = self.rule['pagerduty_service_key'] - self.pagerduty_client_name = self.rule['pagerduty_client_name'] + self.pagerduty_service_key = self.rule.get('pagerduty_service_key', None) + self.pagerduty_client_name = self.rule.get('pagerduty_client_name', None) self.pagerduty_incident_key = self.rule.get('pagerduty_incident_key', '') self.pagerduty_incident_key_args = self.rule.get('pagerduty_incident_key_args', None) self.pagerduty_event_type = self.rule.get('pagerduty_event_type', 'trigger') diff --git a/elastalert/alerters/pagertree.py b/elastalert/alerters/pagertree.py index 360f405fd..9e159e46c 100644 --- a/elastalert/alerters/pagertree.py +++ b/elastalert/alerters/pagertree.py @@ -14,7 +14,7 @@ class PagerTreeAlerter(Alerter): def __init__(self, rule): super(PagerTreeAlerter, self).__init__(rule) - self.url = self.rule['pagertree_integration_url'] + self.url = self.rule.get('pagertree_integration_url', None) self.pagertree_proxy = self.rule.get('pagertree_proxy', None) def alert(self, matches): diff --git a/elastalert/alerters/rocketchat.py b/elastalert/alerters/rocketchat.py index 6d5c4ebc6..7ab832af3 100644 --- a/elastalert/alerters/rocketchat.py +++ b/elastalert/alerters/rocketchat.py @@ -15,7 +15,7 @@ class RocketChatAlerter(Alerter): def __init__(self, rule): super(RocketChatAlerter, self).__init__(rule) - self.rocket_chat_webhook_url = self.rule['rocket_chat_webhook_url'] + self.rocket_chat_webhook_url = self.rule.get('rocket_chat_webhook_url', None) if isinstance(self.rocket_chat_webhook_url, str): self.rocket_chat_webhook_url = [self.rocket_chat_webhook_url] self.rocket_chat_proxy = self.rule.get('rocket_chat_proxy', None) diff --git a/elastalert/alerters/servicenow.py b/elastalert/alerters/servicenow.py index eecf8744e..811fbecb9 100644 --- a/elastalert/alerters/servicenow.py +++ b/elastalert/alerters/servicenow.py @@ -24,7 
+24,7 @@ class ServiceNowAlerter(Alerter): def __init__(self, rule): super(ServiceNowAlerter, self).__init__(rule) - self.servicenow_rest_url = self.rule['servicenow_rest_url'] + self.servicenow_rest_url = self.rule.get('servicenow_rest_url', None) self.servicenow_proxy = self.rule.get('servicenow_proxy', None) def alert(self, matches): diff --git a/elastalert/alerters/ses.py b/elastalert/alerters/ses.py index 65a7501dc..68b4c90a2 100644 --- a/elastalert/alerters/ses.py +++ b/elastalert/alerters/ses.py @@ -16,11 +16,12 @@ def __init__(self, *args): self.aws_region = self.rule.get('ses_aws_region', 'us-east-1') self.aws_profile = self.rule.get('ses_aws_profile', '') - self.from_addr = self.rule['ses_from_addr'] + self.email = self.rule.get('ses_email', None) + self.from_addr = self.rule.get('ses_from_addr', None) # Convert email to a list if it isn't already - if isinstance(self.rule['ses_email'], str): - self.rule['ses_email'] = [self.rule['ses_email']] + if isinstance(self.email, str): + self.email = [self.email] # If there is a cc then also convert it a list if it isn't cc = self.rule.get('ses_cc') @@ -44,7 +45,7 @@ def __init__(self, *args): def alert(self, matches): body = self.create_alert_body(matches) - to_addr = self.rule['ses_email'] + to_addr = self.email if 'ses_email_from_field' in self.rule: recipient = lookup_es_key(matches[0], self.rule['ses_email_from_field']) if isinstance(recipient, str): @@ -106,4 +107,4 @@ def create_default_title(self, matches): def get_info(self): return {'type': 'ses', - 'recipients': self.rule['ses_email']} + 'recipients': self.email} diff --git a/elastalert/alerters/slack.py b/elastalert/alerters/slack.py index 97902ac69..aa0fe16b0 100644 --- a/elastalert/alerters/slack.py +++ b/elastalert/alerters/slack.py @@ -14,7 +14,7 @@ class SlackAlerter(Alerter): def __init__(self, rule): super(SlackAlerter, self).__init__(rule) - self.slack_webhook_url = self.rule['slack_webhook_url'] + self.slack_webhook_url = self.rule.get('slack_webhook_url', None) if isinstance(self.slack_webhook_url, str): self.slack_webhook_url = [self.slack_webhook_url] self.slack_proxy = self.rule.get('slack_proxy', None) diff --git a/elastalert/alerters/sns.py b/elastalert/alerters/sns.py index 2dde4a619..4c16911e4 100644 --- a/elastalert/alerters/sns.py +++ b/elastalert/alerters/sns.py @@ -10,7 +10,7 @@ class SnsAlerter(Alerter): def __init__(self, *args): super(SnsAlerter, self).__init__(*args) - self.sns_topic_arn = self.rule['sns_topic_arn'] + self.sns_topic_arn = self.rule.get('sns_topic_arn', None) self.sns_aws_access_key_id = self.rule.get('sns_aws_access_key_id') self.sns_aws_secret_access_key = self.rule.get('sns_aws_secret_access_key') self.sns_aws_region = self.rule.get('sns_aws_region', 'us-east-1') diff --git a/elastalert/alerters/teams.py b/elastalert/alerters/teams.py index fb33fe310..d9f496490 100644 --- a/elastalert/alerters/teams.py +++ b/elastalert/alerters/teams.py @@ -12,7 +12,7 @@ class MsTeamsAlerter(Alerter): def __init__(self, rule): super(MsTeamsAlerter, self).__init__(rule) - self.ms_teams_webhook_url = self.rule['ms_teams_webhook_url'] + self.ms_teams_webhook_url = self.rule.get('ms_teams_webhook_url', None) if isinstance(self.ms_teams_webhook_url, str): self.ms_teams_webhook_url = [self.ms_teams_webhook_url] self.ms_teams_proxy = self.rule.get('ms_teams_proxy', None) diff --git a/elastalert/alerters/telegram.py b/elastalert/alerters/telegram.py index 718656670..8cef4ccfb 100644 --- a/elastalert/alerters/telegram.py +++ b/elastalert/alerters/telegram.py 
@@ -15,8 +15,8 @@ class TelegramAlerter(Alerter): def __init__(self, rule): super(TelegramAlerter, self).__init__(rule) - self.telegram_bot_token = self.rule['telegram_bot_token'] - self.telegram_room_id = self.rule['telegram_room_id'] + self.telegram_bot_token = self.rule.get('telegram_bot_token', None) + self.telegram_room_id = self.rule.get('telegram_room_id', None) self.telegram_api_url = self.rule.get('telegram_api_url', 'api.telegram.org') self.url = 'https://%s/bot%s/%s' % (self.telegram_api_url, self.telegram_bot_token, "sendMessage") self.telegram_proxy = self.rule.get('telegram_proxy', None) diff --git a/elastalert/alerters/twilio.py b/elastalert/alerters/twilio.py index 3878633a7..dece40117 100644 --- a/elastalert/alerters/twilio.py +++ b/elastalert/alerters/twilio.py @@ -10,9 +10,9 @@ class TwilioAlerter(Alerter): def __init__(self, rule): super(TwilioAlerter, self).__init__(rule) - self.twilio_account_sid = self.rule['twilio_account_sid'] - self.twilio_auth_token = self.rule['twilio_auth_token'] - self.twilio_to_number = self.rule['twilio_to_number'] + self.twilio_account_sid = self.rule.get('twilio_account_sid', None) + self.twilio_auth_token = self.rule.get('twilio_auth_token', None) + self.twilio_to_number = self.rule.get('twilio_to_number', None) self.twilio_from_number = self.rule.get('twilio_from_number', None) self.twilio_message_service_sid = self.rule.get('twilio_message_service_sid', None) self.twilio_use_copilot = self.rule.get('twilio_use_copilot', False) diff --git a/elastalert/alerters/victorops.py b/elastalert/alerters/victorops.py index 993a31814..f0b39cfec 100644 --- a/elastalert/alerters/victorops.py +++ b/elastalert/alerters/victorops.py @@ -13,9 +13,9 @@ class VictorOpsAlerter(Alerter): def __init__(self, rule): super(VictorOpsAlerter, self).__init__(rule) - self.victorops_api_key = self.rule['victorops_api_key'] - self.victorops_routing_key = self.rule['victorops_routing_key'] - self.victorops_message_type = self.rule['victorops_message_type'] + self.victorops_api_key = self.rule.get('victorops_api_key', None) + self.victorops_routing_key = self.rule.get('victorops_routing_key', None) + self.victorops_message_type = self.rule.get('victorops_message_type', None) self.victorops_entity_id = self.rule.get('victorops_entity_id', None) self.victorops_entity_display_name = self.rule.get('victorops_entity_display_name', 'no entity display name') self.url = 'https://alert.victorops.com/integrations/generic/20131114/alert/%s/%s' % ( diff --git a/elastalert/alerters/zabbix.py b/elastalert/alerters/zabbix.py index da96f0ed3..558e9baa4 100644 --- a/elastalert/alerters/zabbix.py +++ b/elastalert/alerters/zabbix.py @@ -46,15 +46,15 @@ class ZabbixAlerter(Alerter): # You can ensure that the rule config file specifies all # of the options. Otherwise, ElastAlert will throw an exception # when trying to load the rule. 
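# A minimal, self-contained sketch of the dict-lookup pattern this patch
# applies across the alerters above. The class and option names here are
# hypothetical, and in ElastAlert 2 the required_options check is performed
# by the rule loader (raising EAException) rather than inside each __init__;
# the point is only why self.rule.get('key', None) is now safe where
# self.rule['key'] previously raised a bare KeyError.

class EAException(Exception):
    # Stand-in for elastalert.util.EAException, to keep the sketch runnable.
    pass


class ExampleAlerter(object):
    required_options = frozenset(['example_webhook_url'])

    def __init__(self, rule):
        missing = self.required_options - frozenset(rule.keys())
        if missing:
            # The same message format the updated tests assert on.
            raise EAException('Missing required option(s): %s' % ', '.join(sorted(missing)))
        self.rule = rule
        # Safe: the check above guarantees the key is present.
        self.example_webhook_url = self.rule.get('example_webhook_url', None)


try:
    ExampleAlerter({'name': 'demo rule'})
except EAException as ea:
    print(ea)  # Missing required option(s): example_webhook_url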
-    required_options = frozenset(['zbx_sender_host', 'zbx_sender_port', 'zbx_host', 'zbx_key'])
+    required_options = frozenset(['zbx_host', 'zbx_key'])
 
     def __init__(self, *args):
         super(ZabbixAlerter, self).__init__(*args)
 
         self.zbx_sender_host = self.rule.get('zbx_sender_host', 'localhost')
         self.zbx_sender_port = self.rule.get('zbx_sender_port', 10051)
-        self.zbx_host = self.rule['zbx_host']
-        self.zbx_key = self.rule['zbx_key']
+        self.zbx_host = self.rule.get('zbx_host', None)
+        self.zbx_key = self.rule.get('zbx_key', None)
         self.timestamp_field = self.rule.get('timestamp_field', '@timestamp')
         self.timestamp_type = self.rule.get('timestamp_type', 'iso')
         self.timestamp_strptime = self.rule.get('timestamp_strptime', '%Y-%m-%dT%H:%M:%S.%fZ')
diff --git a/tests/alerters/alerta_test.py b/tests/alerters/alerta_test.py
index a9d827797..90bf793c4 100644
--- a/tests/alerters/alerta_test.py
+++ b/tests/alerters/alerta_test.py
@@ -677,14 +677,14 @@ def test_alerta_getinfo():
 
 @pytest.mark.parametrize('alerta_api_url, expected_data', [
-    ('', True),
+    ('', 'Missing required option(s): alerta_api_url'),
     ('http://elastalerthost:8080/api/alert', {
         'type': 'alerta',
         'alerta_url': 'http://elastalerthost:8080/api/alert'
     }),
 ])
-def test_alerta_key_error(alerta_api_url, expected_data):
+def test_alerta_required_error(alerta_api_url, expected_data):
     try:
         rule = {
             'name': 'Test Alerta rule!',
@@ -703,5 +703,5 @@ def test_alerta_required_error(alerta_api_url, expected_data):
         actual_data = alert.get_info()
         assert expected_data == actual_data
-    except Exception:
-        assert expected_data
+    except Exception as ea:
+        assert expected_data in str(ea)
diff --git a/tests/alerters/chatwork_test.py b/tests/alerters/chatwork_test.py
index bc81ae34a..b98579a2e 100644
--- a/tests/alerters/chatwork_test.py
+++ b/tests/alerters/chatwork_test.py
@@ -124,16 +124,16 @@ def test_chatwork_getinfo():
 
 @pytest.mark.parametrize('chatwork_apikey, chatwork_room_id, expected_data', [
-    ('', '', True),
-    ('xxxx1', '', True),
-    ('', 'xxxx2', True),
+    ('', '', 'Missing required option(s): chatwork_apikey, chatwork_room_id'),
+    ('xxxx1', '', 'Missing required option(s): chatwork_apikey, chatwork_room_id'),
+    ('', 'xxxx2', 'Missing required option(s): chatwork_apikey, chatwork_room_id'),
     ('xxxx1', 'xxxx2', {
         "type": "chatwork",
         "chatwork_room_id": "xxxx2"
     }),
 ])
-def test_chatwork_key_error(chatwork_apikey, chatwork_room_id, expected_data):
+def test_chatwork_required_error(chatwork_apikey, chatwork_room_id, expected_data):
     try:
         rule = {
             'name': 'Test Chatwork Rule',
@@ -153,5 +153,5 @@ def test_chatwork_required_error(chatwork_apikey, chatwork_room_id, expected_data):
         actual_data = alert.get_info()
         assert expected_data == actual_data
-    except KeyError:
-        assert expected_data
+    except Exception as ea:
+        assert expected_data in str(ea)
diff --git a/tests/alerters/datadog_test.py b/tests/alerters/datadog_test.py
index 74b7c6547..ec2baaf6f 100644
--- a/tests/alerters/datadog_test.py
+++ b/tests/alerters/datadog_test.py
@@ -88,15 +88,15 @@ def test_datadog_getinfo():
 
 @pytest.mark.parametrize('datadog_api_key, datadog_app_key, expected_data', [
-    ('', '', True),
-    ('xxxx1', '', True),
-    ('', 'xxxx2', True),
+    ('', '', 'Missing required option(s): datadog_api_key, datadog_app_key'),
+    ('xxxx1', '', 'Missing required option(s): datadog_api_key, datadog_app_key'),
+    ('', 'xxxx2', 'Missing required option(s): datadog_api_key, datadog_app_key'),
     ('xxxx1', 'xxxx2', {
         'type': 'datadog'
     }),
 ])
-def test_datadog_key_error(datadog_api_key, datadog_app_key, expected_data):
+def test_datadog_required_error(datadog_api_key, datadog_app_key, expected_data): try: rule = { 'name': 'Test Datadog Event Alerter', @@ -115,8 +115,7 @@ def test_datadog_key_error(datadog_api_key, datadog_app_key, expected_data): rules_loader.load_modules(rule) alert = DatadogAlerter(rule) - expected_data = {'type': 'datadog'} actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/dingtalk_test.py b/tests/alerters/dingtalk_test.py index 5a4a59ae1..56a8f02b3 100644 --- a/tests/alerters/dingtalk_test.py +++ b/tests/alerters/dingtalk_test.py @@ -314,15 +314,15 @@ def test_dingtalk_getinfo(): assert expected_data == actual_data -@pytest.mark.parametrize('dingtalk_access_token,, expected_data', [ - ('', True), +@pytest.mark.parametrize('dingtalk_access_token, expected_data', [ + ('', 'Missing required option(s): dingtalk_access_token'), ('xxxxxxx', { 'type': 'dingtalk', "dingtalk_webhook_url": 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx' }), ]) -def test_dingtalk_key_error(dingtalk_access_token, expected_data): +def test_dingtalk_required_error(dingtalk_access_token, expected_data): try: rule = { 'name': 'Test DingTalk Rule', @@ -340,5 +340,5 @@ def test_dingtalk_key_error(dingtalk_access_token, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/discord_test.py b/tests/alerters/discord_test.py index 693ff9be0..e08a322e7 100644 --- a/tests/alerters/discord_test.py +++ b/tests/alerters/discord_test.py @@ -230,14 +230,14 @@ def test_discord_getinfo(): @pytest.mark.parametrize('discord_webhook_url, expected_data', [ - ('', True), + ('', 'Missing required option(s): discord_webhook_url'), ('http://xxxxxxx', { 'type': 'discord', 'discord_webhook_url': 'http://xxxxxxx' }), ]) -def test_discord_key_error(discord_webhook_url, expected_data): +def test_discord_required_error(discord_webhook_url, expected_data): try: rule = { 'name': 'Test Discord Rule' + ('a' * 2069), @@ -253,11 +253,7 @@ def test_discord_key_error(discord_webhook_url, expected_data): rules_loader.load_modules(rule) alert = DiscordAlerter(rule) - expected_data = { - 'type': 'discord', - 'discord_webhook_url': 'http://xxxxxxx' - } actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/exotel_test.py b/tests/alerters/exotel_test.py index 4559c8443..6d5e5e9a8 100644 --- a/tests/alerters/exotel_test.py +++ b/tests/alerters/exotel_test.py @@ -1,3 +1,5 @@ +import pytest + from elastalert.alerters.exotel import ExotelAlerter from elastalert.loaders import FileRulesLoader @@ -23,3 +25,68 @@ def test_exotel_getinfo(): } actual_data = alert.get_info() assert expected_data == actual_data + + +@pytest.mark.parametrize('exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number, expected_data', [ + ('', '', '', '', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('xxxx1', '', '', '', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('', 'xxxx2', '', '', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, 
exotel_from_number'), + ('', '', 'xxxx3', '', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('', '', '', 'xxxx4', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('xxxx1', 'xxxx2', '', '', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('xxxx1', '', 'xxxx3', '', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('xxxx1', '', '', 'xxxx4', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('', 'xxxx2', 'xxxx3', '', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('', 'xxxx2', '', 'xxxx4', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('', '', 'xxxx3', 'xxxx4', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('xxxx1', 'xxxx2', 'xxxx3', '', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('xxxx1', '', 'xxxx3', 'xxxx4', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('', 'xxxx2', 'xxxx3', 'xxxx4', + 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('xxxx1', 'xxxx2', 'xxxx3', 'xxxx4', + { + 'type': 'exotel', + 'exotel_account': 'xxxx1' + }), +]) +def test_exotel_required_error(exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number, expected_data): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert': [] + } + + if exotel_account_sid != '': + rule['exotel_account_sid'] = exotel_account_sid + + if exotel_auth_token != '': + rule['exotel_auth_token'] = exotel_auth_token + + if exotel_to_number != '': + rule['exotel_to_number'] = exotel_to_number + + if exotel_from_number != '': + rule['exotel_from_number'] = exotel_from_number + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ExotelAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/gitter_test.py b/tests/alerters/gitter_test.py index 696e88599..21e1af528 100644 --- a/tests/alerters/gitter_test.py +++ b/tests/alerters/gitter_test.py @@ -129,14 +129,14 @@ def test_gitter_getinfo(): @pytest.mark.parametrize('gitter_webhook_url, expected_data', [ - ('', True), + ('', 'Missing required option(s): gitter_webhook_url'), ('https://webhooks.gitter.im/e/xxxxx', { 'type': 'gitter', 'gitter_webhook_url': 'https://webhooks.gitter.im/e/xxxxx' }) ]) -def test_gitter_key_error(gitter_webhook_url, expected_data): +def test_gitter_required_error(gitter_webhook_url, expected_data): try: rule = { 'name': 'Test Gitter Rule', @@ -153,5 +153,5 @@ def test_gitter_key_error(gitter_webhook_url, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/googlechat_test.py b/tests/alerters/googlechat_test.py index b60317e40..d0636edda 100644 --- a/tests/alerters/googlechat_test.py +++ b/tests/alerters/googlechat_test.py @@ -147,14 +147,14 @@ def 
test_google_chat_getinfo(): @pytest.mark.parametrize('googlechat_webhook_url, expected_data', [ - ('', True), + ('', 'Missing required option(s): googlechat_webhook_url'), ('http://xxxxxxx', { 'type': 'googlechat', 'googlechat_webhook_url': ['http://xxxxxxx'] }), ]) -def test_google_chat_key_error(googlechat_webhook_url, expected_data): +def test_google_chat_required_error(googlechat_webhook_url, expected_data): try: rule = { 'name': 'Test GoogleChat Rule', @@ -171,5 +171,5 @@ def test_google_chat_key_error(googlechat_webhook_url, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/httppost_test.py b/tests/alerters/httppost_test.py index e84f45fef..58c8c73e8 100644 --- a/tests/alerters/httppost_test.py +++ b/tests/alerters/httppost_test.py @@ -306,15 +306,15 @@ def test_http_getinfo(): assert expected_data == actual_data -@pytest.mark.parametrize('http_post_webhook_url, expected_data', [ - ('', True), +@pytest.mark.parametrize('http_post_url, expected_data', [ + ('', 'Missing required option(s): http_post_url'), ('http://test.webhook.url', { 'type': 'http_post', 'http_post_webhook_url': ['http://test.webhook.url'] }), ]) -def test_http_key_error(http_post_webhook_url, expected_data): +def test_http_required_error(http_post_url, expected_data): try: rule = { 'name': 'Test HTTP Post Alerter Without Payload', @@ -322,8 +322,8 @@ def test_http_key_error(http_post_webhook_url, expected_data): 'alert': [] } - if http_post_webhook_url != '': - rule['http_post_webhook_url'] = http_post_webhook_url + if http_post_url != '': + rule['http_post_url'] = http_post_url rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) @@ -331,5 +331,5 @@ def test_http_key_error(http_post_webhook_url, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/line_test.py b/tests/alerters/line_test.py index 31340eb23..986a36618 100644 --- a/tests/alerters/line_test.py +++ b/tests/alerters/line_test.py @@ -83,14 +83,14 @@ def test_line_getinfo(): @pytest.mark.parametrize('linenotify_access_token, expected_data', [ - ('', True), + ('', 'Missing required option(s): linenotify_access_token'), ('xxxxx', { "type": "linenotify", "linenotify_access_token": 'xxxxx' }), ]) -def test_line_key_error(linenotify_access_token, expected_data): +def test_line_required_error(linenotify_access_token, expected_data): try: rule = { 'name': 'Test LineNotify Rule', @@ -107,5 +107,5 @@ def test_line_key_error(linenotify_access_token, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/mattermost_test.py b/tests/alerters/mattermost_test.py index 1c99e45fc..181dde475 100644 --- a/tests/alerters/mattermost_test.py +++ b/tests/alerters/mattermost_test.py @@ -875,7 +875,7 @@ def test_mattermost_getinfo(): @pytest.mark.parametrize('mattermost_webhook_url, expected_data', [ - ('', True), + ('', 'Missing required option(s): mattermost_webhook_url'), ('http://xxxxx', { 'type': 'mattermost', @@ -883,7 +883,7 @@ def test_mattermost_getinfo(): 'mattermost_webhook_url': ['http://xxxxx'] }), ]) -def test_mattermost_key_error(mattermost_webhook_url, expected_data): +def 
test_mattermost_required_error(mattermost_webhook_url, expected_data): try: rule = { 'name': 'Test Mattermost Rule', @@ -902,5 +902,5 @@ def test_mattermost_key_error(mattermost_webhook_url, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/pagerduty_test.py b/tests/alerters/pagerduty_test.py index e85333078..e80372d7e 100644 --- a/tests/alerters/pagerduty_test.py +++ b/tests/alerters/pagerduty_test.py @@ -700,16 +700,16 @@ def test_pagerduty_getinfo(): @pytest.mark.parametrize('pagerduty_service_key, pagerduty_client_name, expected_data', [ - ('', '', True), - ('xxxxx1', '', True), - ('', 'xxxxx2', True), + ('', '', 'Missing required option(s): pagerduty_service_key, pagerduty_client_name'), + ('xxxxx1', '', 'Missing required option(s): pagerduty_service_key, pagerduty_client_name'), + ('', 'xxxxx2', 'Missing required option(s): pagerduty_service_key, pagerduty_client_name'), ('xxxxx1', 'xxxxx2', { 'type': 'pagerduty', 'pagerduty_client_name': 'xxxxx2' }), ]) -def test_pagerduty_key_error(pagerduty_service_key, pagerduty_client_name, expected_data): +def test_pagerduty_required_error(pagerduty_service_key, pagerduty_client_name, expected_data): try: rule = { 'name': 'Test PD Rule', @@ -729,5 +729,5 @@ def test_pagerduty_key_error(pagerduty_service_key, pagerduty_client_name, expec actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/pagertree_test.py b/tests/alerters/pagertree_test.py index 1d61169ad..f58e7061c 100644 --- a/tests/alerters/pagertree_test.py +++ b/tests/alerters/pagertree_test.py @@ -135,14 +135,14 @@ def test_pagertree_getinfo(): @pytest.mark.parametrize('pagertree_integration_url, expected_data', [ - ('', True), + ('', 'Missing required option(s): pagertree_integration_url'), ('https://api.pagertree.com/integration/xxxxx', { 'type': 'pagertree', 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx' }), ]) -def test_pagertree_key_error(pagertree_integration_url, expected_data): +def test_pagertree_required_error(pagertree_integration_url, expected_data): try: rule = { 'name': 'Test PagerTree Rule', @@ -157,11 +157,7 @@ def test_pagertree_key_error(pagertree_integration_url, expected_data): rules_loader.load_modules(rule) alert = PagerTreeAlerter(rule) - expected_data = { - 'type': 'pagertree', - 'pagertree_integration_url': 'https://api.pagertree.com/integration/xxxxx' - } actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/rocketchat_test.py b/tests/alerters/rocketchat_test.py index ea20fb5a0..99a73ff28 100644 --- a/tests/alerters/rocketchat_test.py +++ b/tests/alerters/rocketchat_test.py @@ -514,7 +514,7 @@ def test_rocketchat_alert_fields(): assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) -def test_rocketchat_msg_color_key_error(): +def test_rocketchat_msg_color_required_error(): try: rule = { 'name': 'Test Rule', @@ -600,7 +600,7 @@ def test_rocketchat_getinfo(): @pytest.mark.parametrize('rocket_chat_webhook_url, expected_data', [ - ('', True), + ('', 'Missing required option(s): rocket_chat_webhook_url'), ('http://please.dontgohere.rocketchat', { 'type': 'rocketchat', 
@@ -608,7 +608,7 @@ def test_rocketchat_getinfo(): 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'] }) ]) -def test_rocketchat_key_error(rocket_chat_webhook_url, expected_data): +def test_rocketchat_required_error(rocket_chat_webhook_url, expected_data): try: rule = { 'name': 'Test Rule', @@ -626,5 +626,5 @@ def test_rocketchat_key_error(rocket_chat_webhook_url, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/ses_test.py b/tests/alerters/ses_test.py index 1a8ad3b9e..4705cee02 100644 --- a/tests/alerters/ses_test.py +++ b/tests/alerters/ses_test.py @@ -28,16 +28,16 @@ def test_ses_getinfo(): @pytest.mark.parametrize('ses_email, ses_from_addr, expected_data', [ - ('', '', True), - ('test@aaa.com', '', True), - ('', 'test2@aaa.com', True), + ('', '', 'Missing required option(s): ses_email, ses_from_addr'), + ('test@aaa.com', '', 'Missing required option(s): ses_email, ses_from_addr'), + ('', 'test2@aaa.com', 'Missing required option(s): ses_email, ses_from_addr'), ('test@aaa.com', 'test2@aaa.com', { 'type': 'ses', 'recipients': ['test@aaa.com'] }), ]) -def test_ses_key_error(ses_email, ses_from_addr, expected_data): +def test_ses_required_error(ses_email, ses_from_addr, expected_data): try: rule = { 'name': 'Test Telegram Rule', @@ -57,5 +57,5 @@ def test_ses_key_error(ses_email, ses_from_addr, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/slack_test.py b/tests/alerters/slack_test.py index 7b660310d..010dfbe60 100644 --- a/tests/alerters/slack_test.py +++ b/tests/alerters/slack_test.py @@ -1368,14 +1368,14 @@ def test_slack_getinfo(): @pytest.mark.parametrize('slack_webhook_url, expected_data', [ - ('', True), + ('', 'Missing required option(s): slack_webhook_url'), ('http://please.dontgohere.slack', { 'type': 'slack', 'slack_username_override': 'elastalert' }), ]) -def test_slack_key_error(slack_webhook_url, expected_data): +def test_slack_required_error(slack_webhook_url, expected_data): try: rule = { 'name': 'Test Rule', @@ -1393,5 +1393,5 @@ def test_slack_key_error(slack_webhook_url, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/sns_test.py b/tests/alerters/sns_test.py index c9f81519b..6ff42747b 100644 --- a/tests/alerters/sns_test.py +++ b/tests/alerters/sns_test.py @@ -27,13 +27,13 @@ def test_sns_getinfo(): @pytest.mark.parametrize('sns_topic_arn, expected_data', [ - ('', True), + ('', 'Missing required option(s): sns_topic_arn'), ('xxxx', { 'type': 'sns' }) ]) -def test_sns_key_error(sns_topic_arn, expected_data): +def test_sns_required_error(sns_topic_arn, expected_data): try: rule = { 'name': 'Test Rule', @@ -51,5 +51,5 @@ def test_sns_key_error(sns_topic_arn, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/teams_test.py b/tests/alerters/teams_test.py index 72970427b..00c86c923 100644 --- a/tests/alerters/teams_test.py +++ b/tests/alerters/teams_test.py @@ -166,14 +166,14 @@ def test_ms_teams_getinfo(): 
@pytest.mark.parametrize('ms_teams_webhook_url, expected_data', [ - ('', True), + ('', 'Missing required option(s): ms_teams_webhook_url'), ('http://test.webhook.url', { 'type': 'ms_teams', 'ms_teams_webhook_url': ['http://test.webhook.url'] }) ]) -def test_ms_teams_key_error(ms_teams_webhook_url, expected_data): +def test_ms_teams_required_error(ms_teams_webhook_url, expected_data): try: rule = { 'name': 'Test Rule', @@ -191,5 +191,5 @@ def test_ms_teams_key_error(ms_teams_webhook_url, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/telegram_test.py b/tests/alerters/telegram_test.py index 95a8d02d9..b6d2b8a62 100644 --- a/tests/alerters/telegram_test.py +++ b/tests/alerters/telegram_test.py @@ -166,16 +166,16 @@ def test_telegram_getinfo(): @pytest.mark.parametrize('telegram_bot_token, telegram_room_id, expected_data', [ - ('', '', True), - ('xxxxx1', '', True), - ('', 'xxxxx2', True), + ('', '', 'Missing required option(s): telegram_bot_token, telegram_room_id'), + ('xxxxx1', '', 'Missing required option(s): telegram_bot_token, telegram_room_id'), + ('', 'xxxxx2', 'Missing required option(s): telegram_bot_token, telegram_room_id'), ('xxxxx1', 'xxxxx2', { 'type': 'telegram', 'telegram_room_id': 'xxxxx2' }), ]) -def test_telegram_key_error(telegram_bot_token, telegram_room_id, expected_data): +def test_telegram_required_error(telegram_bot_token, telegram_room_id, expected_data): try: rule = { 'name': 'Test Telegram Rule', @@ -195,5 +195,5 @@ def test_telegram_key_error(telegram_bot_token, telegram_room_id, expected_data) actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/twilio_test.py b/tests/alerters/twilio_test.py index 8bd04b858..e1e0fe308 100644 --- a/tests/alerters/twilio_test.py +++ b/tests/alerters/twilio_test.py @@ -29,20 +29,20 @@ def test_twilio_getinfo(): @pytest.mark.parametrize('twilio_account_sid, twilio_auth_token, twilio_to_number, expected_data', [ - ('', '', '', True), - ('xxxx1', '', '', True), - ('', 'xxxx2', '', True), - ('', '', 'INFO', True), - ('xxxx1', 'xxxx2', '', True), - ('xxxx1', '', 'INFO', True), - ('', 'xxxx2', 'INFO', True), + ('', '', '', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('xxxx1', '', '', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('', 'xxxx2', '', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('', '', 'INFO', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('xxxx1', 'xxxx2', '', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('xxxx1', '', 'INFO', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('', 'xxxx2', 'INFO', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), ('xxxx1', 'xxxx2', 'INFO', { 'type': 'twilio', 'twilio_client_name': 'xxxxx4' }), ]) -def test_twilio_key_error(twilio_account_sid, twilio_auth_token, twilio_to_number, expected_data): +def test_twilio_required_error(twilio_account_sid, twilio_auth_token, twilio_to_number, expected_data): try: rule = { 'name': 'Test Rule', @@ -67,8 +67,8 @@ def 
test_twilio_key_error(twilio_account_sid, twilio_auth_token, twilio_to_numbe actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) @pytest.mark.parametrize('twilio_use_copilot, twilio_message_service_sid, twilio_from_number, expected_data', [ diff --git a/tests/alerters/victorops_test.py b/tests/alerters/victorops_test.py index 649f9aa32..c24b33ab1 100644 --- a/tests/alerters/victorops_test.py +++ b/tests/alerters/victorops_test.py @@ -219,20 +219,20 @@ def test_victorops_getinfo(): @pytest.mark.parametrize('victorops_api_key, victorops_routing_key, victorops_message_type, expected_data', [ - ('', '', '', True), - ('xxxx1', '', '', True), - ('', 'xxxx2', '', True), - ('', '', 'INFO', True), - ('xxxx1', 'xxxx2', '', True), - ('xxxx1', '', 'INFO', True), - ('', 'xxxx2', 'INFO', True), + ('', '', '', 'Missing required option(s): victorops_api_key, victorops_routing_key, victorops_message_type'), + ('xxxx1', '', '', 'Missing required option(s): victorops_api_key, victorops_routing_key, victorops_message_type'), + ('', 'xxxx2', '', 'Missing required option(s): victorops_api_key, victorops_routing_key, victorops_message_type'), + ('', '', 'INFO', 'Missing required option(s): victorops_api_key, victorops_routing_key, victorops_message_type'), + ('xxxx1', 'xxxx2', '', 'Missing required option(s): victorops_api_key, victorops_routing_key, victorops_message_type'), + ('xxxx1', '', 'INFO', 'Missing required option(s): victorops_api_key, victorops_routing_key, victorops_message_type'), + ('', 'xxxx2', 'INFO', 'Missing required option(s): victorops_api_key, victorops_routing_key, victorops_message_type'), ('xxxx1', 'xxxx2', 'INFO', { 'type': 'victorops', 'victorops_routing_key': 'xxxx2' }), ]) -def test_victoropst_key_error(victorops_api_key, victorops_routing_key, victorops_message_type, expected_data): +def test_victoropst_required_error(victorops_api_key, victorops_routing_key, victorops_message_type, expected_data): try: rule = { 'name': 'Test VictorOps Rule', @@ -255,5 +255,5 @@ def test_victoropst_key_error(victorops_api_key, victorops_routing_key, victorop actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) diff --git a/tests/alerters/zabbix_test.py b/tests/alerters/zabbix_test.py index 1cee2ec75..d45fd4cb5 100644 --- a/tests/alerters/zabbix_test.py +++ b/tests/alerters/zabbix_test.py @@ -57,15 +57,15 @@ def test_zabbix_getinfo(): @pytest.mark.parametrize('zbx_host, zbx_key, expected_data', [ - ('', '', True), - ('example.com', '', True), - ('', 'example-key', True), + ('', '', 'Missing required option(s): zbx_host, zbx_key'), + ('example.com', '', 'Missing required option(s): zbx_host, zbx_key'), + ('', 'example-key', 'Missing required option(s): zbx_host, zbx_key'), ('example.com', 'example-key', { 'type': 'zabbix Alerter' }) ]) -def test_zabbix_key_error(zbx_host, zbx_key, expected_data): +def test_zabbix_required_error(zbx_host, zbx_key, expected_data): try: rule = { 'name': 'Basic Zabbix test', @@ -87,5 +87,5 @@ def test_zabbix_key_error(zbx_host, zbx_key, expected_data): actual_data = alert.get_info() assert expected_data == actual_data - except KeyError: - assert expected_data + except Exception as ea: + assert expected_data in str(ea) From 3b70f65a2a7a4ac3dfbe75195ca666506d6c9afb Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 5 Jun 2021 11:11:45 -0400 
Subject: [PATCH 0289/1065] Add PRs 234 and 235 --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9ec008969..ee6decd54 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,6 +44,8 @@ - Add more alerter test code coverage - [#231](https://github.com/jertel/elastalert2/pull/231) - @nsano-rururu - Upgrade pytest-cov from 2.12.0 to 2.12.1 - [#232](https://github.com/jertel/elastalert2/pull/232) - @nsano-rururu - Migrate away from external test mock dependency - [#233](https://github.com/jertel/elastalert2/pull/233) - @nsano-rururu +- Improve ElastAlert 2 documentation relating to running scenarios - [#234](https://github.com/jertel/elastalert2/pull/234) - @ferozsalam +- Improve test coverage and correct dict lookup syntax for alerter init functions - [#235](https://github.com/jertel/elastalert2/pull/235) - @nsano-rururu # 2.1.0 From a1fc501b27720dc3219a2e22e87fa2a0b14ede3a Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 6 Jun 2021 00:26:01 +0900 Subject: [PATCH 0290/1065] Add Kibana Discover url in mattermost alerts --- docs/source/ruletypes.rst | 6 + elastalert/alerters/mattermost.py | 12 ++ elastalert/schema.yaml | 3 + tests/alerters/mattermost_test.py | 192 ++++++++++++++++++++++++++++++ 4 files changed, 213 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index e127d1101..6ac59fb61 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2223,6 +2223,12 @@ Example mattermost_msg_fields:: ``mattermost_author_icon``: An optional URL used to display a 16x16 pixel icon beside the author_name. Defaults to "". +``mattermost_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the mattermost notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``. + +``mattermost_kibana_discover_color``: The color of the Kibana Discover url attachment. Defaults to ``#ec4b98``. + +``mattermost_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. 
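A minimal sketch of how these three settings fit together, mirroring the unit tests added later in this commit (the webhook URL is a placeholder, and ``generate_kibana_discover_url`` plus its related settings are assumed to be configured so that matches carry a ``kibana_discover_url``)::

    rule = {
        'name': 'Example Rule',
        'type': 'any',
        'alert': [],
        'mattermost_webhook_url': 'http://example.mattermost.webhook',  # placeholder
        'mattermost_attach_kibana_discover_url': True,
        'mattermost_kibana_discover_color': '#ec4b98',             # default
        'mattermost_kibana_discover_title': 'Discover in Kibana',  # default
    }
    # Given a match that carries a generated url...
    match = {
        '@timestamp': '2021-01-01T00:00:00',
        'kibana_discover_url': 'http://localhost:5601/app/discover#/',
    }
    # ...the alerter appends a second attachment to the payload:
    # {'color': '#ec4b98',
    #  'title': 'Discover in Kibana',
    #  'title_link': 'http://localhost:5601/app/discover#/'}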
+
 Microsoft Teams
 ~~~~~~~~~~~~~~~
diff --git a/elastalert/alerters/mattermost.py b/elastalert/alerters/mattermost.py
index 1ed7c56b5..8270e31e8 100644
--- a/elastalert/alerters/mattermost.py
+++ b/elastalert/alerters/mattermost.py
@@ -40,6 +40,9 @@ def __init__(self, rule):
         self.mattermost_author_name = self.rule.get('mattermost_author_name', '')
         self.mattermost_author_link = self.rule.get('mattermost_author_link', '')
         self.mattermost_author_icon = self.rule.get('mattermost_author_icon', '')
+        self.mattermost_attach_kibana_discover_url = self.rule.get('mattermost_attach_kibana_discover_url', False)
+        self.mattermost_kibana_discover_color = self.rule.get('mattermost_kibana_discover_color', '#ec4b98')
+        self.mattermost_kibana_discover_title = self.rule.get('mattermost_kibana_discover_title', 'Discover in Kibana')
@@ -128,6 +131,15 @@ def alert(self, matches):
         if self.mattermost_author_icon != '':
             payload['attachments'][0]['author_icon'] = self.mattermost_author_icon
 
+        if self.mattermost_attach_kibana_discover_url:
+            kibana_discover_url = lookup_es_key(matches[0], 'kibana_discover_url')
+            if kibana_discover_url:
+                payload['attachments'].append({
+                    'color': self.mattermost_kibana_discover_color,
+                    'title': self.mattermost_kibana_discover_title,
+                    'title_link': kibana_discover_url
+                })
+
         for url in self.mattermost_webhook_url:
             try:
                 if self.mattermost_ignore_ssl_errors:
diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml
index bffcc1406..8528a5162 100644
--- a/elastalert/schema.yaml
+++ b/elastalert/schema.yaml
@@ -406,6 +406,9 @@ properties:
     mattermost_author_name: {type: string}
     mattermost_author_link: {type: string}
     mattermost_author_icon: {type: string}
+    mattermost_attach_kibana_discover_url: {type: boolean}
+    mattermost_kibana_discover_color: {type: string}
+    mattermost_kibana_discover_title: {type: string}
 
     ### Microsoft Teams
     ms_teams_webhook_url: *arrayOfString
diff --git a/tests/alerters/mattermost_test.py b/tests/alerters/mattermost_test.py
index b2df662d4..aa22e1d3a 100644
--- a/tests/alerters/mattermost_test.py
+++ b/tests/alerters/mattermost_test.py
@@ -904,3 +904,195 @@ def test_mattermost_key_error(mattermost_webhook_url, expected_data):
         assert expected_data == actual_data
     except KeyError:
         assert expected_data
+
+
+def test_mattermost_attach_kibana_discover_url_when_generated():
+    rule = {
+        'name': 'Test Rule',
+        'type': 'any',
+        'alert_text_type': 'alert_text_only',
+        'mattermost_attach_kibana_discover_url': True,
+        'mattermost_webhook_url': 'http://please.dontgohere.mattermost',
+        'alert': []
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = MattermostAlerter(rule)
+    match = {
+        '@timestamp': '2021-01-01T00:00:00',
+        'kibana_discover_url': 'http://localhost:5601/app/discover#/'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        'attachments': [
+            {
+                'fallback': 'Test Rule: ',
+                'color': 'danger',
+                'title': 'Test Rule',
+                'pretext': '',
+                'fields': [],
+                'text': 'Test Rule\n\n'
+            },
+            {
+                'color': '#ec4b98',
+                'title': 'Discover in Kibana',
+                'title_link': 'http://localhost:5601/app/discover#/'
+            }
+        ], 'username': 'elastalert'
+    }
+    mock_post_request.assert_called_once_with(
+        rule['mattermost_webhook_url'],
+        data=mock.ANY,
+        headers={'content-type': 'application/json'},
+        verify=True,
+        proxies=None
+    )
+
+    actual_data =
json.loads(mock_post_request.call_args_list[0][1]['data']) + print(actual_data) + assert expected_data == actual_data + + +def test_mattermost_attach_kibana_discover_url_when_not_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_attach_kibana_discover_url': True, + 'mattermost_webhook_url': 'http://please.dontgohere.mattermost', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Rule: ', + 'color': 'danger', + 'title': 'Test Rule', + 'pretext': '', + 'fields': [], + 'text': 'Test Rule\n\n' + } + ], 'username': 'elastalert' + } + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + print(actual_data) + assert expected_data == actual_data + + +def test_mattermost_kibana_discover_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_attach_kibana_discover_url': True, + 'mattermost_kibana_discover_title': 'Click to discover in Kibana', + 'mattermost_webhook_url': 'http://please.dontgohere.mattermost', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'kibana_discover_url': 'http://localhost:5601/app/discover#/' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Rule: ', + 'color': 'danger', + 'title': 'Test Rule', + 'pretext': '', + 'fields': [], + 'text': 'Test Rule\n\n' + }, + { + 'color': '#ec4b98', + 'title': 'Click to discover in Kibana', + 'title_link': 'http://localhost:5601/app/discover#/' + } + ], 'username': 'elastalert' + } + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + print(actual_data) + assert expected_data == actual_data + + +def test_mattermost_kibana_discover_color(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'mattermost_attach_kibana_discover_url': True, + 'mattermost_kibana_discover_color': 'blue', + 'mattermost_webhook_url': 'http://please.dontgohere.mattermost', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'kibana_discover_url': 'http://localhost:5601/app/discover#/' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Rule: ', + 'color': 'danger', + 'title': 'Test Rule', + 'pretext': '', + 'fields': [], + 'text': 'Test Rule\n\n' + }, + { + 'color': 'blue', + 'title': 'Discover in Kibana', + 'title_link': 'http://localhost:5601/app/discover#/' + } + ], 'username': 'elastalert' + } + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + 
headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + print(actual_data) + assert expected_data == actual_data From 69eaf9b9eb2e85868e5d692b4e9a8e6d5dfd7baf Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 6 Jun 2021 17:49:50 +0900 Subject: [PATCH 0291/1065] Remove mattermost_msg_fields from schema.yaml --- elastalert/schema.yaml | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 8528a5162..960706d5a 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -36,15 +36,6 @@ definitions: filter: &filter {} - mattermostField: &mattermostField - type: object - additionalProperties: false - properties: - title: {type: string} - value: {type: string} - args: *arrayOfString - short: {type: boolean} - required: [type, index, alert] type: object @@ -397,7 +388,6 @@ properties: mattermost_icon_url_override: {type: string} mattermost_msg_pretext: {type: string} mattermost_msg_color: {enum: [good, warning, danger]} - mattermost_msg_fields: *mattermostField mattermost_title_link: {type: string} mattermost_footer: {type: string} mattermost_footer_icon: {type: string} From f29d96350396634a7b9aa21109c25221cda88142 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 6 Jun 2021 17:57:17 +0900 Subject: [PATCH 0292/1065] fix testcode --- tests/alerters/mattermost_test.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/alerters/mattermost_test.py b/tests/alerters/mattermost_test.py index 1fa81b9ae..18b393690 100644 --- a/tests/alerters/mattermost_test.py +++ b/tests/alerters/mattermost_test.py @@ -951,7 +951,6 @@ def test_mattermost_attach_kibana_discover_url_when_generated(): ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - print(actual_data) assert expected_data == actual_data @@ -994,7 +993,6 @@ def test_mattermost_attach_kibana_discover_url_when_not_generated(): ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - print(actual_data) assert expected_data == actual_data @@ -1044,7 +1042,6 @@ def test_mattermost_kibana_discover_title(): ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - print(actual_data) assert expected_data == actual_data @@ -1094,7 +1091,4 @@ def test_mattermost_kibana_discover_color(): ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) - print(actual_data) assert expected_data == actual_data - except Exception as ea: - assert expected_data in str(ea) From c8a6af5bed0ba865e12a50f5adf82e75250fb100 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 6 Jun 2021 19:11:37 +0900 Subject: [PATCH 0293/1065] Fix schema.yaml --- elastalert/schema.yaml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 960706d5a..bae822b0d 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -34,6 +34,19 @@ definitions: milliseconds: {type: number} schedule: {type: string} + mattermostField: &mattermostField + type: object + additionalProperties: false + properties: + title: {type: string} + value: {type: string} + args: *arrayOfString + short: {type: boolean} + + arrayOfMattermostFields: &arrayOfMattermostField + type: array + items: *mattermostField + filter: &filter {} required: [type, index, alert] @@ -388,6 +401,7 @@ properties: mattermost_icon_url_override: {type: string} mattermost_msg_pretext: {type: string} 
mattermost_msg_color: {enum: [good, warning, danger]} + mattermost_msg_fields: *arrayOfMattermostField mattermost_title_link: {type: string} mattermost_footer: {type: string} mattermost_footer_icon: {type: string} From 7fb672a6c88f842568cdc5f893a913efcf956fc2 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sun, 6 Jun 2021 06:21:01 -0400 Subject: [PATCH 0294/1065] Add PR 239 --- CHANGELOG.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ee6decd54..c37cdef69 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,7 +22,8 @@ - Add support for Elasticsearch API key authentication - [#208](https://github.com/jertel/elastalert2/pull/208) - @vbisserie - Add support for Elasticsearch 7.13 for building Kibana Discover URLs - [#212](https://github.com/jertel/elastalert2/pull/212) - @nsano-rururu - Follow symbolic links when traversing rules folder for rule files - [#214](https://github.com/jertel/elastalert2/pull/214) - @vbisserie -- Support optional suppression of SSL log warnings when http-posting alerts - [#222](https://github.com/jertel/elastalert2/pull/222/files) - @nsano-rururu +- Support optional suppression of SSL log warnings when http-posting alerts - [#222](https://github.com/jertel/elastalert2/pull/222) - @nsano-rururu +- Add support for inclusion of Kibana Discover URLs in MatterMost messages - [#239](https://github.com/jertel/elastalert2/pull/239) - @nsano-rururu ## Other changes - Speed up unit tests by adding default parallelism - [#164](https://github.com/jertel/elastalert2/pull/164) - @ferozsalam @@ -46,7 +47,8 @@ - Migrate away from external test mock dependency - [#233](https://github.com/jertel/elastalert2/pull/233) - @nsano-rururu - Improve ElastAlert 2 documentation relating to running scenarios - [#234](https://github.com/jertel/elastalert2/pull/234) - @ferozsalam - Improve test coverage and correct dict lookup syntax for alerter init functions - [#235](https://github.com/jertel/elastalert2/pull/235) - @nsano-rururu - +- Fix schema bug with MatterMost alerts - [#239](https://github.com/jertel/elastalert2/pull/239) - @nsano-rururu +- # 2.1.0 ## Breaking changes From 22057d6237354ecb607b98ce7b315b97a11514d6 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 7 Jun 2021 08:29:21 +0100 Subject: [PATCH 0295/1065] Fix Github link in Helm chart documentation --- docs/source/running_elastalert.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index 784c4123d..de4de8c4b 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -105,8 +105,8 @@ As a Kubernetes deployment The Docker container for ElastAlert 2 can be used directly as a Kubernetes deployment, but for convenience, a Helm chart is also available. See the instructions provided `on Github -`_ for more information on -how to install, configure, and run the chart. +`_ +for more information on how to install, configure, and run the chart. .. _python-instructions: From 05368b4b5e6bf0141bad346ed8e85403b646d15e Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Mon, 7 Jun 2021 07:30:17 -0400 Subject: [PATCH 0296/1065] Clarified Docker and Kubernetes readme section --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 2b6de51b1..985c16859 100644 --- a/README.md +++ b/README.md @@ -10,9 +10,10 @@ ElastAlert 2 is backwards compatible with the original ElastAlert rules. 
## Docker and Kubernetes ElastAlert 2 is well-suited to being run as a microservice, and is available -as a [Docker container][2]. A [Helm chart][7] is also maintained for easy -configuration as a Kubernetes deployment. For more instructions on how to -configure and run ElastAlert 2 in this way, see [here][8]. +as a [Docker container][2]. For more instructions on how to +configure and run ElastAlert 2 using Docker, see [here][8]. + +A [Helm chart][7] is also included for easy configuration as a Kubernetes deployment. ## Documentation From 128b6b9deb4dd60d0c913ebaa8d1631d0aa58172 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Tue, 8 Jun 2021 01:22:41 +0900 Subject: [PATCH 0297/1065] add mattermost_title in mattermost alerts --- docs/source/ruletypes.rst | 4 +++- elastalert/alerters/mattermost.py | 4 ++++ elastalert/schema.yaml | 1 + tests/alerters/mattermost_test.py | 3 ++- 4 files changed, 10 insertions(+), 2 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 6ac59fb61..dd3d6a0f1 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2207,7 +2207,9 @@ Example mattermost_msg_fields:: value: static field short: false -``mattermost_title_link``: You can add a link in your Mattermost notification by setting this to a valid URL. Defaults to "". +``mattermost_title``: Sets a title for the message, this shows up as a blue text at the start of the message. Defaults to "". + +``mattermost_title_link``: You can add a link in your Mattermost notification by setting this to a valid URL. Requires mattermost_title to be set. Defaults to "". ``mattermost_footer``: Add a static footer text for alert. Defaults to "". diff --git a/elastalert/alerters/mattermost.py b/elastalert/alerters/mattermost.py index 1319d7600..2da5b8c42 100644 --- a/elastalert/alerters/mattermost.py +++ b/elastalert/alerters/mattermost.py @@ -32,6 +32,7 @@ def __init__(self, rule): self.mattermost_msg_color = self.rule.get('mattermost_msg_color', 'danger') self.mattermost_msg_fields = self.rule.get('mattermost_msg_fields', '') self.mattermost_image_url = self.rule.get('mattermost_image_url', '') + self.mattermost_title = self.rule.get('mattermost_title', '') self.mattermost_title_link = self.rule.get('mattermost_title_link', '') self.mattermost_footer = self.rule.get('mattermost_footer', '') self.mattermost_footer_icon = self.rule.get('mattermost_footer_icon', '') @@ -107,6 +108,9 @@ def alert(self, matches): if self.mattermost_channel_override != '': payload['channel'] = self.mattermost_channel_override + if self.mattermost_title != '': + payload['attachments'][0]['title'] = self.mattermost_title + if self.mattermost_title_link != '': payload['attachments'][0]['title_link'] = self.mattermost_title_link diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index bae822b0d..9cee513ed 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -402,6 +402,7 @@ properties: mattermost_msg_pretext: {type: string} mattermost_msg_color: {enum: [good, warning, danger]} mattermost_msg_fields: *arrayOfMattermostField + mattermost_title: {type: string} mattermost_title_link: {type: string} mattermost_footer: {type: string} mattermost_footer_icon: {type: string} diff --git a/tests/alerters/mattermost_test.py b/tests/alerters/mattermost_test.py index 18b393690..3a32000b4 100644 --- a/tests/alerters/mattermost_test.py +++ b/tests/alerters/mattermost_test.py @@ -365,6 +365,7 @@ def test_mattermost_title_link(): 'mattermost_webhook_url': 'http://xxxxx', 'mattermost_msg_pretext': 
'aaaaa', 'mattermost_msg_color': 'danger', + 'mattermost_title': 'mattermost.title', 'mattermost_title_link': 'http://title.url', 'alert': [], 'alert_subject': 'Test Mattermost' @@ -384,10 +385,10 @@ def test_mattermost_title_link(): { 'fallback': 'Test Mattermost: aaaaa', 'color': 'danger', - 'title': 'Test Mattermost', 'pretext': 'aaaaa', 'fields': [], 'text': 'Test Mattermost Rule\n\n', + 'title': 'mattermost.title', 'title_link': 'http://title.url' } ], From c626f16d1496b388b7f4e84c07bebaf09d503266 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Mon, 7 Jun 2021 13:19:43 -0400 Subject: [PATCH 0298/1065] Add PR 246 to Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c37cdef69..b30fc741d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ - Follow symbolic links when traversing rules folder for rule files - [#214](https://github.com/jertel/elastalert2/pull/214) - @vbisserie - Support optional suppression of SSL log warnings when http-posting alerts - [#222](https://github.com/jertel/elastalert2/pull/222) - @nsano-rururu - Add support for inclusion of Kibana Discover URLs in MatterMost messages - [#239](https://github.com/jertel/elastalert2/pull/239) - @nsano-rururu +- Add support for inclusion of alert Title in MatterMost messages - [#246](https://github.com/jertel/elastalert2/pull/246) - @nsano-rururu ## Other changes - Speed up unit tests by adding default parallelism - [#164](https://github.com/jertel/elastalert2/pull/164) - @ferozsalam From 3b0c8bc5dcde694f5ac44e34d9d4616c70456cbe Mon Sep 17 00:00:00 2001 From: Yoan Blanc Date: Tue, 8 Jun 2021 15:30:33 +0200 Subject: [PATCH 0299/1065] fix: metrics_writeback takes three positional arguments Signed-off-by: Yoan Blanc --- elastalert/prometheus_wrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/prometheus_wrapper.py b/elastalert/prometheus_wrapper.py index d94a35200..6da0b904b 100644 --- a/elastalert/prometheus_wrapper.py +++ b/elastalert/prometheus_wrapper.py @@ -33,7 +33,7 @@ def metrics_run_rule(self, rule, endtime, starttime=None): finally: return self.run_rule(rule, endtime, starttime) - def metrics_writeback(self, doc_type, body): + def metrics_writeback(self, doc_type, body, rule=None, match_body=None): """ Update various prometheus metrics accoording to the doc_type """ res = self.writeback(doc_type, body) From 5c4310e14a9bf0acb140a26cbb1e3f2c127c91f6 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Tue, 8 Jun 2021 09:51:59 -0400 Subject: [PATCH 0300/1065] Add PR 253 to changelog --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b30fc741d..6c53ce4d1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -49,7 +49,8 @@ - Improve ElastAlert 2 documentation relating to running scenarios - [#234](https://github.com/jertel/elastalert2/pull/234) - @ferozsalam - Improve test coverage and correct dict lookup syntax for alerter init functions - [#235](https://github.com/jertel/elastalert2/pull/235) - @nsano-rururu - Fix schema bug with MatterMost alerts - [#239](https://github.com/jertel/elastalert2/pull/239) - @nsano-rururu -- +- Fix prometheus wrapper writeback function signature - [#253](https://github.com/jertel/elastalert2/pull/253) - @greut + # 2.1.0 ## Breaking changes From e296b4df048704e6b213904d5d9a0f8e7a1d5a26 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Tue, 8 Jun 2021 11:25:18 -0400 Subject: [PATCH 0301/1065] Switch to environment secrets for additional 
access controls --- .github/workflows/publish_image.yml | 2 ++ .github/workflows/python-publish.yml | 1 + .github/workflows/upload_chart.yml | 3 +++ 3 files changed, 6 insertions(+) diff --git a/.github/workflows/publish_image.yml b/.github/workflows/publish_image.yml index 1b2e9d5b8..02e2650ba 100644 --- a/.github/workflows/publish_image.yml +++ b/.github/workflows/publish_image.yml @@ -15,6 +15,8 @@ env: jobs: push: + environment: Main + runs-on: ubuntu-latest steps: diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index f36aec395..5b15ece47 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -10,6 +10,7 @@ on: jobs: deploy: + environment: Main runs-on: ubuntu-latest diff --git a/.github/workflows/upload_chart.yml b/.github/workflows/upload_chart.yml index ba29b719f..e7509a0ce 100644 --- a/.github/workflows/upload_chart.yml +++ b/.github/workflows/upload_chart.yml @@ -7,7 +7,10 @@ on: jobs: build: + environment: Main + runs-on: ubuntu-latest + steps: - uses: actions/checkout@v2 - name: Helm push chart From 2639e10da96d7f09aeb4b95037018bbe075e4237 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Tue, 8 Jun 2021 23:17:32 -0400 Subject: [PATCH 0302/1065] Prepare 2.1.1 for release --- CHANGELOG.md | 2 +- CONTRIBUTING.md | 2 +- chart/elastalert2/Chart.yaml | 4 ++-- chart/elastalert2/README.md | 2 +- chart/elastalert2/values.yaml | 2 +- docs/source/running_elastalert.rst | 2 +- setup.py | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6c53ce4d1..4dc7c3c45 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,7 +9,7 @@ ## Other changes - None -# Unreleased +# 2.1.1 ## Breaking changes - None diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e69deb9fa..429d51da3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -30,7 +30,7 @@ Maintainers, when creating a new release, follow the procedure below: - [Chart.yaml](chart/elastalert2/Chart.yaml): Match chart version and the app version to the new release version (typically keep them in sync) - [values.yaml](chart/elastalert2/values.yaml): Match the default image version to the new release version. - [Chart README.md](chart/elastalert2/README.md): Match the default image version to the new release version. - - [Project README.md](README.md): Match the default image version to the new release version. + - [Docs](docs/source/running_elastalert.rst): Match the default image version to the new release version. - [CHANGELOG.md](CHANGELOG.md): This must contain all PRs and any other relevent notes about this release 3. Publish a [new][1] release. - The title (and tag) of the release will be the same value as the new version determined in step 1. 
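Since the release procedure above keeps the same version string in lockstep across several files (Chart.yaml, values.yaml, the chart README, setup.py), drift is easy to catch mechanically. A hypothetical pre-tag helper, not a script that ships with the repository; it assumes setup.py keeps its single-quoted `version='...'` literal on one line, as the diff below shows:

```python
import re

import yaml

EXPECTED = "2.1.1"  # the release version chosen in step 1

def read_yaml(path):
    with open(path) as f:
        return yaml.safe_load(f)

chart = read_yaml("chart/elastalert2/Chart.yaml")
values = read_yaml("chart/elastalert2/values.yaml")
with open("setup.py") as f:
    setup_version = re.search(r"version='([^']+)'", f.read()).group(1)

found = {
    "Chart.yaml version": chart["version"],
    "Chart.yaml appVersion": chart["appVersion"],
    "values.yaml image.tag": str(values["image"]["tag"]),
    "setup.py version": setup_version,
}
mismatched = {k: v for k, v in found.items() if v != EXPECTED}
if mismatched:
    raise SystemExit(f"Version strings out of sync with {EXPECTED}: {mismatched}")
print(f"All release version strings match {EXPECTED}")
```

A similar regex pass over the chart README.md would cover the remaining file named in the checklist.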
diff --git a/chart/elastalert2/Chart.yaml b/chart/elastalert2/Chart.yaml index b713a29dd..9ec21f9b3 100644 --- a/chart/elastalert2/Chart.yaml +++ b/chart/elastalert2/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: Automated rule-based alerting for Elasticsearch name: elastalert2 -version: 2.1.0 -appVersion: 2.1.0 +version: 2.1.1 +appVersion: 2.1.1 home: https://github.com/jertel/elastalert2 sources: - https://github.com/jertel/elastalert2 diff --git a/chart/elastalert2/README.md b/chart/elastalert2/README.md index ee837de0e..92d0ec78c 100644 --- a/chart/elastalert2/README.md +++ b/chart/elastalert2/README.md @@ -47,7 +47,7 @@ The command removes all the Kubernetes components associated with the chart and | Parameter | Description | Default | |----------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|---------------------------------| | `image.repository` | docker image | jertel/elastalert2 | -| `image.tag` | docker image tag | 2.1.0 | +| `image.tag` | docker image tag | 2.1.1 | | `image.pullPolicy` | image pull policy | IfNotPresent | | `podAnnotations` | Annotations to be added to pods | {} | | `command` | command override for container | `NULL` | diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml index 9b13734d6..a61adb877 100644 --- a/chart/elastalert2/values.yaml +++ b/chart/elastalert2/values.yaml @@ -25,7 +25,7 @@ image: # docker image repository: jertel/elastalert2 # docker image tag - tag: 2.1.0 + tag: 2.1.1 pullPolicy: IfNotPresent resources: {} diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index de4de8c4b..4afb783a2 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -78,7 +78,7 @@ If you're interested in a pre-built Docker image check out the Be aware that the ``latest`` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need utilize a -versioned tag, such as ``2.1.0`` instead, or ``2`` if you are comfortable with +versioned tag, such as ``2.1.1`` instead, or ``2`` if you are comfortable with always using the latest released version of ElastAlert 2. 
A properly configured config.yaml file must be mounted into the container during diff --git a/setup.py b/setup.py index 67052553b..2b304755e 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ base_dir = os.path.dirname(__file__) setup( name='elastalert2', - version='2.1.0', + version='2.1.1', description='Automated rule-based alerting for Elasticsearch', setup_requires='setuptools', license='Apache 2.0', From ff28bacb06fc410c7cb18b8172715afaf09c4eca Mon Sep 17 00:00:00 2001 From: Yoan Blanc Date: Wed, 9 Jun 2021 09:40:18 +0200 Subject: [PATCH 0303/1065] fix: forward rule and match_body Signed-off-by: Yoan Blanc --- elastalert/prometheus_wrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/prometheus_wrapper.py b/elastalert/prometheus_wrapper.py index 6da0b904b..5b84d9358 100644 --- a/elastalert/prometheus_wrapper.py +++ b/elastalert/prometheus_wrapper.py @@ -36,7 +36,7 @@ def metrics_run_rule(self, rule, endtime, starttime=None): def metrics_writeback(self, doc_type, body, rule=None, match_body=None): """ Update various prometheus metrics accoording to the doc_type """ - res = self.writeback(doc_type, body) + res = self.writeback(doc_type, body, rule, match_body) try: if doc_type == 'elastalert_status': self.prom_hits.labels(body['rule_name']).inc(int(body['hits'])) From 09fef21fadc24f1204c1e8d5419c3a28e6578df2 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 11 Jun 2021 21:57:16 +0900 Subject: [PATCH 0304/1065] Rocket.Chat add kibana discover --- docs/source/ruletypes.rst | 12 +- elastalert/alerters/rocketchat.py | 12 ++ elastalert/schema.yaml | 3 + tests/alerters/rocketchat_test.py | 198 ++++++++++++++++++++++++++++++ 4 files changed, 222 insertions(+), 3 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index dd3d6a0f1..67ca34fc4 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2398,7 +2398,7 @@ The alerter requires the following option: Optional: -``rocket_chat_username_override``: By default RocketChat will use username defined in Integration when posting to the channel. Use this option to change it (free text). +``rocket_chat_username_override``: By default Rocket.Chat will use username defined in Integration when posting to the channel. Use this option to change it (free text). ``rocket_chat_channel_override``: Incoming webhooks have a default channel, but it can be overridden. A public channel can be specified “#other-channel”, and a Direct Message with “@username”. @@ -2409,9 +2409,15 @@ ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . ``rocket_chat_text_string``: Notification message you want to add. -``rocket_chat_proxy``: By default ElastAlert will not use a network proxy to send notifications to RocketChat. Set this option using ``hostname:port`` if you need to use a proxy. +``rocket_chat_proxy``: By default ElastAlert will not use a network proxy to send notifications to Rocket.Chat. Set this option using ``hostname:port`` if you need to use a proxy. -```rocket_chat_alert_fields``: You can add additional fields to your RocketChat alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`. +``rocket_chat_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the Rocket.Chat notification. 
The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``.
+
+``rocket_chat_kibana_discover_color``: The color of the Kibana Discover url attachment. Defaults to ``#ec4b98``.
+
+``rocket_chat_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``.
+
+``rocket_chat_alert_fields``: You can add additional fields to your Rocket.Chat alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`.
 
 Example rocket_chat_alert_fields::
 
diff --git a/elastalert/alerters/rocketchat.py b/elastalert/alerters/rocketchat.py
index 7ab832af3..a16ccc3fc 100644
--- a/elastalert/alerters/rocketchat.py
+++ b/elastalert/alerters/rocketchat.py
@@ -28,6 +28,9 @@ def __init__(self, rule):
         self.rocket_chat_msg_color = self.rule.get('rocket_chat_msg_color', 'danger')
         self.rocket_chat_text_string = self.rule.get('rocket_chat_text_string', '')
         self.rocket_chat_alert_fields = self.rule.get('rocket_chat_alert_fields', '')
+        self.rocket_chat_attach_kibana_discover_url = self.rule.get('rocket_chat_attach_kibana_discover_url', False)
+        self.rocket_chat_kibana_discover_color = self.rule.get('rocket_chat_kibana_discover_color', '#ec4b98')
+        self.rocket_chat_kibana_discover_title = self.rule.get('rocket_chat_kibana_discover_title', 'Discover in Kibana')
 
     def format_body(self, body):
         return body
@@ -77,6 +80,15 @@ def alert(self, matches):
         if self.rocket_chat_emoji_override != '':
             payload['emoji'] = self.rocket_chat_emoji_override
 
+        if self.rocket_chat_attach_kibana_discover_url:
+            kibana_discover_url = lookup_es_key(matches[0], 'kibana_discover_url')
+            if kibana_discover_url:
+                payload['attachments'].append({
+                    'color': self.rocket_chat_kibana_discover_color,
+                    'title': self.rocket_chat_kibana_discover_title,
+                    'title_link': kibana_discover_url
+                })
+
         for url in self.rocket_chat_webhook_url:
             for channel_override in self.rocket_chat_channel_override:
                 try:
diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml
index 9cee513ed..c724fd40a 100644
--- a/elastalert/schema.yaml
+++ b/elastalert/schema.yaml
@@ -475,6 +475,9 @@ properties:
     rocket_chat_msg_color: {enum: [good, warning, danger]}
     rocket_chat_text_string: {type: string}
     rocket_chat_proxy: {type: string}
+    rocket_chat_attach_kibana_discover_url: {type: boolean}
+    rocket_chat_kibana_discover_color: {type: string}
+    rocket_chat_kibana_discover_title: {type: string}
 
     ### ServiceNow
     servicenow_rest_url: {type: string}
diff --git a/tests/alerters/rocketchat_test.py b/tests/alerters/rocketchat_test.py
index d42db09c4..e1eb964be 100644
--- a/tests/alerters/rocketchat_test.py
+++ b/tests/alerters/rocketchat_test.py
@@ -628,3 +628,201 @@ def test_rocketchat_required_error(rocket_chat_webhook_url, expected_data):
         assert expected_data == actual_data
     except Exception as ea:
         assert expected_data in str(ea)
+
+
+def test_rocket_chat_attach_kibana_discover_url_when_generated():
+    rule = {
+        'name': 'Test Rule',
+        'type': 'any',
+        'alert_subject': 'Cool subject',
+        'alert': [],
+        'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat',
+        'rocket_chat_attach_kibana_discover_url': True
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = RocketChatAlerter(rule)
+    match = {
+        '@timestamp': '2021-01-01T00:00:00',
+        'somefield': 'foobarbaz',
+        'kibana_discover_url': 'http://localhost:5601/app/discover#/'
+    }
+ with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + }, + { + 'color': '#ec4b98', + 'title': 'Discover in Kibana', + 'title_link': 'http://localhost:5601/app/discover#/' + } + ], + 'text': '' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocket_chat_attach_kibana_discover_url_when_not_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [], + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_attach_kibana_discover_url': True + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + 'somefield': 'foobarbaz', + '@timestamp': '2021-01-01T00:00:00' + } + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocket_chat_kibana_discover_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [], + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_attach_kibana_discover_url': True, + 'rocket_chat_kibana_discover_title': 'Click to discover in Kibana' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + 'somefield': 'foobarbaz', + '@timestamp': '2021-01-01T00:00:00', + 'kibana_discover_url': 'http://localhost:5601/app/discover#/' + } + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + }, + { + 'color': '#ec4b98', + 'title': 'Click to discover in Kibana', + 'title_link': 'http://localhost:5601/app/discover#/' + } + ], + 'text': '' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocket_chat_kibana_discover_color(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'alert': [], + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocket_chat', + 'rocket_chat_attach_kibana_discover_url': True, + 'rocket_chat_kibana_discover_color': 'blue' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + 'somefield': 
'foobarbaz', + '@timestamp': '2021-01-01T00:00:00', + 'kibana_discover_url': 'http://localhost:5601/app/discover#/' + } + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert2', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + }, + { + 'color': 'blue', + 'title': 'Discover in Kibana', + 'title_link': 'http://localhost:5601/app/discover#/' + } + ], + 'text': '' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) From f853570091157b5c66b15c87e48bae1077bf7944 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 11 Jun 2021 23:32:28 +0900 Subject: [PATCH 0305/1065] Added exception handling to stomp.py --- elastalert/alerters/stomp.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/elastalert/alerters/stomp.py b/elastalert/alerters/stomp.py index 65e0d3101..ee0360fda 100644 --- a/elastalert/alerters/stomp.py +++ b/elastalert/alerters/stomp.py @@ -5,7 +5,7 @@ import stomp from elastalert.alerts import Alerter, BasicMatchString -from elastalert.util import lookup_es_key, elastalert_logger +from elastalert.util import lookup_es_key, elastalert_logger, EAException class StompAlerter(Alerter): @@ -63,13 +63,17 @@ def alert(self, matches): 'stomp_destination', '/queue/ALERT') self.stomp_ssl = self.rule.get('stomp_ssl', False) - conn = stomp.Connection([(self.stomp_hostname, self.stomp_hostport)], use_ssl=self.stomp_ssl) + try: + conn = stomp.Connection([(self.stomp_hostname, self.stomp_hostport)], use_ssl=self.stomp_ssl) - conn.connect(self.stomp_login, self.stomp_password) - # Ensures that the CONNECTED frame is received otherwise, the disconnect call will fail. - time.sleep(1) - conn.send(self.stomp_destination, json.dumps(fullmessage)) - conn.disconnect() + conn.connect(self.stomp_login, self.stomp_password) + # Ensures that the CONNECTED frame is received otherwise, the disconnect call will fail. 
+ time.sleep(1) + conn.send(self.stomp_destination, json.dumps(fullmessage)) + conn.disconnect() + except Exception as e: + raise EAException("Error posting to Stomp: %s" % e) + elastalert_logger.info("Alert sent to Stomp") def get_info(self): return {'type': 'stomp'} From 279617612d6ca3c20e17424870dec3fe7670ceb4 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 12 Jun 2021 01:08:57 +0900 Subject: [PATCH 0306/1065] Error handling fixes for Amazon SNS and Amazon SES --- elastalert/alerters/ses.py | 25 +++++++++++++------------ elastalert/alerters/sns.py | 35 +++++++++++++++++++---------------- 2 files changed, 32 insertions(+), 28 deletions(-) diff --git a/elastalert/alerters/ses.py b/elastalert/alerters/ses.py index 68b4c90a2..8171198e6 100644 --- a/elastalert/alerters/ses.py +++ b/elastalert/alerters/ses.py @@ -58,17 +58,18 @@ def alert(self, matches): if 'ses_email_add_domain' in self.rule: to_addr = [name + self.rule['ses_email_add_domain'] for name in to_addr] - if self.aws_profile != '': - session = boto3.Session(profile_name=self.aws_profile) - else: - session = boto3.Session( - aws_access_key_id=self.aws_access_key_id, - aws_secret_access_key=self.aws_secret_access_key, - region_name=self.aws_region - ) - - client = session.client('ses') try: + if self.aws_profile != '': + session = boto3.Session(profile_name=self.aws_profile) + else: + session = boto3.Session( + aws_access_key_id=self.aws_access_key_id, + aws_secret_access_key=self.aws_secret_access_key, + region_name=self.aws_region + ) + + client = session.client('ses') + client.send_email( Source=self.from_addr, Destination={ @@ -90,9 +91,9 @@ def alert(self, matches): }, ReplyToAddresses=self.rule.get('ses_email_reply_to', [])) except Exception as e: - raise EAException("Error sending ses: %s" % (e,)) + raise EAException("Error sending Amazon SES: %s" % e) - elastalert_logger.info("Sent ses to %s" % (to_addr,)) + elastalert_logger.info("Sent Amazon SES to %s" % (to_addr,)) def create_default_title(self, matches): subject = 'ElastAlert 2: %s' % (self.rule['name']) diff --git a/elastalert/alerters/sns.py b/elastalert/alerters/sns.py index 4c16911e4..7946007be 100644 --- a/elastalert/alerters/sns.py +++ b/elastalert/alerters/sns.py @@ -1,7 +1,7 @@ import boto3 from elastalert.alerts import Alerter -from elastalert.util import elastalert_logger +from elastalert.util import elastalert_logger, EAException class SnsAlerter(Alerter): @@ -23,22 +23,25 @@ def create_default_title(self, matches): def alert(self, matches): body = self.create_alert_body(matches) - if self.profile is None: - session = boto3.Session( - aws_access_key_id=self.sns_aws_access_key_id, - aws_secret_access_key=self.sns_aws_access_key_id, - region_name=self.sns_aws_region + try: + if self.profile is None: + session = boto3.Session( + aws_access_key_id=self.sns_aws_access_key_id, + aws_secret_access_key=self.sns_aws_access_key_id, + region_name=self.sns_aws_region + ) + else: + session = boto3.Session(profile_name=self.profile) + + sns_client = session.client('sns') + sns_client.publish( + TopicArn=self.sns_topic_arn, + Message=body, + Subject=self.create_title(matches) ) - else: - session = boto3.Session(profile_name=self.profile) - - sns_client = session.client('sns') - sns_client.publish( - TopicArn=self.sns_topic_arn, - Message=body, - Subject=self.create_title(matches) - ) - elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn)) + except Exception as e: + raise EAException("Error sending Amazon SNS: %s" % e) + 
elastalert_logger.info("Sent Amazon SNS notification to %s" % (self.sns_topic_arn)) def get_info(self): return {'type': 'sns'} From f7c2a9296962f694ff1b91e429e6a6271ef83a35 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sat, 12 Jun 2021 08:14:21 +0100 Subject: [PATCH 0307/1065] Tidy the documentation for ElastAlert invocation options - Add basic documentation of the Prometheus wrapper (from the old README, cc @greut) - Remove duplicate documentation of the `--end` flag - Alphabetise the option so it's easier to search through them (and find duplicates) --- docs/source/running_elastalert.rst | 51 +++++++++++++++--------------- 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index 4afb783a2..782771d4d 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -27,19 +27,25 @@ verboseness, change all alerts to ``DebugAlerter``, which prints alerts and suppresses their normal action, and skips writing search and alert metadata back to Elasticsearch. Not compatible with `--verbose`. -``--verbose`` will increase the logging verboseness, which allows you to see -information about the state of queries. Not compatible with `--debug`. +``--end `` will force ElastAlert to stop querying after the given +time, instead of the default, querying to the present time. This really only +makes sense when running standalone. The timestamp is formatted as +``YYYY-MM-DDTHH:MM:SS`` (UTC) or with timezone ``YYYY-MM-DDTHH:MM:SS-XX:00`` +(UTC-XX). -``--start `` will force ElastAlert to begin querying from the given -time, instead of the default, querying from the present. The timestamp should be -ISO8601, e.g. ``YYYY-MM-DDTHH:MM:SS`` (UTC) or with timezone -``YYYY-MM-DDTHH:MM:SS-08:00`` (PST). Note that if querying over a large date -range, no alerts will be sent until that rule has finished querying over the -entire time period. To force querying from the current time, use "NOW". +``--es_debug`` will enable logging for all queries made to Elasticsearch. + +``--es_debug_trace `` will enable logging curl commands for all +queries made to Elasticsearch to the specified log file. ``--es_debug_trace`` is +passed through to `elasticsearch.py +`_ which +logs `localhost:9200` instead of the actual ``es_host``:``es_port``. + +``--pin_rules`` will stop ElastAlert from loading, reloading or removing rules +based on changes to their config files. -``--end `` will cause ElastAlert to stop querying at the specified -timestamp. By default, ElastAlert will periodically query until the present -indefinitely. +``--prometheus_port`` exposes ElastAlert Prometheus metrics on the specified +port. Prometheus metrics disabled by default. ``--rule `` will only run the given rule. The rule file may be a complete file path or a filename in ``rules_folder`` or its subdirectories. @@ -50,22 +56,15 @@ days, weeks, hours, minutes or seconds. is an integer. For example, ``--rule noisy_rule.yaml --silence hours=4`` will stop noisy_rule from generating any alerts for 4 hours. -``--es_debug`` will enable logging for all queries made to Elasticsearch. - -``--es_debug_trace `` will enable logging curl commands for all -queries made to Elasticsearch to the specified log file. ``--es_debug_trace`` is -passed through to `elasticsearch.py -`_ which -logs `localhost:9200` instead of the actual ``es_host``:``es_port``. 
- -``--end `` will force ElastAlert to stop querying after the given -time, instead of the default, querying to the present time. This really only -makes sense when running standalone. The timestamp is formatted as -``YYYY-MM-DDTHH:MM:SS`` (UTC) or with timezone ``YYYY-MM-DDTHH:MM:SS-XX:00`` -(UTC-XX). +``--start `` will force ElastAlert to begin querying from the given +time, instead of the default, querying from the present. The timestamp should be +ISO8601, e.g. ``YYYY-MM-DDTHH:MM:SS`` (UTC) or with timezone +``YYYY-MM-DDTHH:MM:SS-08:00`` (PST). Note that if querying over a large date +range, no alerts will be sent until that rule has finished querying over the +entire time period. To force querying from the current time, use "NOW". -``--pin_rules`` will stop ElastAlert from loading, reloading or removing rules -based on changes to their config files. +``--verbose`` will increase the logging verboseness, which allows you to see +information about the state of queries. Not compatible with `--debug`. .. _docker-instructions: From 4e7d6ccb8a4a40bf52c0267b7b8e5ab4cc075d4e Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 12 Jun 2021 19:48:01 +0900 Subject: [PATCH 0308/1065] Added error handling for unsupported operand type --- docs/source/ruletypes.rst | 5 ++++ elastalert/elastalert.py | 23 +++++++++++++------ tests/base_test.py | 48 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 69 insertions(+), 7 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 67ca34fc4..ac23307d5 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -456,6 +456,11 @@ query_delay ``query_delay``: This option will cause ElastAlert to subtract a time delta from every query, causing the rule to run with a delay. This is useful if the data is Elasticsearch doesn't get indexed immediately. 
(Optional, time) +For example:: + + query_delay: + hours: 2 + owner ^^^^^ diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 74a3d9fef..8e2689105 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -613,7 +613,10 @@ def remove_old_events(self, rule): remove = [] buffer_time = rule.get('buffer_time', self.buffer_time) if rule.get('query_delay'): - buffer_time += rule['query_delay'] + try: + buffer_time += rule['query_delay'] + except Exception as e: + self.handle_error("[remove_old_events]Error parsing query_delay send time format %s" % e) for _id, timestamp in rule['processed_hits'].items(): if now - timestamp > buffer_time: remove.append(_id) @@ -1271,7 +1274,10 @@ def handle_rule_execution(self, rule): if hasattr(self.args, 'end') and self.args.end: endtime = ts_to_dt(self.args.end) elif delay: - endtime = ts_now() - delay + try: + endtime = ts_now() - delay + except Exception as e: + self.handle_error("[handle_rule_execution]Error parsing query_delay send time format %s" % e) else: endtime = ts_now() @@ -1847,11 +1853,14 @@ def add_aggregated_alert(self, match, rule): except Exception as e: self.handle_error("Error parsing aggregate send time Cron format %s" % (e), rule['aggregation']['schedule']) else: - if rule.get('aggregate_by_match_time', False): - match_time = ts_to_dt(lookup_es_key(match, rule['timestamp_field'])) - alert_time = match_time + rule['aggregation'] - else: - alert_time = ts_now() + rule['aggregation'] + try: + if rule.get('aggregate_by_match_time', False): + match_time = ts_to_dt(lookup_es_key(match, rule['timestamp_field'])) + alert_time = match_time + rule['aggregation'] + else: + alert_time = ts_now() + rule['aggregation'] + except Exception as e: + self.handle_error("[add_aggregated_alert]Error parsing aggregate send time format %s" % (e), rule['aggregation']) rule['aggregate_alert_time'][aggregation_key_value] = alert_time agg_id = None diff --git a/tests/base_test.py b/tests/base_test.py index ff17e3ce1..7bcf3f48c 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -1378,3 +1378,51 @@ def test_query_with_blacklist_filter_es_five(ea_sixsix): ea_sixsix.init_rule(new_rule, True) assert 'username:"xudan1" OR username:"xudan12" OR username:"aa1"' in new_rule['filter'][-1]['query_string'][ 'query'] + + +def test_handle_rule_execution_error(ea, caplog): + with mock.patch('elastalert.elastalert.elasticsearch_client'): + ea.rules[0]['aggregate_by_match_time'] = True + ea.rules[0]['summary_table_fields'] = ['@timestamp'] + ea.rules[0]['aggregation_key'] = ['service.name'] + ea.rules[0]['alert_text_type'] = 'aggregation_summary_only' + ea.rules[0]['query_delay'] = 'a' + new_rule = copy.copy(ea.rules[0]) + ea.init_rule(new_rule, True) + + ea.handle_rule_execution(ea.rules[0]) + user, level, message = caplog.record_tuples[0] + assert '[handle_rule_execution]Error parsing query_delay send time format' in message + + +def test_remove_old_events_error(ea, caplog): + with mock.patch('elastalert.elastalert.elasticsearch_client'): + ea.rules[0]['aggregate_by_match_time'] = True + ea.rules[0]['summary_table_fields'] = ['@timestamp'] + ea.rules[0]['aggregation_key'] = ['service.name'] + ea.rules[0]['alert_text_type'] = 'aggregation_summary_only' + ea.rules[0]['query_delay'] = 'a' + new_rule = copy.copy(ea.rules[0]) + ea.init_rule(new_rule, True) + + ea.remove_old_events(ea.rules[0]) + user, level, message = caplog.record_tuples[0] + assert '[remove_old_events]Error parsing query_delay send time format' in message + + +def 
test_add_aggregated_alert_error(ea, caplog): + mod = BaseEnhancement(ea.rules[0]) + mod.process = mock.Mock() + ea.rules[0]['match_enhancements'] = [mod] + ea.rules[0]['aggregation'] = {"hour": 5} + ea.rules[0]['run_enhancements_first'] = True + ea.rules[0]['aggregate_by_match_time'] = True + hits = generate_hits([START_TIMESTAMP, END_TIMESTAMP]) + ea.thread_data.current_es.search.return_value = hits + ea.rules[0]['type'].matches = [{'@timestamp': END}] + with mock.patch('elastalert.elastalert.elasticsearch_client'): + ea.run_rule(ea.rules[0], END, START) + user, level, message = caplog.record_tuples[0] + exceptd = "[add_aggregated_alert]" + exceptd += "Error parsing aggregate send time format unsupported operand type(s) for +: 'datetime.datetime' and 'dict'" + assert exceptd in message From eca7198485a177509d59c06afc2c5757b686650c Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 12 Jun 2021 20:29:55 +0900 Subject: [PATCH 0309/1065] Fixed docs of Python package --- docs/source/running_elastalert.rst | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index 782771d4d..1fb1a389a 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -118,11 +118,10 @@ Requirements - Elasticsearch - ISO8601 or Unix timestamped data - Python 3.9 -- pip, see requirements.txt -- Packages on Ubuntu 18.x: build-essential python3-pip python3.6 python3.6-dev libffi-dev libssl-dev -- Packages on Ubuntu 20.x: build-essential python3-pip python3.6 python3.6-dev libffi-dev libssl-dev -- Packages on CentOS 7.x: 'Development Tools' python3-pip python36 python3-devel python3-libs python3-setuptools libffi-devel openssl-devel -- Packages on CentOS 8.x: 'Development Tools' python3-pip python36 python3-devel python3-setuptools python3-libs libffi-devel openssl-devel +- pip +- Packages on Ubuntu 21.x: build-essential python3-pip python3.9 python3.9-dev libffi-dev libssl-dev + +If you want to install python 3.9 on CentOS, please install python 3.9 from the source code after installing 'Development Tools'. 
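Since the package now targets Python 3.9 exclusively, a quick interpreter check before running pip saves a confusing failure later. A short illustrative snippet (not part of the docs or the package):

```python
import sys

# ElastAlert 2 needs Python 3.9+; bail out early on older interpreters.
if sys.version_info < (3, 9):
    raise SystemExit(f"Python 3.9+ required, found {sys.version.split()[0]}")
print("Python version OK:", sys.version.split()[0])
```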
Downloading and Configuring --------------------------- From 54b79d478fa2596161e2a6a6b36727a7c692a061 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 13 Jun 2021 01:10:13 +0900 Subject: [PATCH 0310/1065] fix datadog --- elastalert/alerters/datadog.py | 2 +- tests/alerters/datadog_test.py | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/elastalert/alerters/datadog.py b/elastalert/alerters/datadog.py index 6ee8d9b3b..2fd71be1c 100644 --- a/elastalert/alerters/datadog.py +++ b/elastalert/alerters/datadog.py @@ -32,7 +32,7 @@ def alert(self, matches): response.raise_for_status() except RequestException as e: raise EAException('Error posting event to Datadog: %s' % e) - elastalert_logger.info('Alert sent to Datadog') + elastalert_logger.info('Alert sent to Datadog') def get_info(self): return {'type': 'datadog'} diff --git a/tests/alerters/datadog_test.py b/tests/alerters/datadog_test.py index 2d61a6693..e70325851 100644 --- a/tests/alerters/datadog_test.py +++ b/tests/alerters/datadog_test.py @@ -1,4 +1,5 @@ import json +import logging from unittest import mock import pytest @@ -9,7 +10,8 @@ from elastalert.util import EAException -def test_datadog_alerter(): +def test_datadog_alerter(caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'Test Datadog Event Alerter', 'type': 'any', @@ -43,6 +45,7 @@ def test_datadog_alerter(): ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to Datadog') == caplog.record_tuples[0] def test_datadog_alerterea_exception(): @@ -65,8 +68,9 @@ def test_datadog_alerterea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except EAException: - assert True + assert False + except EAException as ea: + assert 'Error posting event to Datadog:' in str(ea) def test_datadog_getinfo(): From ffe86e59ec860ef998b214ef8de336391d1478e9 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sat, 12 Jun 2021 17:53:10 +0100 Subject: [PATCH 0311/1065] Remove undefined parameter from docstring It looks like the ability to pass the `config` parameter was removed but the docstring explaining what it did stayed --- elastalert/elastalert.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 8e2689105..3786eaf51 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -47,14 +47,7 @@ class ElastAlerter(object): """ The main ElastAlert runner. This class holds all state about active rules, controls when queries are run, and passes information between rules and alerts. - :param args: An argparse arguments instance. Should contain debug and start - - :param conf: The configuration dictionary. At the top level, this - contains global options, and under 'rules', contains all state relating - to rules and alerts. In each rule in conf['rules'], the RuleType and Alerter - instances live under 'type' and 'alerts', respectively. The conf dictionary - should not be passed directly from a configuration file, but must be populated - by config.py:load_rules instead. """ + :param args: An argparse arguments instance. 
Should contain debug and start""" thread_data = threading.local() From 506228d946fb10ef40836ccbfdb2b5e7f1f828a0 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 13 Jun 2021 02:26:36 +0900 Subject: [PATCH 0312/1065] fix command alerter --- elastalert/alerters/command.py | 20 ++++++++++---------- tests/alerters/command_test.py | 22 ++++++++++------------ 2 files changed, 20 insertions(+), 22 deletions(-) diff --git a/elastalert/alerters/command.py b/elastalert/alerters/command.py index dc79afbb0..32643c358 100644 --- a/elastalert/alerters/command.py +++ b/elastalert/alerters/command.py @@ -15,20 +15,20 @@ def __init__(self, *args): self.last_command = [] self.shell = False - if isinstance(self.rule['command'], str): - self.shell = True - if '%' in self.rule['command']: - elastalert_logger.warning('Warning! You could be vulnerable to shell injection!') - self.rule['command'] = [self.rule['command']] - - def alert(self, matches): - # Format the command and arguments try: - command = [resolve_string(command_arg, matches[0]) for command_arg in self.rule['command']] - self.last_command = command + if isinstance(self.rule['command'], str): + self.shell = True + if '%' in self.rule['command']: + elastalert_logger.warning('Warning! You could be vulnerable to shell injection!') + self.rule['command'] = [self.rule['command']] except KeyError as e: raise EAException("Error formatting command: %s" % (e)) + def alert(self, matches): + # Format the command and arguments + command = [resolve_string(command_arg, matches[0]) for command_arg in self.rule['command']] + self.last_command = command + # Run command and pipe data try: subp = subprocess.Popen(command, stdin=subprocess.PIPE, shell=self.shell) diff --git a/tests/alerters/command_test.py b/tests/alerters/command_test.py index 8ea408fdb..3d6e86b4c 100644 --- a/tests/alerters/command_test.py +++ b/tests/alerters/command_test.py @@ -1,8 +1,8 @@ import json import subprocess -from unittest import mock import pytest +from unittest import mock from elastalert.alerters.command import CommandAlerter from elastalert.alerts import BasicMatchString @@ -100,24 +100,23 @@ def test_command_fail_on_non_zero_exit(): def test_command_os_error(): - try: - rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], - 'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'} - alert = CommandAlerter(rule) - match = {'@timestamp': '2014-01-01T00:00:00', - 'somefield': 'foobarbaz'} + rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'], + 'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'} + alert = CommandAlerter(rule) + match = {'@timestamp': '2014-01-01T00:00:00', + 'somefield': 'foobarbaz'} + with pytest.raises(EAException) as ea: mock_run = mock.MagicMock(side_effect=OSError) with mock.patch("elastalert.alerters.command.subprocess.Popen", mock_run), pytest.raises(OSError) as mock_popen: mock_subprocess = mock.Mock() mock_popen.return_value = mock_subprocess mock_subprocess.communicate.return_value = (None, None) alert.alert([match]) - except EAException: - assert True + assert 'Error while running command /bin/test/ --arg foobarbaz: ' in str(ea) def test_command_key_error(): - try: + with pytest.raises(EAException) as ea: rule = {} alert = CommandAlerter(rule) match = {'@timestamp': '2014-01-01T00:00:00', @@ -125,5 +124,4 @@ def test_command_key_error(): 'nested': {'field': 1}} with mock.patch("elastalert.alerters.command.subprocess.Popen"): alert.alert([match]) - except KeyError: - assert True + assert 'Error formatting command:' in 
str(ea) From e2cd2f22ebd0defe263da2bf739e3e1fac3a2cb5 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sun, 13 Jun 2021 07:45:48 +0100 Subject: [PATCH 0313/1065] Tidy example files into a single folder This reduces clutter in the root directory and gives us a place to put all sample files in the future. Also updates the documentation to point to the new file locations. --- CONTRIBUTING.md | 2 +- README.md | 2 +- docs/source/elastalert.rst | 2 +- docs/source/recipes/adding_rules.rst | 2 +- docs/source/ruletypes.rst | 8 ++++---- docs/source/running_elastalert.rst | 16 ++++++++-------- .../config.yaml.example | 2 +- .../rules}/example_cardinality.yaml | 0 .../rules}/example_change.yaml | 0 .../rules}/example_frequency.yaml | 0 .../rules}/example_new_term.yaml | 0 .../rules}/example_opsgenie_frequency.yaml | 0 .../rules}/example_percentage_match.yaml | 0 .../rules}/example_single_metric_agg.yaml | 0 .../rules}/example_spike.yaml | 0 .../rules}/example_spike_single_metric_agg.yaml | 0 .../rules}/exemple_discord_any.yaml | 0 {example_rules => examples/rules}/jira_acct.txt | 0 .../rules}/ssh-repeat-offender.yaml | 0 {example_rules => examples/rules}/ssh.yaml | 0 .../supervisord.conf.example | 0 21 files changed, 17 insertions(+), 17 deletions(-) rename config.yaml.example => examples/config.yaml.example (99%) rename {example_rules => examples/rules}/example_cardinality.yaml (100%) rename {example_rules => examples/rules}/example_change.yaml (100%) rename {example_rules => examples/rules}/example_frequency.yaml (100%) rename {example_rules => examples/rules}/example_new_term.yaml (100%) rename {example_rules => examples/rules}/example_opsgenie_frequency.yaml (100%) rename {example_rules => examples/rules}/example_percentage_match.yaml (100%) rename {example_rules => examples/rules}/example_single_metric_agg.yaml (100%) rename {example_rules => examples/rules}/example_spike.yaml (100%) rename {example_rules => examples/rules}/example_spike_single_metric_agg.yaml (100%) rename {example_rules => examples/rules}/exemple_discord_any.yaml (100%) rename {example_rules => examples/rules}/jira_acct.txt (100%) rename {example_rules => examples/rules}/ssh-repeat-offender.yaml (100%) rename {example_rules => examples/rules}/ssh.yaml (100%) rename supervisord.conf.example => examples/supervisord.conf.example (100%) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 429d51da3..f4f310e65 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,7 +10,7 @@ running on your machine). Before submitting the PR review that you have included the following changes, where applicable: - Documentation: If you're adding new functionality, any new configuration options should be documented appropriately in the docs/ folder. - Helm Chart: If your new feature introduces settings consider adding those to the Helm chart [README.md](chart/elastalert2/README.md) and [values.yaml](chart/elastalert2/values.yaml) -- Examples: If your new feature includes new configuration options, review the [Example config file](config.yaml.example) to see if they should be added there for consistency with other configuration options. +- Examples: If your new feature includes new configuration options, review the [Example config file](examples/config.yaml.example) to see if they should be added there for consistency with other configuration options. - Change log: Describe your contribution to the appropriate section(s) for the _Upcoming release_, in the [CHANGELOG.md](CHANGELOG.md) file. 
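File moves like this one tend to leave stale paths behind in prose. A small, hypothetical consistency check (not included in the repository) that scans the docs tree for `examples/` references and flags any that no longer exist after the reorganization:

```python
import os
import re

# Match doc references such as examples/rules/ssh.yaml or
# examples/config.yaml.example and verify each file is present.
pattern = re.compile(r"examples/[\w./-]+\.(?:yaml|example|txt)")
stale = []
for root, _dirs, files in os.walk("docs"):
    for name in files:
        if not name.endswith(".rst"):
            continue
        path = os.path.join(root, name)
        with open(path) as f:
            for ref in pattern.findall(f.read()):
                if not os.path.exists(ref):
                    stale.append((path, ref))

print(stale or "No stale example paths found")
```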
## Releases diff --git a/README.md b/README.md index 985c16859..0e1070211 100644 --- a/README.md +++ b/README.md @@ -30,7 +30,7 @@ Please see our [contributing guidelines][6]. ElastAlert 2 is licensed under the [Apache License, Version 2.0][5]. [0]: https://github.com/yelp/elastalert -[1]: https://github.com/jertel/elastalert2/blob/master/config.yaml.example +[1]: https://github.com/jertel/elastalert2/blob/master/examples/config.yaml.example [2]: https://hub.docker.com/r/jertel/elastalert2 [3]: https://elastalert2.readthedocs.io/ [4]: https://elastalert2.readthedocs.io/en/latest/ruletypes.html#alerts diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index bb942be16..3cf1ec242 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -242,5 +242,5 @@ If you need a more sophisticated logging configuration, you can provide a full l in the config file. This way you can also configure logging to a file, to Logstash and adjust the logging format. -For details, see the end of ``config.yaml.example`` where you can find an example logging +For details, see the end of ``examples/config.yaml.example`` where you can find an example logging configuration. diff --git a/docs/source/recipes/adding_rules.rst b/docs/source/recipes/adding_rules.rst index 1ea2be6f5..296b0d67f 100644 --- a/docs/source/recipes/adding_rules.rst +++ b/docs/source/recipes/adding_rules.rst @@ -130,7 +130,7 @@ Now, in a file named ``my_rules.py``, add pass -In the rule configuration file, ``example_rules/example_login_rule.yaml``, we are going to specify this rule by writing +In the rule configuration file, ``examples/rules/example_login_rule.yaml``, we are going to specify this rule by writing .. code-block:: yaml diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index ac23307d5..c3ad96ebb 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1,7 +1,7 @@ Rule Types and Configuration Options ************************************ -Examples of several types of rule configuration can be found in the example_rules folder. +Examples of several types of rule configuration can be found in the ``examples/rules`` folder. .. _commonconfig: @@ -913,7 +913,7 @@ It is possible to mix between whitelisted value definitions, or use either one. Change ~~~~~~ -For an example configuration file using this rule type, look at ``example_rules/example_change.yaml``. +For an example configuration file using this rule type, look at ``examples/rules/example_change.yaml``. ``change``: This rule will monitor a certain field and match if that field changes. The field must change with respect to the last event with the same ``query_key``. @@ -936,7 +936,7 @@ of the ``compare_key`` field. Frequency ~~~~~~~~~ -For an example configuration file using this rule type, look at ``example_rules/example_frequency.yaml``. +For an example configuration file using this rule type, look at ``example/rules/example_frequency.yaml``. ``frequency``: This rule matches when there are at least a certain number of events in a given time frame. This may be counted on a per-``query_key`` basis. @@ -2062,7 +2062,7 @@ This alert requires four additional options: ``jira_account_file``: The path to the file which contains JIRA account credentials. -For an example JIRA account file, see ``example_rules/jira_acct.yaml``. The account file is also yaml formatted and must contain two fields: +For an example JIRA account file, see ``examples/rules/jira_acct.yaml``. 
The account file is also yaml formatted and must contain two fields: ``user``: The username. diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index 1fb1a389a..cf78f1086 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -82,7 +82,7 @@ always using the latest released version of ElastAlert 2. A properly configured config.yaml file must be mounted into the container during startup of the container. Use the `example file -`_ +`_ provided as a template, and once saved locally to a file such as ``/tmp/elastalert.yaml``, run the container as follows: @@ -146,14 +146,14 @@ Elasticsearch 5.0+:: $ pip install "elasticsearch>=5.0.0" -Next, open up config.yaml.example. In it, you will find several configuration +Next, open up ``examples/config.yaml.example``. In it, you will find several configuration options. ElastAlert may be run without changing any of these settings. ``rules_folder`` is where ElastAlert will load rule configuration files from. It will attempt to load every .yaml file in the folder. Without any valid rules, ElastAlert will not start. ElastAlert will also load new rules, stop running missing rules, and restart modified rules as the files in this folder change. -For this tutorial, we will use the example_rules folder. +For this tutorial, we will use the ``examples/rules`` folder. ``run_every`` is how often ElastAlert will query Elasticsearch. @@ -232,9 +232,9 @@ Creating a Rule Each rule defines a query to perform, parameters on what triggers a match, and a list of alerts to fire for each match. We are going to use -``example_rules/example_frequency.yaml`` as a template:: +``examples/rules/example_frequency.yaml`` as a template:: - # From example_rules/example_frequency.yaml + # From examples/rules/example_frequency.yaml es_host: elasticsearch.example.com es_port: 14900 name: Example rule @@ -300,12 +300,12 @@ Testing Your Rule Running the ``elastalert-test-rule`` tool will test that your config file successfully loads and run it in debug mode over the last 24 hours:: - $ elastalert-test-rule example_rules/example_frequency.yaml + $ elastalert-test-rule examples/rules/example_frequency.yaml If you want to specify a configuration file to use, you can run it with the config flag:: - $ elastalert-test-rule --config example_rules/example_frequency.yaml + $ elastalert-test-rule --config examples/rules/example_frequency.yaml The configuration preferences will be loaded as follows: 1. Configurations specified in the yaml file. @@ -331,7 +331,7 @@ purposes in this tutorial, we will invoke it directly:: ElastAlert uses the python logging system and ``--verbose`` sets it to display INFO level messages. ``--rule example_frequency.yaml`` specifies the rule to run, otherwise ElastAlert will attempt to load the other rules in the -example_rules folder. +``examples/rules`` folder. Let's break down the response to see what's happening. 
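As a sanity check before pointing `elastalert-test-rule` at the file, the rule YAML can be loaded directly. A hypothetical snippet, echoing the connection fields from the tutorial template above; a syntax error in the rule surfaces here instead of at run time:

```python
import yaml

# Parse the tutorial's rule file and print the settings it will use.
with open("examples/rules/example_frequency.yaml") as f:
    rule = yaml.safe_load(f)

for key in ("name", "type", "es_host", "es_port"):
    print(f"{key}: {rule.get(key)}")
```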
diff --git a/config.yaml.example b/examples/config.yaml.example similarity index 99% rename from config.yaml.example rename to examples/config.yaml.example index d0e6299b4..a80a48f89 100644 --- a/config.yaml.example +++ b/examples/config.yaml.example @@ -1,7 +1,7 @@ # This is the folder that contains the rule yaml files # This can also be a list of directories # Any .yaml file will be loaded as a rule -rules_folder: example_rules +rules_folder: examples/rules # How often ElastAlert will query Elasticsearch # The unit can be anything from weeks to seconds diff --git a/example_rules/example_cardinality.yaml b/examples/rules/example_cardinality.yaml similarity index 100% rename from example_rules/example_cardinality.yaml rename to examples/rules/example_cardinality.yaml diff --git a/example_rules/example_change.yaml b/examples/rules/example_change.yaml similarity index 100% rename from example_rules/example_change.yaml rename to examples/rules/example_change.yaml diff --git a/example_rules/example_frequency.yaml b/examples/rules/example_frequency.yaml similarity index 100% rename from example_rules/example_frequency.yaml rename to examples/rules/example_frequency.yaml diff --git a/example_rules/example_new_term.yaml b/examples/rules/example_new_term.yaml similarity index 100% rename from example_rules/example_new_term.yaml rename to examples/rules/example_new_term.yaml diff --git a/example_rules/example_opsgenie_frequency.yaml b/examples/rules/example_opsgenie_frequency.yaml similarity index 100% rename from example_rules/example_opsgenie_frequency.yaml rename to examples/rules/example_opsgenie_frequency.yaml diff --git a/example_rules/example_percentage_match.yaml b/examples/rules/example_percentage_match.yaml similarity index 100% rename from example_rules/example_percentage_match.yaml rename to examples/rules/example_percentage_match.yaml diff --git a/example_rules/example_single_metric_agg.yaml b/examples/rules/example_single_metric_agg.yaml similarity index 100% rename from example_rules/example_single_metric_agg.yaml rename to examples/rules/example_single_metric_agg.yaml diff --git a/example_rules/example_spike.yaml b/examples/rules/example_spike.yaml similarity index 100% rename from example_rules/example_spike.yaml rename to examples/rules/example_spike.yaml diff --git a/example_rules/example_spike_single_metric_agg.yaml b/examples/rules/example_spike_single_metric_agg.yaml similarity index 100% rename from example_rules/example_spike_single_metric_agg.yaml rename to examples/rules/example_spike_single_metric_agg.yaml diff --git a/example_rules/exemple_discord_any.yaml b/examples/rules/exemple_discord_any.yaml similarity index 100% rename from example_rules/exemple_discord_any.yaml rename to examples/rules/exemple_discord_any.yaml diff --git a/example_rules/jira_acct.txt b/examples/rules/jira_acct.txt similarity index 100% rename from example_rules/jira_acct.txt rename to examples/rules/jira_acct.txt diff --git a/example_rules/ssh-repeat-offender.yaml b/examples/rules/ssh-repeat-offender.yaml similarity index 100% rename from example_rules/ssh-repeat-offender.yaml rename to examples/rules/ssh-repeat-offender.yaml diff --git a/example_rules/ssh.yaml b/examples/rules/ssh.yaml similarity index 100% rename from example_rules/ssh.yaml rename to examples/rules/ssh.yaml diff --git a/supervisord.conf.example b/examples/supervisord.conf.example similarity index 100% rename from supervisord.conf.example rename to examples/supervisord.conf.example From 4500f28468ed8f3c42701bb7acb12fa0d6326dee 
Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 14 Jun 2021 21:15:53 +0100 Subject: [PATCH 0314/1065] Fix typo Also tweak gitignore to only look for `/rules` folders under the root directory --- .gitignore | 2 +- docs/source/ruletypes.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 702514108..c58f29bcb 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,4 @@ build/ my_rules *.swp *~ -rules/ +/rules/ diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index c3ad96ebb..2b2a95f9b 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -936,7 +936,7 @@ of the ``compare_key`` field. Frequency ~~~~~~~~~ -For an example configuration file using this rule type, look at ``example/rules/example_frequency.yaml``. +For an example configuration file using this rule type, look at ``examples/rules/example_frequency.yaml``. ``frequency``: This rule matches when there are at least a certain number of events in a given time frame. This may be counted on a per-``query_key`` basis. From deb8fa3cbaaf74bec0b3a97b819abcbb6efc7df1 Mon Sep 17 00:00:00 2001 From: markus-nclose <38457858+markus-nclose@users.noreply.github.com> Date: Thu, 17 Jun 2021 08:58:46 +0200 Subject: [PATCH 0315/1065] Create example_thehive_frequency.yaml Example TheHive alert with a custom alert_text to replace the description in order to add a Kibana discover URL. --- examples/rules/example_thehive_frequency.yaml | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 examples/rules/example_thehive_frequency.yaml diff --git a/examples/rules/example_thehive_frequency.yaml b/examples/rules/example_thehive_frequency.yaml new file mode 100644 index 000000000..bd12a9505 --- /dev/null +++ b/examples/rules/example_thehive_frequency.yaml @@ -0,0 +1,52 @@ +# This example will look at values in the query_key for a number of events that occurred during the timeframe. +# Unique alerts will be sent to TheHive containing a value from the alert in the tags as well as a Kibana link in the description. We will be using alert_text in order to generate a custom description. + +name: "Example TheHive with frequency and query_key" +index: your_indice_%Y-%m-%d + +type: frequency +num_events: 30 + +# Count frequency based on values in "dest_ip" +query_key: dest_ip + +# Example query +filter: +- query: + query_string: + query: "event_id:4625" + +# The alert text below allows for the alert description in TheHive to contain new lines for easier reading. +alert_text_args: [ kibana_discover_url, dest_ip ] +alert_text_type: alert_text_only +alert_text: | + 'Example bruteforce alert to the destination IP {1}.
' + + Kibana URL: + + {0} + +# Details needed in order to generate Kibana discover URL in alert_text +generate_kibana_discover_url: true +kibana_discover_app_url: http://your.kibana.server/app/kibana#/discover +kibana_discover_version: '7.5' +kibana_discover_index_pattern_id: 477b4a90-25ead-11b9-ad2c-19e82a454d17 + + +# Needed +alert: +- hivealerter + +hive_alert_config: + type: 'test' + source: 'elastalert' + # description: 'description disabled as we will be using alert_text to insert our Kibana URL' + severity: 1 + tags: [field_1, 'bruteforce' ] + tlp: 2 + status: 'New' + follow: True + +hive_observable_data_mapping: + - ip: dest_ip + - fqdn: host_name From cdac3184076117aa2328ad517a758477654230b7 Mon Sep 17 00:00:00 2001 From: Cedric Charest Date: Fri, 18 Jun 2021 12:23:58 -0400 Subject: [PATCH 0316/1065] Fix #281 self.rule parameters to jinja2 Template --- elastalert/alerts.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 2525a0ce3..07ee6a802 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -31,12 +31,13 @@ def _ensure_new_line(self): def _add_custom_alert_text(self): missing = self.rule.get('alert_missing_value', '') alert_text = str(self.rule.get('alert_text', '')) - if 'alert_text_jinja' == self.rule.get('alert_text_type'): + if self.rule.get('alert_text_type') == 'alert_text_jinja': # Top fields are accessible via `{{field_name}}` or `{{jinja_root_name['field_name']}}` # `jinja_root_name` dict is useful when accessing *fields with dots in their keys*, # as Jinja treat dot as a nested field. - alert_text = self.rule.get("jinja_template").render(**self.match, - **{self.rule['jinja_root_name']: self.match}) + alert_text = self.rule.get("jinja_template").render(**self.match, **self.rule, + **{self.rule['jinja_root_name']: {**self.match, + **self.rule}}) elif 'alert_text_args' in self.rule: alert_text_args = self.rule.get('alert_text_args') alert_text_values = [lookup_es_key(self.match, arg) for arg in alert_text_args] From 71ef3a531a49208ab0b6af9b3072847d9e70c1ae Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 19 Jun 2021 01:32:39 +0900 Subject: [PATCH 0317/1065] Bump pytest-xdist from 2.2.1 to 2.3.0 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index d7b7b09f9..ea0c82390 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -8,7 +8,7 @@ pluggy>=0.12.0 pre-commit pylint<2.9 pytest==6.2.4 -pytest-xdist==2.2.1 +pytest-xdist==2.3.0 setuptools sphinx_rtd_theme tox==3.23.1 From 8e5eb6c609c3dbdd63e826469653586f8c317ce1 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 19 Jun 2021 01:49:45 +0900 Subject: [PATCH 0318/1065] Deleted the default value setting of smtp_port --- elastalert/alerters/email.py | 4 +- tests/alerters/email_test.py | 248 +++++++++++++++++++++++++++++++++-- 2 files changed, 241 insertions(+), 11 deletions(-) diff --git a/elastalert/alerters/email.py b/elastalert/alerters/email.py index 896647982..193346408 100644 --- a/elastalert/alerters/email.py +++ b/elastalert/alerters/email.py @@ -25,7 +25,7 @@ def __init__(self, *args): self.smtp_host = self.rule.get('smtp_host', 'localhost') self.smtp_ssl = self.rule.get('smtp_ssl', False) self.from_addr = self.rule.get('from_addr', 'ElastAlert') - self.smtp_port = self.rule.get('smtp_port', 25) + self.smtp_port = self.rule.get('smtp_port') if self.rule.get('smtp_auth_file'): self.get_account(self.rule['smtp_auth_file']) 
self.smtp_key_file = self.rule.get('smtp_key_file') @@ -95,11 +95,13 @@ def alert(self, matches): if self.smtp_port: self.smtp = SMTP_SSL(self.smtp_host, self.smtp_port, keyfile=self.smtp_key_file, certfile=self.smtp_cert_file) else: + # default port : 465 self.smtp = SMTP_SSL(self.smtp_host, keyfile=self.smtp_key_file, certfile=self.smtp_cert_file) else: if self.smtp_port: self.smtp = SMTP(self.smtp_host, self.smtp_port) else: + # default port : 25 self.smtp = SMTP(self.smtp_host) self.smtp.ehlo() if self.smtp.has_extn('STARTTLS'): diff --git a/tests/alerters/email_test.py b/tests/alerters/email_test.py index 85a643cbb..31597526a 100644 --- a/tests/alerters/email_test.py +++ b/tests/alerters/email_test.py @@ -1,13 +1,17 @@ import base64 +import datetime +import logging +import pytest from unittest import mock -import pytest from elastalert.alerters.email import EmailAlerter +from elastalert.util import EAException from tests.alerts_test import mock_rule -def test_email(): +def test_email(caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'test alert', 'email': ['testing@test.test', 'test@test.test'], @@ -25,7 +29,7 @@ def test_email(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), + expected = [mock.call('localhost'), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -46,6 +50,7 @@ def test_email(): assert 'To: testing@test.test' in body assert 'From: testfrom@test.test' in body assert 'Subject: Test alert for test_value, owned by owner_value' in body + assert ('elastalert', logging.INFO, "Sent email to ['testing@test.test', 'test@test.test']") == caplog.record_tuples[0] def test_email_from_field(): @@ -113,7 +118,7 @@ def test_email_with_unicode_strings(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), + expected = [mock.call('localhost'), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -149,7 +154,7 @@ def test_email_with_auth(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), + expected = [mock.call('localhost'), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -188,7 +193,7 @@ def test_email_with_cert_key(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), + expected = [mock.call('localhost'), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile='dummy/cert.crt', keyfile='dummy/client.key'), @@ -220,7 +225,7 @@ def test_email_with_cc(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), + expected = [mock.call('localhost'), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -259,7 +264,7 @@ def test_email_with_bcc(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), + expected = [mock.call('localhost'), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -299,7 +304,7 @@ def test_email_with_cc_and_bcc(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value'}]) - expected = [mock.call('localhost', 25), + expected = [mock.call('localhost'), mock.call().ehlo(), 
mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -344,7 +349,7 @@ def test_email_with_args(): alert = EmailAlerter(rule) alert.alert([{'test_term': 'test_value', 'test_arg1': 'testing', 'test': {'term': ':)', 'arg3': '☃'}}]) - expected = [mock.call('localhost', 25), + expected = [mock.call('localhost'), mock.call().ehlo(), mock.call().has_extn('STARTTLS'), mock.call().starttls(certfile=None, keyfile=None), @@ -451,3 +456,226 @@ def test_email_key_error(email, expected_data): assert expected_data == actual_data except Exception: assert expected_data + + +@pytest.mark.parametrize('query_key, expected_data', [ + ('hostname', 'ElastAlert: Test email rule! - aProbe'), + ('test', 'ElastAlert: Test email rule!'), + ('', 'ElastAlert: Test email rule!'), +]) +def test_email_create_default_title(query_key, expected_data): + rule = { + 'name': 'Test email rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'type': 'any', + 'alert': 'email', + 'email': 'test@test.com' + } + if query_key != '': + rule['query_key'] = query_key + + match = [ + { + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + }, + { + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname2': 'aProbe' + } + ] + alert = EmailAlerter(rule) + + result = alert.create_default_title(match) + assert expected_data == result + + +def test_email_smtp_port(): + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'smtp_port': 35, + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'owner': 'owner_value', + 'alert_subject': 'Test alert for {0}, owned by {1}', + 'alert_subject_args': ['test_term', 'owner'], + 'snowman': '☃' + } + with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: + mock_smtp.return_value = mock.Mock() + + alert = EmailAlerter(rule) + alert.alert([{'test_term': 'test_value'}]) + expected = [mock.call('localhost', 35), + mock.call().ehlo(), + mock.call().has_extn('STARTTLS'), + mock.call().starttls(certfile=None, keyfile=None), + mock.call().sendmail( + mock.ANY, + [ + 'testing@test.test', + 'test@test.test' + ], + mock.ANY + ), + mock.call().quit()] + assert mock_smtp.mock_calls == expected + + body = mock_smtp.mock_calls[4][1][2] + + assert 'Reply-To: test@example.com' in body + assert 'To: testing@test.test' in body + assert 'From: testfrom@test.test' in body + assert 'Subject: Test alert for test_value, owned by owner_value' in body + + +def test_email_smtp_ssl_true(): + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'smtp_ssl': True, + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'owner': 'owner_value', + 'alert_subject': 'Test alert for {0}, owned by {1}', + 'alert_subject_args': ['test_term', 'owner'], + 'snowman': '☃' + } + with mock.patch('elastalert.alerters.email.SMTP_SSL') as mock_smtp: + mock_smtp.return_value = mock.Mock() + + alert = EmailAlerter(rule) + alert.alert([{'test_term': 'test_value'}]) + expected = [mock.call('localhost', certfile=None, keyfile=None), + mock.call().sendmail( + mock.ANY, + [ + 'testing@test.test', + 'test@test.test' + ], + mock.ANY + ), + mock.call().quit()] + assert mock_smtp.mock_calls == expected + + body = mock_smtp.mock_calls[1][1][2] + + 
assert 'Reply-To: test@example.com' in body + assert 'To: testing@test.test' in body + assert 'From: testfrom@test.test' in body + assert 'Subject: Test alert for test_value, owned by owner_value' in body + + +def test_email_smtp_ssl_true_and_smtp_port(): + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'smtp_ssl': True, + 'smtp_port': 455, + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'owner': 'owner_value', + 'alert_subject': 'Test alert for {0}, owned by {1}', + 'alert_subject_args': ['test_term', 'owner'], + 'snowman': '☃' + } + with mock.patch('elastalert.alerters.email.SMTP_SSL') as mock_smtp: + mock_smtp.return_value = mock.Mock() + + alert = EmailAlerter(rule) + alert.alert([{'test_term': 'test_value'}]) + expected = [mock.call('localhost', 455, certfile=None, keyfile=None), + mock.call().sendmail( + mock.ANY, + [ + 'testing@test.test', + 'test@test.test' + ], + mock.ANY + ), + mock.call().quit()] + assert mock_smtp.mock_calls == expected + + body = mock_smtp.mock_calls[1][1][2] + + assert 'Reply-To: test@example.com' in body + assert 'To: testing@test.test' in body + assert 'From: testfrom@test.test' in body + assert 'Subject: Test alert for test_value, owned by owner_value' in body + + +def test_email_smtp_exception(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'alert_subject': 'Test alert for {0}', + 'alert_subject_args': ['test_term'], + 'smtp_auth_file': 'file.txt', + 'rule_file': '/tmp/foo.yaml' + } + with mock.patch('elastalert.alerters.email.SMTP_SSL') as mock_smtp: + with mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} + mock_smtp.return_value = mock.Mock() + alert = EmailAlerter(rule) + + alert.alert([{'test_term': 'test_value'}]) + assert 'Error connecting to SMTP host: ' in str(ea) + + +def test_email_format_html(): + rule = { + 'name': 'test alert', + 'email': ['testing@test.test', 'test@test.test'], + 'smtp_ssl': True, + 'smtp_port': 455, + 'email_format': 'html', + 'from_addr': 'testfrom@test.test', + 'type': mock_rule(), + 'timestamp_field': '@timestamp', + 'email_reply_to': 'test@example.com', + 'owner': 'owner_value', + 'alert_subject': 'Test alert for {0}, owned by {1}', + 'alert_subject_args': ['test_term', 'owner'], + 'snowman': '☃' + } + with mock.patch('elastalert.alerters.email.SMTP_SSL') as mock_smtp: + mock_smtp.return_value = mock.Mock() + + alert = EmailAlerter(rule) + alert.alert([{'test_term': 'test_value'}]) + expected = [mock.call('localhost', 455, certfile=None, keyfile=None), + mock.call().sendmail( + mock.ANY, + [ + 'testing@test.test', + 'test@test.test' + ], + mock.ANY + ), + mock.call().quit()] + assert mock_smtp.mock_calls == expected + + body = mock_smtp.mock_calls[1][1][2] + + assert 'Reply-To: test@example.com' in body + assert 'To: testing@test.test' in body + assert 'From: testfrom@test.test' in body + assert 'Subject: Test alert for test_value, owned by owner_value' in body + assert 'Content-Type: text/html; charset="utf-8"' in body From a4f3a5314eaa0381bf6b51282d748238efaf7f0a Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 19 Jun 2021 02:14:19 +0900 Subject: [PATCH 0319/1065] Added info log output to 
command.py and thehive.py --- elastalert/alerters/command.py | 1 + elastalert/alerters/thehive.py | 3 ++- tests/alerters/command_test.py | 6 +++++- tests/alerters/thehive_test.py | 10 ++++++---- 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/elastalert/alerters/command.py b/elastalert/alerters/command.py index 32643c358..ac15ad05f 100644 --- a/elastalert/alerters/command.py +++ b/elastalert/alerters/command.py @@ -43,6 +43,7 @@ def alert(self, matches): raise EAException("Non-zero exit code while running command %s" % (' '.join(command))) except OSError as e: raise EAException("Error while running command %s: %s" % (' '.join(command), e)) + elastalert_logger.info("Alert sent to Command") def get_info(self): return {'type': 'command', diff --git a/elastalert/alerters/thehive.py b/elastalert/alerters/thehive.py index b4a75490a..e3aa1a029 100644 --- a/elastalert/alerters/thehive.py +++ b/elastalert/alerters/thehive.py @@ -6,7 +6,7 @@ from requests import RequestException from elastalert.alerts import Alerter -from elastalert.util import lookup_es_key, EAException +from elastalert.util import lookup_es_key, EAException, elastalert_logger class HiveAlerter(Alerter): @@ -123,6 +123,7 @@ def alert(self, matches): response.raise_for_status() except RequestException as e: raise EAException(f"Error posting to TheHive: {e}") + elastalert_logger.info("Alert sent to TheHive") def get_info(self): diff --git a/tests/alerters/command_test.py b/tests/alerters/command_test.py index 3d6e86b4c..170cc6108 100644 --- a/tests/alerters/command_test.py +++ b/tests/alerters/command_test.py @@ -1,5 +1,6 @@ import json import subprocess +import logging import pytest from unittest import mock @@ -28,7 +29,8 @@ def test_command_getinfo(): assert expected_data == actual_data -def test_command_old_style_string_format1(): +def test_command_old_style_string_format1(caplog): + caplog.set_level(logging.INFO) # Test command as string with formatted arg (old-style string format) rule = {'command': '/bin/test/ --arg %(somefield)s'} match = {'@timestamp': '2014-01-01T00:00:00', @@ -38,6 +40,8 @@ def test_command_old_style_string_format1(): with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen: alert.alert([match]) assert mock_popen.called_with('/bin/test --arg foobarbaz', stdin=subprocess.PIPE, shell=False) + assert ('elastalert', logging.WARNING, 'Warning! 
You could be vulnerable to shell injection!') == caplog.record_tuples[0] + assert ('elastalert', logging.INFO, 'Alert sent to Command') == caplog.record_tuples[1] def test_command_old_style_string_format2(): diff --git a/tests/alerters/thehive_test.py b/tests/alerters/thehive_test.py index 40458d3f4..751ee4190 100644 --- a/tests/alerters/thehive_test.py +++ b/tests/alerters/thehive_test.py @@ -1,4 +1,5 @@ import json +import logging from unittest import mock import pytest @@ -9,7 +10,8 @@ from elastalert.alerters.thehive import HiveAlerter -def test_thehive_alerter(): +def test_thehive_alerter(caplog): + caplog.set_level(logging.INFO) rule = {'alert': [], 'alert_text': '', 'alert_text_type': 'alert_text_only', @@ -90,10 +92,11 @@ def test_thehive_alerter(): del actual_data['sourceRef'] assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to TheHive') == caplog.record_tuples[0] def test_thehive_ea_exception(): - try: + with pytest.raises(EAException) as ea: rule = {'alert': [], 'alert_text': '', 'alert_text_type': 'alert_text_only', @@ -125,8 +128,7 @@ def test_thehive_ea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except EAException: - assert True + assert 'Error posting to TheHive:' in str(ea) @pytest.mark.parametrize('hive_host, expect', [ From c4e41dfee2b814a1a1e7efeaf4b26a6b841aeb50 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 19 Jun 2021 02:22:33 +0900 Subject: [PATCH 0320/1065] Addition of explanation of proxy setting --- docs/source/ruletypes.rst | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 2b2a95f9b..d427b52e7 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1848,7 +1848,7 @@ Optional: ``discord_emoji_title``: By default ElastAlert will use the ``:warning:`` emoji when posting to the channel. You can use a different emoji per ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored. -``discord_proxy``: By default ElastAlert will not use a network proxy to send notifications to Discord. Set this option using ``hostname:port`` if you need to use a proxy. +``discord_proxy``: By default ElastAlert will not use a network proxy to send notifications to Discord. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. ``discord_proxy_login``: The Discord proxy auth username. @@ -1896,7 +1896,7 @@ an email would be sent to ``qlo@example.com`` ``smtp_host``: The SMTP host to use, defaults to localhost. -``smtp_port``: The port to use. Default is 25. +``smtp_port``: The port to use. If smtp_port is not specified when smtp_ssl is False, 25 ports will be set internally. If smtp_port is not specified when smtp_ssl is True, 465 ports will be set internally. ``smtp_ssl``: Connect the SMTP host using TLS, defaults to ``false``. If ``smtp_ssl`` is not used, ElastAlert will still attempt STARTTLS. @@ -1978,7 +1978,7 @@ Optional: ``gitter_msg_level``: By default the alert will be posted with the 'error' level. You can use 'info' if you want the messages to be black instead of red. -``gitter_proxy``: By default ElastAlert will not use a network proxy to send notifications to Gitter. Set this option using ``hostname:port`` if you need to use a proxy. 
+``gitter_proxy``: By default ElastAlert will not use a network proxy to send notifications to Gitter. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. Example usage:: @@ -2024,7 +2024,7 @@ Optional: ``http_post_headers``: Key:value pairs of headers to be sent as part of the request. -``http_post_proxy``: URL of proxy, if required. +``http_post_proxy``: URL of proxy, if required. only supports https. ``http_post_all_values``: Boolean of whether or not to include every key value pair from the match in addition to those in http_post_payload and http_post_static_payload. Defaults to True if http_post_payload is not specified, otherwise False. @@ -2183,7 +2183,7 @@ The alerter requires the following option: Optional: -``mattermost_proxy``: By default ElastAlert will not use a network proxy to send notifications to Mattermost. Set this option using ``hostname:port`` if you need to use a proxy. +``mattermost_proxy``: By default ElastAlert will not use a network proxy to send notifications to Mattermost. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. ``mattermost_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. @@ -2252,7 +2252,7 @@ Optional: ``ms_teams_theme_color``: By default the alert will be posted without any color line. To add color, set this attribute to a HTML color value e.g. ``#ff0000`` for red. -``ms_teams_proxy``: By default ElastAlert will not use a network proxy to send notifications to MS Teams. Set this option using ``hostname:port`` if you need to use a proxy. +``ms_teams_proxy``: By default ElastAlert will not use a network proxy to send notifications to MS Teams. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. ``ms_teams_alert_fixed_width``: By default this is ``False`` and the notification will be sent to MS Teams as-is. Teams supports a partial Markdown implementation, which means asterisk, underscore and other characters may be interpreted as Markdown. Currently, Teams does not fully implement code blocks. Setting this attribute to ``True`` will enable line by line code blocks. It is recommended to enable this to get clearer notifications in Teams. @@ -2309,7 +2309,7 @@ Optional: ``opsgenie_details``: Map of custom key/value pairs to include in the alert's details. The value can be sourced from either fields in the first match, environment variables, or a constant value. -``opsgenie_proxy``: By default ElastAlert will not use a network proxy to send notifications to OpsGenie. Set this option using ``hostname:port`` if you need to use a proxy. +``opsgenie_proxy``: By default ElastAlert will not use a network proxy to send notifications to OpsGenie. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. Example usage:: @@ -2342,7 +2342,7 @@ If there's no open (i.e. unresolved) incident with this key, a new one will be c ``pagerduty_incident_key_args``: If set, and ``pagerduty_incident_key`` is a formattable string, ElastAlert 2 will format the incident key based on the provided array of fields from the rule or match. -``pagerduty_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerDuty.
Set this option using ``hostname:port`` if you need to use a proxy. +``pagerduty_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerDuty. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. V2 API Options (Optional): @@ -2383,7 +2383,7 @@ The alerter requires the following options: ``pagertree_integration_url``: URL generated by PagerTree for the integration. -``pagertree_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerTree. Set this option using ``hostname:port`` if you need to use a proxy. +``pagertree_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerTree. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. Example usage:: @@ -2414,7 +2414,7 @@ ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . ``rocket_chat_text_string``: Notification message you want to add. -``rocket_chat_proxy``: By default ElastAlert will not use a network proxy to send notifications to Rocket.Chat. Set this option using ``hostname:port`` if you need to use a proxy. +``rocket_chat_proxy``: By default ElastAlert will not use a network proxy to send notifications to Rocket.Chat. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. ``rocket_chat_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the Rocket.Chat notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``. @@ -2482,7 +2482,7 @@ The alerter requires the following options: Optional: -``servicenow_proxy``: By default ElastAlert will not use a network proxy to send notifications to ServiceNow. Set this option using ``hostname:port`` if you need to use a proxy. +``servicenow_proxy``: By default ElastAlert will not use a network proxy to send notifications to ServiceNow. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. Example usage:: @@ -2528,7 +2528,7 @@ Provide absolute address of the picture. ``slack_text_string``: Notification message you want to add. -``slack_proxy``: By default ElastAlert will not use a network proxy to send notifications to Slack. Set this option using ``hostname:port`` if you need to use a proxy. +``slack_proxy``: By default ElastAlert will not use a network proxy to send notifications to Slack. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. ``slack_alert_fields``: You can add additional fields to your slack alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`. @@ -2596,7 +2596,7 @@ Optional: ``victorops_entity_display_name``: Human-readable name of alerting entity to summarize incidents without affecting the life-cycle workflow. -``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to Splunk On-Call (Formerly VictorOps). Set this option using ``hostname:port`` if you need to use a proxy. +``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to Splunk On-Call (Formerly VictorOps). Set this option using ``hostname:port`` if you need to use a proxy. only supports https. Example usage:: @@ -2654,7 +2654,7 @@ Optional: ``telegram_api_url``: Custom domain to call Telegram Bot API. Defaults to api.telegram.org -``telegram_proxy``: By default ElastAlert will not use a network proxy to send notifications to Telegram.
Set this option using ``hostname:port`` if you need to use a proxy. +``telegram_proxy``: By default ElastAlert will not use a network proxy to send notifications to Telegram. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. ``telegram_proxy_login``: The Telegram proxy auth username. From dc213e9333d04da5ff1424141819a176e95b8a70 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Fri, 18 Jun 2021 13:59:29 -0400 Subject: [PATCH 0321/1065] Clarify smtp port defaults in docs --- docs/source/ruletypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index d427b52e7..5d7e5c7b0 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1896,7 +1896,7 @@ an email would be sent to ``qlo@example.com`` ``smtp_host``: The SMTP host to use, defaults to localhost. -``smtp_port``: The port to use. If smtp_port is not specified when smtp_ssl is False, 25 ports will be set internally. If smtp_port is not specified when smtp_ssl is True, 465 ports will be set internally. +``smtp_port``: The port to use. Defaults to port 25 when SSL is not used, or 465 when SSL is used. ``smtp_ssl``: Connect the SMTP host using TLS, defaults to ``false``. If ``smtp_ssl`` is not used, ElastAlert will still attempt STARTTLS. From 7710526870319c4c45ea414bfb98c77a980064c3 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 19 Jun 2021 13:44:56 +0900 Subject: [PATCH 0322/1065] Added test code for files in alerters folder --- elastalert/alerters/pagerduty.py | 4 +- tests/alerters/alerta_test.py | 201 +++++++++++++++++++++++++++++- tests/alerters/chatwork_test.py | 11 +- tests/alerters/datadog_test.py | 8 +- tests/alerters/debug_test.py | 54 ++++++++ tests/alerters/dingtalk_test.py | 54 +++++++- tests/alerters/discord_test.py | 10 +- tests/alerters/gitter_test.py | 10 +- tests/alerters/googlechat_test.py | 119 +++++++++++++++++- tests/alerters/httppost_test.py | 10 +- tests/alerters/jira_test.py | 8 +- tests/alerters/line_test.py | 12 +- tests/alerters/mattermost_test.py | 57 ++++++++- tests/alerters/opsgenie_test.py | 121 +++++++++++++----- tests/alerters/pagerduty_test.py | 132 +++++++++++++++++++- tests/alerters/pagertree_test.py | 10 +- tests/alerters/rocketchat_test.py | 10 +- tests/alerters/servicenow_test.py | 10 +- tests/alerters/slack_test.py | 10 +- tests/alerters/teams_test.py | 10 +- tests/alerters/telegram_test.py | 10 +- tests/alerters/victorops_test.py | 10 +- 22 files changed, 778 insertions(+), 103 deletions(-) diff --git a/elastalert/alerters/pagerduty.py b/elastalert/alerters/pagerduty.py index b804b26ca..426e97792 100644 --- a/elastalert/alerters/pagerduty.py +++ b/elastalert/alerters/pagerduty.py @@ -103,9 +103,9 @@ def alert(self, matches): if self.pagerduty_event_type == 'trigger': elastalert_logger.info("Trigger sent to PagerDuty") - elif self.pagerduty_event_type == 'resolve': + if self.pagerduty_event_type == 'resolve': elastalert_logger.info("Resolve sent to PagerDuty") - elif self.pagerduty_event_type == 'acknowledge': + if self.pagerduty_event_type == 'acknowledge': elastalert_logger.info("acknowledge sent to PagerDuty") def resolve_formatted_key(self, key, args, matches): diff --git a/tests/alerters/alerta_test.py b/tests/alerters/alerta_test.py index 35cd17582..858a06ba4 100644 --- a/tests/alerters/alerta_test.py +++ b/tests/alerters/alerta_test.py @@ -1,5 +1,6 @@ import datetime import json +import logging from unittest import mock import pytest @@ -10,7 +11,8 @@ from 
elastalert.util import EAException -def test_alerta_no_auth(): +def test_alerta_no_auth(caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'Test Alerta rule!', 'alerta_api_url': 'http://elastalerthost:8080/api/alert', @@ -73,6 +75,7 @@ def test_alerta_no_auth(): ) assert expected_data == json.loads( mock_post_request.call_args_list[0][1]['data']) + assert ('elastalert', logging.INFO, 'Alert sent to Alerta') == caplog.record_tuples[0] def test_alerta_auth(): @@ -616,7 +619,7 @@ def test_alerta_tags(): def test_alerta_ea_exception(): - try: + with pytest.raises(EAException) as ea: rule = { 'name': 'Test Alerta rule!', 'alerta_api_url': 'http://elastalerthost:8080/api/alert', @@ -649,8 +652,7 @@ def test_alerta_ea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except EAException: - assert True + assert 'Error posting to Alerta: ' in str(ea) def test_alerta_getinfo(): @@ -705,3 +707,194 @@ def test_alerta_required_error(alerta_api_url, expected_data): assert expected_data == actual_data except Exception as ea: assert expected_data in str(ea) + + +@pytest.mark.parametrize('query_key, expected_data', [ + ('hostname', 'Test Alerta rule!.aProbe'), + ('test', 'Test Alerta rule!'), + ('', 'Test Alerta rule!'), +]) +def test_alerta_create_default_title(query_key, expected_data): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'type': 'any', + 'alert': 'alerta' + } + if query_key != '': + rule['query_key'] = query_key + + match = [ + { + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + }, + { + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname2': 'aProbe' + } + ] + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + + result = alert.create_default_title(match) + assert expected_data == result + + +def test_alerta_match_timestamp_none(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': True, + 'alerta_tags': ['elastalert2'], + 'alert': 'alerta' + } + + match = { + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": ['elastalert2'], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "environment": "Production", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" + } + + mock_post_request.assert_called_once_with( + 
alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + + actual_data = json.loads( + mock_post_request.call_args_list[0][1]['data']) + del actual_data['createTime'] + del actual_data['rawData'] + assert expected_data == actual_data + + +def test_alerta_use_match_timestamp(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "ElastAlert 2", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", + 'type': 'any', + 'alerta_use_match_timestamp': False, + 'alerta_tags': ['elastalert2'], + 'alert': 'alerta' + } + + match = { + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + "origin": "ElastAlert 2", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": ['elastalert2'], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "environment": "Production", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" + } + + mock_post_request.assert_called_once_with( + alert.url, + data=mock.ANY, + verify=True, + headers={ + 'content-type': 'application/json'} + ) + + actual_data = json.loads( + mock_post_request.call_args_list[0][1]['data']) + del actual_data['createTime'] + del actual_data['rawData'] + assert expected_data == actual_data + + +def test_get_json_payload_error(): + rule = { + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'type': 'any', + 'alert': 'alerta', + 'query_key': 'hostname' + } + match = { + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) + + mock_run = mock.MagicMock(side_effect=Exception) + with mock.patch('json.dumps', mock_run): + + with pytest.raises(Exception) as e: + alert.get_json_payload(match) + + assert 'Error building Alerta request: ' in str(e) diff --git a/tests/alerters/chatwork_test.py b/tests/alerters/chatwork_test.py index cd8945a8f..fb0a1e601 100644 --- a/tests/alerters/chatwork_test.py +++ b/tests/alerters/chatwork_test.py @@ -1,3 +1,5 @@ +import logging + from unittest import mock import pytest from requests import RequestException @@ -8,7 +10,8 @@ from elastalert.util import EAException -def test_chatwork(): +def test_chatwork(caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'Test Chatwork Rule', 'type': 'any', @@ -39,6 +42,7 @@ def test_chatwork(): actual_data = mock_post_request.call_args_list[0][1]['params'] assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to Chatwork room xxxx2') == caplog.record_tuples[0] def test_chatwork_proxy(): @@ -78,7 +82,7 @@ def test_chatwork_proxy(): def 
test_chatwork_ea_exception(): - try: + with pytest.raises(EAException) as ea: rule = { 'name': 'Test Chatwork Rule', 'type': 'any', @@ -99,8 +103,7 @@ def test_chatwork_ea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except EAException: - assert True + assert 'Error posting to Chattwork: . Details: ' in str(ea) def test_chatwork_getinfo(): diff --git a/tests/alerters/datadog_test.py b/tests/alerters/datadog_test.py index e70325851..3e082aa7c 100644 --- a/tests/alerters/datadog_test.py +++ b/tests/alerters/datadog_test.py @@ -48,8 +48,8 @@ def test_datadog_alerter(caplog): assert ('elastalert', logging.INFO, 'Alert sent to Datadog') == caplog.record_tuples[0] -def test_datadog_alerterea_exception(): - try: +def test_datadog_ea_exception(): + with pytest.raises(EAException) as ea: rule = { 'name': 'Test Datadog Event Alerter', 'type': 'any', @@ -68,9 +68,7 @@ def test_datadog_alerterea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - assert False - except EAException as ea: - assert 'Error posting event to Datadog:' in str(ea) + assert 'Error posting event to Datadog:' in str(ea) def test_datadog_getinfo(): diff --git a/tests/alerters/debug_test.py b/tests/alerters/debug_test.py index be09cbc2f..f35053c9c 100644 --- a/tests/alerters/debug_test.py +++ b/tests/alerters/debug_test.py @@ -1,3 +1,5 @@ +import logging + from elastalert.alerters.debug import DebugAlerter from elastalert.loaders import FileRulesLoader @@ -18,3 +20,55 @@ def test_debug_getinfo(): } actual_data = alert.get_info() assert expected_data == actual_data + + +def test_debug_alerter(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Debug Event Alerter', + 'type': 'any', + 'alert': [], + 'timestamp_field': 'timestamp' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DebugAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'name': 'debug-test-name' + } + alert.alert([match]) + + excepted1 = 'Alert for Test Debug Event Alerter at None:' + assert ('elastalert', logging.INFO, excepted1) == caplog.record_tuples[0] + + excepted2 = 'Test Debug Event Alerter\n\n@timestamp: 2021-01-01T00:00:00\n' + excepted2 += 'name: debug-test-name\n' + assert ('elastalert', logging.INFO, excepted2) == caplog.record_tuples[1] + + +def test_debug_alerter_querykey(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Debug Event Alerter', + 'type': 'any', + 'alert': [], + 'timestamp_field': 'timestamp', + 'query_key': 'hostname' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DebugAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'name': 'debug-test-name', + 'hostname': 'aProbe' + } + alert.alert([match]) + + excepted1 = 'Alert for Test Debug Event Alerter, aProbe at None:' + assert ('elastalert', logging.INFO, excepted1) == caplog.record_tuples[0] + + excepted2 = 'Test Debug Event Alerter\n\n@timestamp: 2021-01-01T00:00:00\n' + excepted2 += 'hostname: aProbe\nname: debug-test-name\n' + assert ('elastalert', logging.INFO, excepted2) == caplog.record_tuples[1] diff --git a/tests/alerters/dingtalk_test.py b/tests/alerters/dingtalk_test.py index b4c1250ce..c1a2731fa 100644 --- a/tests/alerters/dingtalk_test.py +++ b/tests/alerters/dingtalk_test.py @@ -1,4 +1,5 @@ import json 
+import logging from unittest import mock import pytest @@ -10,7 +11,8 @@ from elastalert.util import EAException -def test_dingtalk_text(): +def test_dingtalk_text(caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'Test DingTalk Rule', 'type': 'any', @@ -47,6 +49,7 @@ def test_dingtalk_text(): actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Trigger sent to dingtalk') == caplog.record_tuples[0] def test_dingtalk_markdown(): @@ -194,6 +197,50 @@ def test_dingtalk_action_card(): assert expected_data == actual_data +def test_dingtalk_action_card2(): + rule = { + 'name': 'Test DingTalk Rule', + 'type': 'any', + 'dingtalk_access_token': 'xxxxxxx', + 'dingtalk_msgtype': 'action_card', + 'dingtalk_single_title': 'elastalert', + 'dingtalk_single_url': 'http://xxxxx2', + 'alert': [], + 'alert_subject': 'Test DingTalk' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DingTalkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'msgtype': 'actionCard', + 'actionCard': { + 'title': 'Test DingTalk', + 'text': 'Test DingTalk Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + } + + mock_post_request.assert_called_once_with( + 'https://oapi.dingtalk.com/robot/send?access_token=xxxxxxx', + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'Accept': 'application/json;charset=utf-8' + }, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + def test_dingtalk_proxy(): rule = { 'name': 'Test DingTalk Rule', @@ -255,7 +302,7 @@ def test_dingtalk_proxy(): def test_dingtalk_ea_exception(): - try: + with pytest.raises(EAException) as ea: rule = { 'name': 'Test DingTalk Rule', 'type': 'any', @@ -290,8 +337,7 @@ def test_dingtalk_ea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except EAException: - assert True + assert 'Error posting to dingtalk: ' in str(ea) def test_dingtalk_getinfo(): diff --git a/tests/alerters/discord_test.py b/tests/alerters/discord_test.py index 5f7c441dd..3d6893c49 100644 --- a/tests/alerters/discord_test.py +++ b/tests/alerters/discord_test.py @@ -1,4 +1,5 @@ import json +import logging from unittest import mock import pytest @@ -10,7 +11,8 @@ from elastalert.util import EAException -def test_discord(): +def test_discord(caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'Test Discord Rule', 'type': 'any', @@ -55,6 +57,7 @@ def test_discord(): actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to the webhook http://xxxxxxx') == caplog.record_tuples[0] def test_discord_not_footer(): @@ -185,7 +188,7 @@ def test_discord_description_maxlength(): def test_discord_ea_exception(): - try: + with pytest.raises(EAException) as ea: rule = { 'name': 'Test Discord Rule' + ('a' * 2069), 'type': 'any', @@ -205,8 +208,7 @@ def test_discord_ea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except EAException: - assert True + assert 
'Error posting to Discord: . Details: ' in str(ea) def test_discord_getinfo(): diff --git a/tests/alerters/gitter_test.py b/tests/alerters/gitter_test.py index 40cfb509f..efb84a009 100644 --- a/tests/alerters/gitter_test.py +++ b/tests/alerters/gitter_test.py @@ -1,4 +1,5 @@ import json +import logging from unittest import mock import pytest @@ -14,7 +15,8 @@ ('error', 'error'), ('info', 'info') ]) -def test_gitter_msg_level(msg_level, except_msg_level): +def test_gitter_msg_level(msg_level, except_msg_level, caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'Test Gitter Rule', 'type': 'any', @@ -49,6 +51,7 @@ def test_gitter_msg_level(msg_level, except_msg_level): actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to Gitter') == caplog.record_tuples[0] def test_gitter_proxy(): @@ -86,7 +89,7 @@ def test_gitter_proxy(): def test_gitter_ea_exception(): - try: + with pytest.raises(EAException) as ea: rule = { 'name': 'Test Gitter Rule', 'type': 'any', @@ -105,8 +108,7 @@ def test_gitter_ea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except EAException: - assert True + assert 'Error posting to Gitter: ' in str(ea) def test_gitter_getinfo(): diff --git a/tests/alerters/googlechat_test.py b/tests/alerters/googlechat_test.py index e41808a69..5e998ca28 100644 --- a/tests/alerters/googlechat_test.py +++ b/tests/alerters/googlechat_test.py @@ -1,4 +1,5 @@ import json +import logging from unittest import mock import pytest @@ -9,7 +10,8 @@ from elastalert.util import EAException -def test_google_chat_basic(): +def test_google_chat_basic(caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'Test GoogleChat Rule', 'type': 'any', @@ -38,6 +40,7 @@ def test_google_chat_basic(): actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to Google Chat!') == caplog.record_tuples[0] def test_google_chat_card(): @@ -106,7 +109,7 @@ def test_google_chat_card(): def test_google_chat_ea_exception(): - try: + with pytest.raises(EAException) as ea: rule = { 'name': 'Test GoogleChat Rule', 'type': 'any', @@ -123,8 +126,7 @@ def test_google_chat_ea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except EAException: - assert True + assert 'Error posting to google chat: ' in str(ea) def test_google_chat_getinfo(): @@ -173,3 +175,112 @@ def test_google_chat_required_error(googlechat_webhook_url, expected_data): assert expected_data == actual_data except Exception as ea: assert expected_data in str(ea) + + +def test_ggooglechat_header_title_none(): + rule = { + 'name': 'Test GoogleChat Rule', + 'type': 'any', + 'googlechat_webhook_url': 'http://xxxxxxx', + 'googlechat_format': 'card', + 'googlechat_header_subtitle': 'xxxx2', + 'googlechat_header_image': 'http://xxxx/image.png', + 'googlechat_footer_kibanalink': 'http://xxxxx/kibana', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GoogleChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'cards': [{ + 
'sections': [ + { + 'widgets': [{ + "textParagraph": { + 'text': 'Test GoogleChat Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + }] + }, + { + 'widgets': [{ + 'buttons': [{ + 'textButton': { + 'text': 'VISIT KIBANA', + 'onClick': { + 'openLink': { + 'url': rule['googlechat_footer_kibanalink'] + } + } + } + }] + }] + } + ]} + ] + } + + mock_post_request.assert_called_once_with( + rule['googlechat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + + +def test_googlechat_footer_kibanalink_none(): + rule = { + 'name': 'Test GoogleChat Rule', + 'type': 'any', + 'googlechat_webhook_url': 'http://xxxxxxx', + 'googlechat_format': 'card', + 'googlechat_header_title': 'xxxx1', + 'googlechat_header_subtitle': 'xxxx2', + 'googlechat_header_image': 'http://xxxx/image.png', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = GoogleChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'cards': [{ + 'header': { + 'title': rule['googlechat_header_title'], + 'subtitle': rule['googlechat_header_subtitle'], + 'imageUrl': rule['googlechat_header_image'] + }, + 'sections': [ + { + 'widgets': [{ + "textParagraph": { + 'text': 'Test GoogleChat Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + } + }] + } + ]} + ] + } + + mock_post_request.assert_called_once_with( + rule['googlechat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data diff --git a/tests/alerters/httppost_test.py b/tests/alerters/httppost_test.py index 5ba4106d8..a36050fe0 100644 --- a/tests/alerters/httppost_test.py +++ b/tests/alerters/httppost_test.py @@ -1,4 +1,5 @@ import json +import logging from unittest import mock import pytest @@ -9,7 +10,8 @@ from elastalert.util import EAException -def test_http_alerter_with_payload(): +def test_http_alerter_with_payload(caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'Test HTTP Post Alerter With Payload', 'type': 'any', @@ -40,6 +42,7 @@ def test_http_alerter_with_payload(): verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + assert ('elastalert', logging.INFO, 'HTTP Post alert sent.') == caplog.record_tuples[0] def test_http_alerter_with_payload_all_values(): @@ -264,7 +267,7 @@ def test_http_alerter_post_ca_certs(ca_certs, ignore_ssl_errors, excpet_verify): def test_http_alerter_post_ea_exception(): - try: + with pytest.raises(EAException) as ea: rule = { 'name': 'Test HTTP Post Alerter Without Payload', 'type': 'any', @@ -283,8 +286,7 @@ def test_http_alerter_post_ea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except EAException: - assert True + assert 'Error posting HTTP Post alert: ' in str(ea) def test_http_getinfo(): diff --git a/tests/alerters/jira_test.py b/tests/alerters/jira_test.py index b38472de3..636d5cd37 100644 --- a/tests/alerters/jira_test.py +++ b/tests/alerters/jira_test.py @@ -1,4 +1,5 @@ import datetime +import logging from unittest import mock import pytest @@ -25,7 +26,8 @@ def 
test_jira_formatted_match_string(ea): assert expected_alert_text_snippet in alert_text -def test_jira(): +def test_jira(caplog): + caplog.set_level(logging.INFO) description_txt = "Description stuff goes here like a runbook link." rule = { 'name': 'test alert', @@ -75,6 +77,10 @@ def test_jira(): # We don't care about additional calls to mock_jira, such as __str__ assert mock_jira.mock_calls[:6] == expected assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt) + user, level, message = caplog.record_tuples[0] + assert 'elastalert' == user + assert logging.INFO == level + assert 'pened Jira ticket: ' in message # Search called if jira_bump_tickets rule['jira_bump_tickets'] = True diff --git a/tests/alerters/line_test.py b/tests/alerters/line_test.py index c01ce701a..e30eb551c 100644 --- a/tests/alerters/line_test.py +++ b/tests/alerters/line_test.py @@ -1,3 +1,5 @@ +import logging + from unittest import mock import pytest from requests import RequestException @@ -7,7 +9,8 @@ from elastalert.util import EAException -def test_line_notify(): +def test_line_notify(caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'Test LineNotify Rule', 'type': 'any', @@ -39,10 +42,11 @@ def test_line_notify(): actual_data = mock_post_request.call_args_list[0][1]['data'] assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to Line Notify') == caplog.record_tuples[0] def test_line_notify_ea_exception(): - try: + with pytest.raises(EAException) as ea: rule = { 'name': 'Test LineNotify Rule', 'type': 'any', @@ -59,8 +63,8 @@ def test_line_notify_ea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except EAException: - assert True + + assert 'Error posting to Line Notify: ' in str(ea) def test_line_getinfo(): diff --git a/tests/alerters/mattermost_test.py b/tests/alerters/mattermost_test.py index 3a32000b4..41ad8de18 100644 --- a/tests/alerters/mattermost_test.py +++ b/tests/alerters/mattermost_test.py @@ -1,4 +1,5 @@ import json +import logging from unittest import mock import pytest @@ -9,7 +10,8 @@ from elastalert.util import EAException -def test_mattermost_proxy(): +def test_mattermost_proxy(caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -54,6 +56,7 @@ def test_mattermost_proxy(): actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to Mattermost') == caplog.record_tuples[0] def test_mattermost_alert_text_only(): @@ -751,7 +754,7 @@ def test_mattermost_author_icon(): def test_mattermost_ea_exception(): - try: + with pytest.raises(EAException) as ea: rule = { 'name': 'Test Mattermost Rule', 'type': 'any', @@ -773,8 +776,7 @@ def test_mattermost_ea_exception(): mock_run = mock.MagicMock(side_effect=RequestException) with mock.patch('requests.post', mock_run), pytest.raises(RequestException): alert.alert([match]) - except EAException: - assert True + assert 'Error posting to Mattermost: ' in str(ea) def test_mattermost_get_aggregation_summary_text__maximum_width(): @@ -1093,3 +1095,50 @@ def test_mattermost_kibana_discover_color(): actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data + + +def test_mattermost_username_override(): + rule = { + 'name': 'Test Mattermost Rule', + 'type': 'any', + 'alert_text_type': 
'alert_text_only', + 'mattermost_webhook_url': 'http://xxxxx', + 'mattermost_msg_pretext': 'aaaaa', + 'mattermost_msg_color': 'danger', + 'mattermost_username_override': 'test user', + 'alert': [], + 'alert_subject': 'Test Mattermost' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = MattermostAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'attachments': [ + { + 'fallback': 'Test Mattermost: aaaaa', + 'color': 'danger', + 'title': 'Test Mattermost', + 'pretext': 'aaaaa', + 'fields': [], + 'text': 'Test Mattermost Rule\n\n' + } + ], 'username': 'test user' + } + + mock_post_request.assert_called_once_with( + rule['mattermost_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + verify=True, + proxies=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data diff --git a/tests/alerters/opsgenie_test.py b/tests/alerters/opsgenie_test.py index 0547d7e61..a9ceb64b2 100644 --- a/tests/alerters/opsgenie_test.py +++ b/tests/alerters/opsgenie_test.py @@ -1,3 +1,5 @@ +import logging + from unittest import mock import pytest from requests import RequestException @@ -8,10 +10,16 @@ from tests.alerts_test import mock_rule -def test_opsgenie_basic(): - rule = {'name': 'testOGalert', 'opsgenie_key': 'ogkey', - 'opsgenie_account': 'genies', 'opsgenie_addr': 'https://api.opsgenie.com/v2/alerts', - 'opsgenie_recipients': ['lytics'], 'type': mock_rule()} +def test_opsgenie_basic(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'testOGalert', + 'opsgenie_key': 'ogkey', + 'opsgenie_account': 'genies', + 'opsgenie_addr': 'https://api.opsgenie.com/v2/alerts', + 'opsgenie_recipients': ['lytics'], + 'type': mock_rule() + } with mock.patch('requests.post') as mock_post: alert = OpsGenieAlerter(rule) @@ -27,14 +35,22 @@ def test_opsgenie_basic(): assert mcal[0][1]['json']['source'] == 'ElastAlert' assert mcal[0][1]['json']['responders'] == [{'username': 'lytics', 'type': 'user'}] assert mcal[0][1]['json']['source'] == 'ElastAlert' + user, level, message = caplog.record_tuples[0] + assert "Error response from https://api.opsgenie.com/v2/alerts \n API Response: Date: Sun, 20 Jun 2021 08:32:58 +0100 Subject: [PATCH 0323/1065] Tidy test files There are a lot of test-specific files that were in the root of the repository, which makes the repo structure slightly messy and difficult to understand. This moves the test-specific configuration into tests/ and makes other necessary changes to maintain the build instructions. 
--- Makefile | 11 ++++++----- requirements-dev.txt | 3 ++- Dockerfile-test => tests/Dockerfile-test | 0 docker-compose.yml => tests/docker-compose.yml | 6 +++--- pytest.ini => tests/pytest.ini | 2 +- tox.ini => tests/tox.ini | 11 +++++------ 6 files changed, 17 insertions(+), 16 deletions(-) rename Dockerfile-test => tests/Dockerfile-test (100%) rename docker-compose.yml => tests/docker-compose.yml (60%) rename pytest.ini => tests/pytest.ini (61%) rename tox.ini => tests/tox.ini (77%) diff --git a/Makefile b/Makefile index cbe634879..11217658f 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,7 @@ production: @true docs: - tox -e docs + tox -c tests/tox.ini -e docs dev: $(LOCAL_CONFIG_DIR) $(LOGS_DIR) install-hooks @@ -14,14 +14,15 @@ install-hooks: pre-commit install -f --install-hooks test: - tox + tox -c tests/tox.ini test-elasticsearch: - tox -- --runelasticsearch + tox -c tests/tox.ini -- --runelasticsearch test-docker: - docker-compose --project-name elastalert build tox - docker-compose --project-name elastalert run --rm tox tox -- $(filter-out $@,$(MAKECMDGOALS)) + docker-compose -f tests/docker-compose.yml --project-name elastalert build tox + docker-compose -f tests/docker-compose.yml --project-name elastalert run --rm tox \ + tox -c tests/tox.ini -- $(filter-out $@,$(MAKECMDGOALS)) clean: make -C docs clean diff --git a/requirements-dev.txt b/requirements-dev.txt index ea0c82390..7abc77409 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,6 +1,5 @@ -r requirements.txt docutils<0.17 -pytest-cov==2.12.1 flake8 flake8-absolute-import m2r2 @@ -8,7 +7,9 @@ pluggy>=0.12.0 pre-commit pylint<2.9 pytest==6.2.4 +pytest-cov==2.12.1 pytest-xdist==2.3.0 setuptools +sphinx==4.0.2 sphinx_rtd_theme tox==3.23.1 diff --git a/Dockerfile-test b/tests/Dockerfile-test similarity index 100% rename from Dockerfile-test rename to tests/Dockerfile-test diff --git a/docker-compose.yml b/tests/docker-compose.yml similarity index 60% rename from docker-compose.yml rename to tests/docker-compose.yml index 88badf6e1..7b8dd2854 100644 --- a/docker-compose.yml +++ b/tests/docker-compose.yml @@ -2,10 +2,10 @@ version: '2' services: tox: build: - context: ./ - dockerfile: Dockerfile-test + context: ../ + dockerfile: tests/Dockerfile-test command: tox container_name: elastalert_tox working_dir: /home/elastalert volumes: - - ./:/home/elastalert/ + - ../:/home/elastalert/ diff --git a/pytest.ini b/tests/pytest.ini similarity index 61% rename from pytest.ini rename to tests/pytest.ini index 259ba35a2..d859f3a9c 100644 --- a/pytest.ini +++ b/tests/pytest.ini @@ -2,4 +2,4 @@ markers = elasticsearch: mark a test as using elasticsearch. filterwarnings = - ignore::pytest.PytestUnhandledThreadExceptionWarning \ No newline at end of file + ignore::pytest.PytestUnhandledThreadExceptionWarning diff --git a/tox.ini b/tests/tox.ini similarity index 77% rename from tox.ini rename to tests/tox.ini index 6f42fae25..0a6ad2355 100644 --- a/tox.ini +++ b/tests/tox.ini @@ -1,16 +1,16 @@ [tox] project = elastalert envlist = py39,docs +setupdir = .. [testenv] -deps = -rrequirements-dev.txt +deps = -r../requirements-dev.txt commands = - pytest --cov=elastalert --cov-report=term-missing --cov-branch --strict-markers tests/ -n 4 {posargs} - flake8 . + pytest --cov=elastalert --cov-report=term-missing --cov-branch --strict-markers . -n 4 {posargs} + flake8 --config ../setup.cfg . 
[testenv:lint] deps = {[testenv]deps} - pylint commands = pylint --rcfile=.pylintrc elastalert pylint --rcfile=.pylintrc tests @@ -24,6 +24,5 @@ norecursedirs = .* virtualenv_run docs build venv env [testenv:docs] deps = {[testenv]deps} - sphinx==4.0.2 -changedir = docs +changedir = ../docs commands = sphinx-build -b html -d build/doctrees -W source build/html From 1feb741778712e7a7ada6e047946b4b3780c5d83 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Sun, 20 Jun 2021 17:12:13 +0100 Subject: [PATCH 0324/1065] Fix timezone conversion check I noticed today that `make test` had started failing on my machine when run outside of the Docker container. This was due to the fact that a timezone wasn't being set correctly when run on my machine, which is on BST. The test worked fine with the container because it was on UTC. I traced the error down to https://github.com/jertel/elastalert2/pull/51, in particular, a check which would return True in some cases where it wasn't intended, because get() returns a None value by default instead of the expected empty string. --- elastalert/elastalert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 3786eaf51..f96d93ef6 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -628,7 +628,7 @@ def run_query(self, rule, start=None, end=None, scroll=False): if end is None: end = ts_now() - if rule.get('query_timezone') != "": + if rule.get('query_timezone') is not None: elastalert_logger.info("Query start and end time converting UTC to query_timezone : {}".format(rule.get('query_timezone'))) start = ts_utc_to_tz(start, rule.get('query_timezone')) end = ts_utc_to_tz(end, rule.get('query_timezone')) From 78f009a03fa62c3305b740405d41d63700ee5bb2 Mon Sep 17 00:00:00 2001 From: Feroz Salam Date: Mon, 21 Jun 2021 11:18:10 +0100 Subject: [PATCH 0325/1065] Make field presence check more general Catches the case where the field is set to an empty string in a rule --- elastalert/elastalert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index f96d93ef6..3631ffa37 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -628,7 +628,7 @@ def run_query(self, rule, start=None, end=None, scroll=False): if end is None: end = ts_now() - if rule.get('query_timezone') is not None: + if rule.get('query_timezone'): elastalert_logger.info("Query start and end time converting UTC to query_timezone : {}".format(rule.get('query_timezone'))) start = ts_utc_to_tz(start, rule.get('query_timezone')) end = ts_utc_to_tz(end, rule.get('query_timezone')) From a73ecee9f3c7e15f37fd75a5316eb1b51984b9c5 Mon Sep 17 00:00:00 2001 From: lepouletsuisse Date: Mon, 21 Jun 2021 15:35:30 +0200 Subject: [PATCH 0326/1065] Add securityContext and podSecurityContext in Helm Chart --- CHANGELOG.md | 10 ++++++++++ chart/elastalert2/README.md | 2 ++ chart/elastalert2/templates/deployment.yaml | 8 ++++++++ chart/elastalert2/values.yaml | 9 +++++++++ 4 files changed, 29 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4dc7c3c45..84ad36fba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,16 @@ ## Other changes - None +# 2.x.x +## Breaking changes +- None + +## New features +- Add securityContext and podSecurityContext to Helm chart - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse + +## Other changes +- None + # 2.1.1 ## Breaking changes diff --git a/chart/elastalert2/README.md 
b/chart/elastalert2/README.md index 92d0ec78c..0d0b4c565 100644 --- a/chart/elastalert2/README.md +++ b/chart/elastalert2/README.md @@ -50,6 +50,8 @@ The command removes all the Kubernetes components associated with the chart and | `image.tag` | docker image tag | 2.1.1 | | `image.pullPolicy` | image pull policy | IfNotPresent | | `podAnnotations` | Annotations to be added to pods | {} | +| `podSecurityContext` | Configurable podSecurityContext for pod execution environment | {} | +| `securityContext` | Allows you to set the securityContext for the container | {} | | `command` | command override for container | `NULL` | | `args` | args override for container | `NULL` | | `replicaCount` | number of replicas to run | 1 | diff --git a/chart/elastalert2/templates/deployment.yaml b/chart/elastalert2/templates/deployment.yaml index 47a3faf8d..49975754f 100644 --- a/chart/elastalert2/templates/deployment.yaml +++ b/chart/elastalert2/templates/deployment.yaml @@ -28,10 +28,18 @@ spec: release: {{ .Release.Name }} spec: serviceAccountName: {{ include "elastalert.serviceAccountName" . }} +{{- if .Values.podSecurityContext }} + securityContext: +{{ toYaml .Values.podSecurityContext | indent 8 }} +{{- end }} containers: - name: elastalert image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}" imagePullPolicy: {{ .Values.image.pullPolicy }} +{{- if .Values.securityContext }} + securityContext: +{{ toYaml .Values.securityContext | indent 10 }} +{{- end }} {{- if .Values.command }} command: {{ toYaml .Values.command | indent 10 }} diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml index a61adb877..428d09bbe 100644 --- a/chart/elastalert2/values.yaml +++ b/chart/elastalert2/values.yaml @@ -197,6 +197,15 @@ serviceAccount: podSecurityPolicy: create: false +# securityContext: +# runAsNonRoot: true +# runAsUser: 1000 + +# podSecurityContext: +# fsGroup: 1000 +# runAsUser: 1000 +# runAsGroup: 1000 + # Support using node selectors and tolerations # nodeSelector: # "node-role.kubernetes.io/infra_worker": "true" From 736bae332fc72488ca801d55f141e83da5a124db Mon Sep 17 00:00:00 2001 From: lepouletsuisse Date: Tue, 22 Jun 2021 09:32:03 +0200 Subject: [PATCH 0327/1065] Add default securityContext/podSecurityContext + deprecate the podSecurityPolicy feature. 
--- CHANGELOG.md | 2 +- chart/elastalert2/README.md | 92 +++++++++++++++++------------------ chart/elastalert2/values.yaml | 15 +++--- 3 files changed, 55 insertions(+), 54 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 84ad36fba..64095ddfa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,7 +17,7 @@ - Add securityContext and podSecurityContext to Helm chart - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse ## Other changes -- None +- Deprecated `podSecurityPolicy` feature in Helm Chart as [it's deprecated in Kubernetes 1.21](https://kubernetes.io/blog/2021/04/06/podsecuritypolicy-deprecation-past-present-and-future/) - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse # 2.1.1 diff --git a/chart/elastalert2/README.md b/chart/elastalert2/README.md index 0d0b4c565..9d1817d6b 100644 --- a/chart/elastalert2/README.md +++ b/chart/elastalert2/README.md @@ -44,49 +44,49 @@ The command removes all the Kubernetes components associated with the chart and ## Configuration -| Parameter | Description | Default | -|----------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|---------------------------------| -| `image.repository` | docker image | jertel/elastalert2 | -| `image.tag` | docker image tag | 2.1.1 | -| `image.pullPolicy` | image pull policy | IfNotPresent | -| `podAnnotations` | Annotations to be added to pods | {} | -| `podSecurityContext` | Configurable podSecurityContext for pod execution environment | {} | -| `securityContext` | Allows you to set the securityContext for the container | {} | -| `command` | command override for container | `NULL` | -| `args` | args override for container | `NULL` | -| `replicaCount` | number of replicas to run | 1 | -| `elasticsearch.host` | elasticsearch endpoint to use | elasticsearch | -| `elasticsearch.port` | elasticsearch port to use | 9200 | -| `elasticsearch.useSsl` | whether or not to connect to es_host using SSL | False | -| `elasticsearch.username` | Username for ES with basic auth | `NULL` | -| `elasticsearch.password` | Password for ES with basic auth | `NULL` | -| `elasticsearch.credentialsSecret` | Specifies an existing secret to be used for the ES username/password auth | `NULL` | -| `elasticsearch.credentialsSecretUsernameKey` | The key in elasticsearch.credentialsSecret that stores the ES password auth | `NULL` | -| `elasticsearch.credentialsSecretPasswordKey` | The key in elasticsearch.credentialsSecret that stores the ES username auth | `NULL` | -| `elasticsearch.verifyCerts` | whether or not to verify TLS certificates | True | -| `elasticsearch.clientCert` | path to a PEM certificate to use as the client certificate | /certs/client.pem | -| `elasticsearch.clientKey` | path to a private key file to use as the client key | /certs/client-key.pem | -| `elasticsearch.caCerts` | path to a CA cert bundle to use to verify SSL connections | /certs/ca.pem | -| `elasticsearch.certsVolumes` | certs volumes, required to mount ssl certificates when elasticsearch has tls enabled | `NULL` | -| `elasticsearch.certsVolumeMounts` | mount certs volumes, required to mount ssl certificates when elasticsearch has tls enabled | `NULL` | -| `extraConfigOptions` | Additional options to propagate to all rules, cannot be `alert`, `type`, `name` or `index` | `{}` | -| `secretConfigName` | name of the secret which holds the ElastAlert config. 
**Note:** this will completely overwrite the generated config | `NULL` | -| `secretRulesName` | name of the secret which holds the ElastAlert rules. **Note:** this will overwrite the generated rules | `NULL` | -| `secretRulesList` | a list of rules to enable from the secret | [] | -| `optEnv` | Additional pod environment variable definitions | [] | -| `extraVolumes` | Additional volume definitions | [] | -| `extraVolumeMounts` | Additional volumeMount definitions | [] | -| `serviceAccount.create` | Specifies whether a service account should be created. | `true` | -| `serviceAccount.name` | Service account to be used. If not set and `serviceAccount.create` is `true`, a name is generated using the fullname template | | -| `serviceAccount.annotations` | ServiceAccount annotations | | -| `podSecurityPolicy.create` | Create pod security policy resources | `false` | -| `resources` | Container resource requests and limits | {} | -| `rules` | Rule and alert configuration for ElastAlert 2 | {} example shown in values.yaml | -| `runIntervalMins` | Default interval between alert checks, in minutes | 1 | -| `realertIntervalMins` | Time between alarms for same rule, in minutes | `NULL` | -| `alertRetryLimitMins` | Time to retry failed alert deliveries, in minutes | 2880 (2 days) | -| `bufferTimeMins` | Default rule buffer time, in minutes | 15 | -| `writebackIndex` | Name or prefix of elastalert index(es) | elastalert | -| `nodeSelector` | Node selector for deployment | {} | -| `affinity` | Affinity specifications for the deployed pod(s) | {} | -| `tolerations` | Tolerations for deployment | [] | +| Parameter | Description | Default | +|----------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------| +| `image.repository` | docker image | jertel/elastalert2 | +| `image.tag` | docker image tag | 2.1.1 | +| `image.pullPolicy` | image pull policy | IfNotPresent | +| `podAnnotations` | Annotations to be added to pods | {} | +| `podSecurityContext` | Configurable podSecurityContext for pod execution environment | {"runAsUser": 1000, "runAsGroup": 1000, "fsGroup": 1000} | +| `securityContext` | Allows you to set the securityContext for the container | {"runAsNonRoot": true, "runAsUser": 1000} | +| `command` | command override for container | `NULL` | +| `args` | args override for container | `NULL` | +| `replicaCount` | number of replicas to run | 1 | +| `elasticsearch.host` | elasticsearch endpoint to use | elasticsearch | +| `elasticsearch.port` | elasticsearch port to use | 9200 | +| `elasticsearch.useSsl` | whether or not to connect to es_host using SSL | False | +| `elasticsearch.username` | Username for ES with basic auth | `NULL` | +| `elasticsearch.password` | Password for ES with basic auth | `NULL` | +| `elasticsearch.credentialsSecret` | Specifies an existing secret to be used for the ES username/password auth | `NULL` | +| `elasticsearch.credentialsSecretUsernameKey` | The key in elasticsearch.credentialsSecret that stores the ES password auth | `NULL` | +| `elasticsearch.credentialsSecretPasswordKey` | The key in elasticsearch.credentialsSecret that stores the ES username auth | `NULL` | +| `elasticsearch.verifyCerts` | whether or not to verify TLS certificates | True | +| `elasticsearch.clientCert` | path to a PEM certificate to use as the client certificate | /certs/client.pem | +| `elasticsearch.clientKey` | path to a private 
key file to use as the client key | /certs/client-key.pem | +| `elasticsearch.caCerts` | path to a CA cert bundle to use to verify SSL connections | /certs/ca.pem | +| `elasticsearch.certsVolumes` | certs volumes, required to mount ssl certificates when elasticsearch has tls enabled | `NULL` | +| `elasticsearch.certsVolumeMounts` | mount certs volumes, required to mount ssl certificates when elasticsearch has tls enabled | `NULL` | +| `extraConfigOptions` | Additional options to propagate to all rules, cannot be `alert`, `type`, `name` or `index` | `{}` | +| `secretConfigName` | name of the secret which holds the ElastAlert config. **Note:** this will completely overwrite the generated config | `NULL` | +| `secretRulesName` | name of the secret which holds the ElastAlert rules. **Note:** this will overwrite the generated rules | `NULL` | +| `secretRulesList` | a list of rules to enable from the secret | [] | +| `optEnv` | Additional pod environment variable definitions | [] | +| `extraVolumes` | Additional volume definitions | [] | +| `extraVolumeMounts` | Additional volumeMount definitions | [] | +| `serviceAccount.create` | Specifies whether a service account should be created. | `true` | +| `serviceAccount.name` | Service account to be used. If not set and `serviceAccount.create` is `true`, a name is generated using the fullname template | | +| `serviceAccount.annotations` | ServiceAccount annotations | | +| `podSecurityPolicy.create` | [DEPRECATED] Create pod security policy resources | `false` | +| `resources` | Container resource requests and limits | {} | +| `rules` | Rule and alert configuration for ElastAlert 2 | {} example shown in values.yaml | +| `runIntervalMins` | Default interval between alert checks, in minutes | 1 | +| `realertIntervalMins` | Time between alarms for same rule, in minutes | `NULL` | +| `alertRetryLimitMins` | Time to retry failed alert deliveries, in minutes | 2880 (2 days) | +| `bufferTimeMins` | Default rule buffer time, in minutes | 15 | +| `writebackIndex` | Name or prefix of elastalert index(es) | elastalert | +| `nodeSelector` | Node selector for deployment | {} | +| `affinity` | Affinity specifications for the deployed pod(s) | {} | +| `tolerations` | Tolerations for deployment | [] | diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml index 428d09bbe..e7a9f0780 100644 --- a/chart/elastalert2/values.yaml +++ b/chart/elastalert2/values.yaml @@ -194,17 +194,18 @@ serviceAccount: # Enable pod security policy # https://kubernetes.io/docs/concepts/policy/pod-security-policy/ +# DEPRECATED in Kubernetes 1.21 (https://kubernetes.io/blog/2021/04/06/podsecuritypolicy-deprecation-past-present-and-future/) podSecurityPolicy: create: false -# securityContext: -# runAsNonRoot: true -# runAsUser: 1000 +securityContext: + runAsNonRoot: true + runAsUser: 1000 -# podSecurityContext: -# fsGroup: 1000 -# runAsUser: 1000 -# runAsGroup: 1000 +podSecurityContext: + fsGroup: 1000 + runAsUser: 1000 + runAsGroup: 1000 # Support using node selectors and tolerations # nodeSelector: From c982b9489be8f0856a943e3bc7bd9d532b284701 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Thu, 24 Jun 2021 13:06:06 -0400 Subject: [PATCH 0328/1065] Catch up CHANGELOG with PRs as of 6/24/2021 at 1pm EDT --- CHANGELOG.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 64095ddfa..252e92c7b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,9 +14,26 @@ - None ## New features +- Add support for generating Kibana 
Discover URLs to Rocket.Chat alerter - [#260](https://github.com/jertel/elastalert2/pull/260) - @nsanorururu +- Provide rule key/values as possible Jinja data inputs - [#281](https://github.com/jertel/elastalert2/pull/281) - @mrfroggg - Add securityContext and podSecurityContext to Helm chart - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse ## Other changes +- Continue fix for prometheus wrapper writeback function signature - [#256](https://github.com/jertel/elastalert2/pull/256) - @greut +- Improve exception handling in Stomp alerter - [#261](https://github.com/jertel/elastalert2/pull/261) - @nsanorururu +- Improve exception handling in Amazon SES and SNS alerters - [#264](https://github.com/jertel/elastalert2/pull/264) - @nsanorururu +- Clarify documentation for starting ElastAlert 2 - [#265](https://github.com/jertel/elastalert2/pull/265) - @ferozsalam +- Add exception handling for unsupported operand type - [#266](https://github.com/jertel/elastalert2/pull/266) - @nsanorururu +- Improve documentation for Python build requirements - [#267](https://github.com/jertel/elastalert2/pull/267) - @nsanorururu +- Correct DataDog alerter logging - [#268](https://github.com/jertel/elastalert2/pull/268) - @nsanorururu +- Correct parameter code documentation for main ElastAlert runner - [#269](https://github.com/jertel/elastalert2/pull/269) - @ferozsalam +- Command alerter will now fail during init instead of during alert if given invalid command setting - [#270](https://github.com/jertel/elastalert2/pull/270) - @nsanorururu +- Consolidate all examples into a new examples/ sub folder - [#271](https://github.com/jertel/elastalert2/pull/271) - @ferozsalam +- Add TheHive example rule with Kibana Discover URL and query values in alert text - [#276](https://github.com/jertel/elastalert2/pull/276) - @markus-nclose +- Upgrade pytest-xdist from 2.2.1 to 2.3.0; clarify HTTPS support in docs; Add additional logging - [#283](https://github.com/jertel/elastalert2/pull/283) - @nsanorururu +- Add more alerter test coverage - [#284](https://github.com/jertel/elastalert2/pull/284) - @nsanorururu +- Improve structure and placement of test-related files in project tree - [#287](https://github.com/jertel/elastalert2/pull/287) - @ferozsalam +- Only attempt to adjust timezone if timezone is set to a non-empty string - [#288](https://github.com/jertel/elastalert2/pull/288) - @ferozsalam - Deprecated `podSecurityPolicy` feature in Helm Chart as [it's deprecated in Kubernetes 1.21](https://kubernetes.io/blog/2021/04/06/podsecuritypolicy-deprecation-past-present-and-future/) - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse # 2.1.1 From 56496e38749ed4c6c816ffe3acf80c493bab5871 Mon Sep 17 00:00:00 2001 From: Jeff Ashton Date: Fri, 25 Jun 2021 13:33:31 -0400 Subject: [PATCH 0329/1065] Fixing the schema for slack_channel_override --- elastalert/schema.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index c724fd40a..772e42521 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -495,7 +495,7 @@ properties: ### Slack slack_webhook_url: *arrayOfString slack_username_override: {type: string} - slack_channel_override: {type: string} + slack_channel_override: *arrayOfString slack_emoji_override: {type: string} slack_icon_url_override: {type: string} slack_msg_color: {enum: [good, warning, danger]} From fc25036fc73e979f5ec7c321bd1da28fa496e439 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 
26 Jun 2021 03:11:33 +0900 Subject: [PATCH 0330/1065] Add test code --- elastalert/alerters/dingtalk.py | 9 +- elastalert/alerters/mattermost.py | 4 +- elastalert/alerters/opsgenie.py | 3 - tests/alerters/alerta_test.py | 7 +- tests/alerters/chatwork_test.py | 13 +-- tests/alerters/datadog_test.py | 7 +- tests/alerters/dingtalk_test.py | 5 +- tests/alerters/discord_test.py | 5 +- tests/alerters/email_test.py | 58 ++++-------- tests/alerters/exotel_test.py | 148 +++++++++++++++++++++++------- tests/alerters/gitter_test.py | 11 ++- tests/alerters/googlechat_test.py | 5 +- tests/alerters/httppost_test.py | 9 +- tests/alerters/jira_test.py | 37 +++++++- tests/alerters/line_test.py | 5 +- tests/alerters/mattermost_test.py | 13 +-- tests/alerters/opsgenie_test.py | 99 +++++++++++++++----- tests/alerters/pagerduty_test.py | 9 +- tests/alerters/pagertree_test.py | 5 +- tests/alerters/rocketchat_test.py | 13 +-- tests/alerters/servicenow_test.py | 95 +++++++++++++++---- tests/alerters/ses_test.py | 68 +++++++++++++- tests/alerters/slack_test.py | 11 ++- tests/alerters/sns_test.py | 28 +++++- tests/alerters/stomp_test.py | 30 ++++++ tests/alerters/telegram_test.py | 7 +- tests/alerters/twilio_test.py | 109 ++++++++++++++++++++-- tests/alerters/victorops_test.py | 9 +- tests/alerters/zabbix_test.py | 35 ++++++- 29 files changed, 662 insertions(+), 195 deletions(-) diff --git a/elastalert/alerters/dingtalk.py b/elastalert/alerters/dingtalk.py index 2e023b2a1..7bb4c0b41 100644 --- a/elastalert/alerters/dingtalk.py +++ b/elastalert/alerters/dingtalk.py @@ -26,9 +26,6 @@ def __init__(self, rule): self.dingtalk_proxy_login = self.rule.get('dingtalk_proxy_login', None) self.dingtalk_proxy_password = self.rule.get('dingtalk_proxy_pass', None) - def format_body(self, body): - return body.encode('utf8') - def alert(self, matches): title = self.create_title(matches) body = self.create_alert_body(matches) @@ -48,7 +45,7 @@ def alert(self, matches): 'content': body } } - elif self.dingtalk_msgtype == 'markdown': + if self.dingtalk_msgtype == 'markdown': # markdown payload = { 'msgtype': self.dingtalk_msgtype, @@ -57,7 +54,7 @@ def alert(self, matches): 'text': body } } - elif self.dingtalk_msgtype == 'single_action_card': + if self.dingtalk_msgtype == 'single_action_card': # singleActionCard payload = { 'msgtype': 'actionCard', @@ -68,7 +65,7 @@ def alert(self, matches): 'singleURL': self.dingtalk_single_url } } - elif self.dingtalk_msgtype == 'action_card': + if self.dingtalk_msgtype == 'action_card': # actionCard payload = { 'msgtype': 'actionCard', diff --git a/elastalert/alerters/mattermost.py b/elastalert/alerters/mattermost.py index 2da5b8c42..0ba6ec81b 100644 --- a/elastalert/alerters/mattermost.py +++ b/elastalert/alerters/mattermost.py @@ -80,6 +80,7 @@ def alert(self, matches): # set https proxy, if it was provided proxies = {'https': self.mattermost_proxy} if self.mattermost_proxy else None payload = { + 'username': self.mattermost_username_override, 'attachments': [ { 'fallback': "{0}: {1}".format(title, self.mattermost_msg_pretext), @@ -102,9 +103,6 @@ def alert(self, matches): if self.mattermost_icon_url_override != '': payload['icon_url'] = self.mattermost_icon_url_override - if self.mattermost_username_override != '': - payload['username'] = self.mattermost_username_override - if self.mattermost_channel_override != '': payload['channel'] = self.mattermost_channel_override diff --git a/elastalert/alerters/opsgenie.py b/elastalert/alerters/opsgenie.py index b916446df..bc241f364 100644 --- 
a/elastalert/alerters/opsgenie.py +++ b/elastalert/alerters/opsgenie.py @@ -53,9 +53,6 @@ def _parse_responders(self, responders, responder_args, matches, default_respond responders = formated_responders return responders - def _fill_responders(self, responders, type_): - return [{'id': r, 'type': type_} for r in responders] - def alert(self, matches): body = '' for match in matches: diff --git a/tests/alerters/alerta_test.py b/tests/alerters/alerta_test.py index 858a06ba4..6e3949682 100644 --- a/tests/alerters/alerta_test.py +++ b/tests/alerters/alerta_test.py @@ -1,9 +1,10 @@ import datetime import json import logging +import pytest from unittest import mock -import pytest + from requests import RequestException from elastalert.alerters.alerta import AlertaAlerter @@ -696,7 +697,7 @@ def test_alerta_required_error(alerta_api_url, expected_data): 'alert': 'alerta' } - if alerta_api_url != '': + if alerta_api_url: rule['alerta_api_url'] = alerta_api_url rules_loader = FileRulesLoader({}) @@ -723,7 +724,7 @@ def test_alerta_create_default_title(query_key, expected_data): 'type': 'any', 'alert': 'alerta' } - if query_key != '': + if query_key: rule['query_key'] = query_key match = [ diff --git a/tests/alerters/chatwork_test.py b/tests/alerters/chatwork_test.py index fb0a1e601..23279e2d3 100644 --- a/tests/alerters/chatwork_test.py +++ b/tests/alerters/chatwork_test.py @@ -1,7 +1,8 @@ import logging +import pytest from unittest import mock -import pytest + from requests import RequestException from requests.auth import HTTPProxyAuth @@ -127,9 +128,9 @@ def test_chatwork_getinfo(): @pytest.mark.parametrize('chatwork_apikey, chatwork_room_id, expected_data', [ - ('', '', 'Missing required option(s): chatwork_apikey, chatwork_room_id'), - ('xxxx1', '', 'Missing required option(s): chatwork_apikey, chatwork_room_id'), - ('', 'xxxx2', '1Missing required option(s): chatwork_apikey, chatwork_room_id'), + ('', '', 'Missing required option(s): chatwork_apikey, chatwork_room_id'), + ('xxxx1', '', 'Missing required option(s): chatwork_apikey, chatwork_room_id'), + ('', 'xxxx2', '1Missing required option(s): chatwork_apikey, chatwork_room_id'), ('xxxx1', 'xxxx2', { "type": "chatwork", @@ -144,10 +145,10 @@ def test_chatwork_required_error(chatwork_apikey, chatwork_room_id, expected_dat 'alert': [] } - if chatwork_apikey != '': + if chatwork_apikey: rule['chatwork_apikey'] = chatwork_apikey - if chatwork_room_id != '': + if chatwork_room_id: rule['chatwork_room_id'] = chatwork_room_id rules_loader = FileRulesLoader({}) diff --git a/tests/alerters/datadog_test.py b/tests/alerters/datadog_test.py index 3e082aa7c..ad0b77f34 100644 --- a/tests/alerters/datadog_test.py +++ b/tests/alerters/datadog_test.py @@ -1,8 +1,9 @@ import json import logging +import pytest from unittest import mock -import pytest + from requests import RequestException from elastalert.alerters.datadog import DatadogAlerter @@ -107,10 +108,10 @@ def test_datadog_required_error(datadog_api_key, datadog_app_key, expected_data) 'alert_subject': 'Test Datadog Event Alert' } - if datadog_api_key != '': + if datadog_api_key: rule['datadog_api_key'] = datadog_api_key - if datadog_app_key != '': + if datadog_app_key: rule['datadog_app_key'] = datadog_app_key rules_loader = FileRulesLoader({}) diff --git a/tests/alerters/dingtalk_test.py b/tests/alerters/dingtalk_test.py index c1a2731fa..bdda888f5 100644 --- a/tests/alerters/dingtalk_test.py +++ b/tests/alerters/dingtalk_test.py @@ -1,8 +1,9 @@ import json import logging +import pytest from 
unittest import mock -import pytest + from requests import RequestException from requests.auth import HTTPProxyAuth @@ -377,7 +378,7 @@ def test_dingtalk_required_error(dingtalk_access_token, expected_data): 'alert_subject': 'Test DingTalk' } - if dingtalk_access_token != '': + if dingtalk_access_token: rule['dingtalk_access_token'] = dingtalk_access_token rules_loader = FileRulesLoader({}) diff --git a/tests/alerters/discord_test.py b/tests/alerters/discord_test.py index 3d6893c49..634b9b35e 100644 --- a/tests/alerters/discord_test.py +++ b/tests/alerters/discord_test.py @@ -1,8 +1,9 @@ import json import logging +import pytest from unittest import mock -import pytest + from requests import RequestException from requests.auth import HTTPProxyAuth @@ -248,7 +249,7 @@ def test_discord_required_error(discord_webhook_url, expected_data): 'alert_subject': 'Test Discord' } - if discord_webhook_url != '': + if discord_webhook_url: rule['discord_webhook_url'] = discord_webhook_url rules_loader = FileRulesLoader({}) diff --git a/tests/alerters/email_test.py b/tests/alerters/email_test.py index 31597526a..5d06a35f5 100644 --- a/tests/alerters/email_test.py +++ b/tests/alerters/email_test.py @@ -53,7 +53,15 @@ def test_email(caplog): assert ('elastalert', logging.INFO, "Sent email to ['testing@test.test', 'test@test.test']") == caplog.record_tuples[0] -def test_email_from_field(): +@pytest.mark.parametrize('email_from_field, email_add_domain, match_data, expected_data', [ + ('data.user', '', [{'data': {'user': 'qlo'}}], ['qlo@example.com']), + ('data.user', '@example.com', [{'data': {'user': 'qlo'}}], ['qlo@example.com']), + ('data.user', 'example.com', [{'data': {'user': '@qlo'}}], ['@qlo']), + ('data.user', 'example.com', [{'data': {'user': ['qlo', 'foo']}}], ['qlo@example.com', 'foo@example.com']), + ('data.user', 'example.com', [{'data': {'foo': 'qlo'}}], ['testing@test.test']), + ('data.user', 'example.com', [{'data': {'user': 17}}], ['testing@test.test']) +]) +def test_email_from_field(email_from_field, email_add_domain, match_data, expected_data): rule = { 'name': 'test alert', 'email': ['testing@test.test'], @@ -63,41 +71,15 @@ def test_email_from_field(): 'email_from_field': 'data.user', 'owner': 'owner_value' } - # Found, without @ - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - alert = EmailAlerter(rule) - alert.alert([{'data': {'user': 'qlo'}}]) - assert mock_smtp.mock_calls[4][1][1] == ['qlo@example.com'] - - # Found, with @ - rule['email_add_domain'] = '@example.com' - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - alert = EmailAlerter(rule) - alert.alert([{'data': {'user': 'qlo'}}]) - assert mock_smtp.mock_calls[4][1][1] == ['qlo@example.com'] - - # Found, list + if email_from_field: + rule['email_from_field'] = email_from_field + if email_add_domain: + rule['email_add_domain'] = email_add_domain with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) - alert.alert([{'data': {'user': ['qlo', 'foo']}}]) - assert mock_smtp.mock_calls[4][1][1] == ['qlo@example.com', 'foo@example.com'] - - # Not found - with mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - alert = EmailAlerter(rule) - alert.alert([{'data': {'foo': 'qlo'}}]) - assert mock_smtp.mock_calls[4][1][1] == ['testing@test.test'] - - # Found, wrong type - with 
mock.patch('elastalert.alerters.email.SMTP') as mock_smtp: - mock_smtp.return_value = mock.Mock() - alert = EmailAlerter(rule) - alert.alert([{'data': {'user': 17}}]) - assert mock_smtp.mock_calls[4][1][1] == ['testing@test.test'] + alert.alert(match_data) + assert mock_smtp.mock_calls[4][1][1] == expected_data def test_email_with_unicode_strings(): @@ -447,7 +429,7 @@ def test_email_key_error(email, expected_data): 'snowman': '☃' } - if email != '': + if email: rule['email'] = email alert = EmailAlerter(rule) @@ -473,7 +455,7 @@ def test_email_create_default_title(query_key, expected_data): 'alert': 'email', 'email': 'test@test.com' } - if query_key != '': + if query_key: rule['query_key'] = query_key match = [ @@ -629,13 +611,11 @@ def test_email_smtp_exception(): 'smtp_auth_file': 'file.txt', 'rule_file': '/tmp/foo.yaml' } - with mock.patch('elastalert.alerters.email.SMTP_SSL') as mock_smtp: + with mock.patch('elastalert.alerters.email.SMTP_SSL'): with mock.patch('elastalert.alerts.read_yaml') as mock_open: mock_open.return_value = {'user': 'someone', 'password': 'hunter2'} - mock_smtp.return_value = mock.Mock() alert = EmailAlerter(rule) - - alert.alert([{'test_term': 'test_value'}]) + alert.alert([{'test_term': 'test_value'}]) assert 'Error connecting to SMTP host: ' in str(ea) diff --git a/tests/alerters/exotel_test.py b/tests/alerters/exotel_test.py index 6d5e5e9a8..b4cb7e7c2 100644 --- a/tests/alerters/exotel_test.py +++ b/tests/alerters/exotel_test.py @@ -1,7 +1,13 @@ +import logging import pytest +from unittest import mock + +from requests import RequestException + from elastalert.alerters.exotel import ExotelAlerter from elastalert.loaders import FileRulesLoader +from elastalert.util import EAException def test_exotel_getinfo(): @@ -27,35 +33,25 @@ def test_exotel_getinfo(): assert expected_data == actual_data +exotel_required_error_expected_data = 'Missing required option(s): exotel_account_sid, ' +exotel_required_error_expected_data += 'exotel_auth_token, exotel_to_number, exotel_from_number' + + @pytest.mark.parametrize('exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number, expected_data', [ - ('', '', '', '', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('xxxx1', '', '', '', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('', 'xxxx2', '', '', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('', '', 'xxxx3', '', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('', '', '', 'xxxx4', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('xxxx1', 'xxxx2', '', '', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('xxxx1', '', 'xxxx3', '', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('xxxx1', '', '', 'xxxx4', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('', 'xxxx2', 'xxxx3', '', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('', 'xxxx2', '', 'xxxx4', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('', '', 'xxxx3', 'xxxx4', 
- 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('xxxx1', 'xxxx2', 'xxxx3', '', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('xxxx1', '', 'xxxx3', 'xxxx4', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), - ('', 'xxxx2', 'xxxx3', 'xxxx4', - 'Missing required option(s): exotel_account_sid, exotel_auth_token, exotel_to_number, exotel_from_number'), + ('', '', '', '', exotel_required_error_expected_data), + ('xxxx1', '', '', '', exotel_required_error_expected_data), + ('', 'xxxx2', '', '', exotel_required_error_expected_data), + ('', '', 'xxxx3', '', exotel_required_error_expected_data), + ('', '', '', 'xxxx4', exotel_required_error_expected_data), + ('xxxx1', 'xxxx2', '', '', exotel_required_error_expected_data), + ('xxxx1', '', 'xxxx3', '', exotel_required_error_expected_data), + ('xxxx1', '', '', 'xxxx4', exotel_required_error_expected_data), + ('', 'xxxx2', 'xxxx3', '', exotel_required_error_expected_data), + ('', 'xxxx2', '', 'xxxx4', exotel_required_error_expected_data), + ('', '', 'xxxx3', 'xxxx4', exotel_required_error_expected_data), + ('xxxx1', 'xxxx2', 'xxxx3', '', exotel_required_error_expected_data), + ('xxxx1', '', 'xxxx3', 'xxxx4', exotel_required_error_expected_data), + ('', 'xxxx2', 'xxxx3', 'xxxx4', exotel_required_error_expected_data), ('xxxx1', 'xxxx2', 'xxxx3', 'xxxx4', { 'type': 'exotel', @@ -70,16 +66,16 @@ def test_exotel_required_error(exotel_account_sid, exotel_auth_token, exotel_to_ 'alert': [] } - if exotel_account_sid != '': + if exotel_account_sid: rule['exotel_account_sid'] = exotel_account_sid - if exotel_auth_token != '': + if exotel_auth_token: rule['exotel_auth_token'] = exotel_auth_token - if exotel_to_number != '': + if exotel_to_number: rule['exotel_to_number'] = exotel_to_number - if exotel_from_number != '': + if exotel_from_number: rule['exotel_from_number'] = exotel_from_number rules_loader = FileRulesLoader({}) @@ -90,3 +86,91 @@ def test_exotel_required_error(exotel_account_sid, exotel_auth_token, exotel_to_ assert expected_data == actual_data except Exception as ea: assert expected_data in str(ea) + + +def test_exotel(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'exotel_account_sid': 'xxxxx1', + 'exotel_auth_token': 'xxxxx2', + 'exotel_to_number': 'xxxxx3', + 'exotel_from_number': 'xxxxx4', + 'alert': [] + } + match = { + '@timestamp': '2021-01-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + + with mock.patch('elastalert.alerters.exotel.Exotel.sms') as mock_exotel: + mock_exotel.return_value = 200 + alert = ExotelAlerter(rule) + alert.alert([match]) + expected = [ + mock.call()('xxxxx4', 'xxxxx3', 'Test Rule') + ] + + assert mock_exotel.mock_calls == expected + assert ('elastalert', logging.INFO, 'Trigger sent to Exotel') == caplog.record_tuples[0] + + +def test_exotel_status_cod_not_200(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'exotel_account_sid': 'xxxxx1', + 'exotel_auth_token': 'xxxxx2', + 'exotel_to_number': 'xxxxx3', + 'exotel_from_number': 'xxxxx4', + 'alert': [] + } + match = { + '@timestamp': '2021-01-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + rules_loader = 
FileRulesLoader({}) + rules_loader.load_modules(rule) + + with mock.patch('elastalert.alerters.exotel.Exotel.sms') as mock_exotel: + mock_exotel.return_value = 201 + alert = ExotelAlerter(rule) + alert.alert([match]) + + assert 'Error posting to Exotel, response code is' in str(ea) + + +def test_exotel_request_error(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'exotel_account_sid': 'xxxxx1', + 'exotel_auth_token': 'xxxxx2', + 'exotel_to_number': 'xxxxx3', + 'exotel_from_number': 'xxxxx4', + 'alert': [] + } + match = { + '@timestamp': '2021-01-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('elastalert.alerters.exotel.Exotel.sms', mock_run), pytest.raises(RequestException) as mock_exotel: + mock_exotel.return_value = 200 + alert = ExotelAlerter(rule) + alert.alert([match]) + assert 'Error posting to Exotel' in str(ea) diff --git a/tests/alerters/gitter_test.py b/tests/alerters/gitter_test.py index efb84a009..8935d01c6 100644 --- a/tests/alerters/gitter_test.py +++ b/tests/alerters/gitter_test.py @@ -1,8 +1,9 @@ import json import logging +import pytest from unittest import mock -import pytest + from requests import RequestException from elastalert.alerters.gitter import GitterAlerter @@ -11,9 +12,9 @@ @pytest.mark.parametrize('msg_level, except_msg_level', [ - ('', 'error'), + ('', 'error'), ('error', 'error'), - ('info', 'info') + ('info', 'info') ]) def test_gitter_msg_level(msg_level, except_msg_level, caplog): caplog.set_level(logging.INFO) @@ -24,7 +25,7 @@ def test_gitter_msg_level(msg_level, except_msg_level, caplog): 'alert': [] } - if msg_level != '': + if msg_level: rule['gitter_msg_level'] = msg_level rules_loader = FileRulesLoader({}) @@ -146,7 +147,7 @@ def test_gitter_required_error(gitter_webhook_url, expected_data): 'alert': [] } - if gitter_webhook_url != '': + if gitter_webhook_url: rule['gitter_webhook_url'] = gitter_webhook_url rules_loader = FileRulesLoader({}) diff --git a/tests/alerters/googlechat_test.py b/tests/alerters/googlechat_test.py index 5e998ca28..b8f95ce11 100644 --- a/tests/alerters/googlechat_test.py +++ b/tests/alerters/googlechat_test.py @@ -1,8 +1,9 @@ import json import logging +import pytest from unittest import mock -import pytest + from requests import RequestException from elastalert.alerters.googlechat import GoogleChatAlerter @@ -164,7 +165,7 @@ def test_google_chat_required_error(googlechat_webhook_url, expected_data): 'alert': [] } - if googlechat_webhook_url != '': + if googlechat_webhook_url: rule['googlechat_webhook_url'] = googlechat_webhook_url rules_loader = FileRulesLoader({}) diff --git a/tests/alerters/httppost_test.py b/tests/alerters/httppost_test.py index a36050fe0..507331902 100644 --- a/tests/alerters/httppost_test.py +++ b/tests/alerters/httppost_test.py @@ -1,8 +1,9 @@ import json import logging +import pytest from unittest import mock -import pytest + from requests import RequestException from elastalert.alerters.httppost import HTTPPostAlerter @@ -235,10 +236,10 @@ def test_http_alerter_post_ca_certs(ca_certs, ignore_ssl_errors, excpet_verify): 'http_post_static_payload': {'name': 'somestaticname'}, 'alert': [] } - if ca_certs != '': + if ca_certs: rule['http_post_ca_certs'] = ca_certs - if ignore_ssl_errors != '': + if ignore_ssl_errors: rule['http_post_ignore_ssl_errors'] = 
ignore_ssl_errors rules_loader = FileRulesLoader({}) @@ -324,7 +325,7 @@ def test_http_required_error(http_post_url, expected_data): 'alert': [] } - if http_post_url != '': + if http_post_url: rule['http_post_url'] = http_post_url rules_loader = FileRulesLoader({}) diff --git a/tests/alerters/jira_test.py b/tests/alerters/jira_test.py index 636d5cd37..c469e8ca9 100644 --- a/tests/alerters/jira_test.py +++ b/tests/alerters/jira_test.py @@ -1,9 +1,9 @@ import datetime import logging - -from unittest import mock import pytest + from jira import JIRAError +from unittest import mock from elastalert.alerters.jira import JiraFormattedMatchString, JiraAlerter from elastalert.util import ts_now @@ -126,6 +126,10 @@ def test_jira(caplog): alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) assert mock_jira.mock_calls == expected + log_messeage = 'Error while searching for JIRA ticket using jql \'project=testproject' + log_messeage += ' AND summary~"Issue occurred at 2014-10-31T00:00:00" and' + log_messeage += ' created >= "2021-05-25"\': JiraError HTTP None\n\t' + assert ('elastalert', logging.ERROR, log_messeage) == caplog.record_tuples[3] # Only bump after 3d of inactivity rule['jira_bump_after_inactivity'] = 3 @@ -363,3 +367,32 @@ def test_jira_getinfo(): } actual_data = alert.get_info() assert expected_data == actual_data + + +def test_jira_set_priority(caplog): + description_txt = "Description stuff goes here like a runbook link." + rule = { + 'name': 'test alert', + 'jira_account_file': 'jirafile', + 'type': mock_rule(), + 'jira_project': 'testproject', + 'jira_priority': 0, + 'jira_issuetype': 'testtype', + 'jira_server': 'jiraserver', + 'jira_description': description_txt, + 'jira_assignee': 'testuser', + 'timestamp_field': '@timestamp', + 'alert_subject': 'Issue {0} occurred at {1}', + 'alert_subject_args': ['test_term', '@timestamp'], + 'rule_file': '/tmp/foo.yaml' + } + with mock.patch('elastalert.alerters.jira.JIRA'), \ + mock.patch('elastalert.alerts.read_yaml') as mock_open: + mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'} + alert = JiraAlerter(rule) + alert.set_priority + + assert ('elastalert', logging.ERROR, + 'Priority 0 not found. Valid priorities are []') == caplog.record_tuples[0] + assert ('elastalert', logging.ERROR, + 'Priority 0 not found. 
Valid priorities are []') == caplog.record_tuples[1] diff --git a/tests/alerters/line_test.py b/tests/alerters/line_test.py index e30eb551c..2e8a2bafa 100644 --- a/tests/alerters/line_test.py +++ b/tests/alerters/line_test.py @@ -1,7 +1,8 @@ import logging +import pytest from unittest import mock -import pytest + from requests import RequestException from elastalert.alerters.line import LineNotifyAlerter @@ -102,7 +103,7 @@ def test_line_required_error(linenotify_access_token, expected_data): 'alert': [] } - if linenotify_access_token != '': + if linenotify_access_token: rule['linenotify_access_token'] = linenotify_access_token rules_loader = FileRulesLoader({}) diff --git a/tests/alerters/mattermost_test.py b/tests/alerters/mattermost_test.py index 41ad8de18..92cdf3cb4 100644 --- a/tests/alerters/mattermost_test.py +++ b/tests/alerters/mattermost_test.py @@ -1,8 +1,9 @@ import json import logging +import pytest from unittest import mock -import pytest + from requests import RequestException from elastalert.alerters.mattermost import MattermostAlerter @@ -798,9 +799,9 @@ def test_mattermost_get_aggregation_summary_text__maximum_width(): @pytest.mark.parametrize('msg_color, except_msg_color', [ - ('', 'danger'), - ('danger', 'danger'), - ('good', 'good'), + ('', 'danger'), + ('danger', 'danger'), + ('good', 'good'), ('warning', 'warning') ]) def test_mattermost_msg_color(msg_color, except_msg_color): @@ -815,7 +816,7 @@ def test_mattermost_msg_color(msg_color, except_msg_color): 'alert_subject': 'Test Mattermost' } - if msg_color != '': + if msg_color: rule['mattermost_msg_color'] = msg_color rules_loader = FileRulesLoader({}) @@ -896,7 +897,7 @@ def test_mattermost_required_error(mattermost_webhook_url, expected_data): 'alert_subject': 'Test Mattermost' } - if mattermost_webhook_url != '': + if mattermost_webhook_url: rule['mattermost_webhook_url'] = mattermost_webhook_url rules_loader = FileRulesLoader({}) diff --git a/tests/alerters/opsgenie_test.py b/tests/alerters/opsgenie_test.py index a9ceb64b2..4dc56416c 100644 --- a/tests/alerters/opsgenie_test.py +++ b/tests/alerters/opsgenie_test.py @@ -1,16 +1,49 @@ import logging +import pytest +import requests from unittest import mock -import pytest + from requests import RequestException -from elastalert.util import EAException from elastalert.alerters.opsgenie import OpsGenieAlerter from elastalert.alerts import BasicMatchString +from elastalert.util import EAException from tests.alerts_test import mock_rule def test_opsgenie_basic(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'testOGalert', + 'opsgenie_key': 'ogkey', + 'opsgenie_addr': 'https://api.opsgenie.com/v2/alerts', + 'type': mock_rule() + } + with mock.patch('requests.post') as mock_post: + rep = requests + rep.status_code = 202 + mock_post.return_value = rep + + alert = OpsGenieAlerter(rule) + alert.alert([{'@timestamp': '2014-10-31T00:00:00'}]) + print(("mock_post: {0}".format(mock_post._mock_call_args_list))) + mcal = mock_post._mock_call_args_list + + print(('mcal: {0}'.format(mcal[0]))) + assert mcal[0][0][0] == ('https://api.opsgenie.com/v2/alerts') + + assert mock_post.called + + assert mcal[0][1]['headers']['Authorization'] == 'GenieKey ogkey' + assert mcal[0][1]['json']['source'] == 'ElastAlert' + assert mcal[0][1]['json']['source'] == 'ElastAlert' + user, level, message = caplog.record_tuples[0] + assert "Error response from https://api.opsgenie.com/v2/alerts \n API Response: Date: Sat, 26 Jun 2021 03:45:09 +0900 Subject: [PATCH 0331/1065] Fixing the 
schema for rocket_chat_channel_override --- elastalert/schema.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index c724fd40a..1365f9507 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -470,7 +470,7 @@ properties: ### RocketChat rocket_chat_webhook_url: *arrayOfString rocket_chat_username_override: {type: string} - rocket_chat_channel_override: {type: string} + rocket_chat_channel_override: *arrayOfString rocket_chat_emoji_override: {type: string} rocket_chat_msg_color: {enum: [good, warning, danger]} rocket_chat_text_string: {type: string} From 366c0fde63e83e47289990b5de16932e675dfce4 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 26 Jun 2021 04:19:24 +0900 Subject: [PATCH 0332/1065] fix test_jira --- tests/alerters/jira_test.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/alerters/jira_test.py b/tests/alerters/jira_test.py index c469e8ca9..74712be7e 100644 --- a/tests/alerters/jira_test.py +++ b/tests/alerters/jira_test.py @@ -126,10 +126,10 @@ def test_jira(caplog): alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) assert mock_jira.mock_calls == expected - log_messeage = 'Error while searching for JIRA ticket using jql \'project=testproject' - log_messeage += ' AND summary~"Issue occurred at 2014-10-31T00:00:00" and' - log_messeage += ' created >= "2021-05-25"\': JiraError HTTP None\n\t' - assert ('elastalert', logging.ERROR, log_messeage) == caplog.record_tuples[3] + user, level, message = caplog.record_tuples[3] + assert 'elastalert' in user + assert logging.ERROR == level + assert 'Error while searching for JIRA ticket using jql' in message # Only bump after 3d of inactivity rule['jira_bump_after_inactivity'] = 3 From 8c5fcba8cf193f95e591bbaa970abb50c7a1cfc2 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Fri, 25 Jun 2021 16:17:28 -0400 Subject: [PATCH 0333/1065] Update CONTRIBUTING.md --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f4f310e65..5b0b2997a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -15,7 +15,7 @@ Before submitting the PR review that you have included the following changes, wh ## Releases -This section is only applicable to "maintainers". PR "contributors" do not need to follow the below procedure. +This section is only applicable to _maintainers_. PR _contributors_ do not need to follow the below procedure. As ElastAlert 2 is a community-maintained project, releases will typically contain unrelated contributions without a common theme. It's up to the maintainers to determine when the project is ready for a release, however, if you are looking to use a newly merged feature that hasn't yet been released, feel free to open a [discussion][5] and let us know. 
From dcb460f5e8bd795f8b07c63869bb5573094f38d1 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sat, 26 Jun 2021 22:46:45 +0900 Subject: [PATCH 0334/1065] Added kibana discover sample to docs --- docs/source/ruletypes.rst | 97 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 97 insertions(+) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 5d7e5c7b0..218f9632d 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -585,6 +585,20 @@ This setting requires the following settings are also configured: ``generate_kibana_discover_url: true`` +Example usage:: + + generate_kibana_discover_url: True + kibana_discover_app_url: "http://localhost:5601/app/discover#/" + kibana_discover_index_pattern_id: "4babf380-c3b1-11eb-b616-1b59c2feec54" + kibana_discover_version: "7.13" + kibana_discover_from_timedelta: + minutes: 10 + kibana_discover_to_timedelta: + minutes: 10 + alert_text: '{0}' + alert_text_args: [ kibana_discover_url ] + alert_text_type: alert_text_only + kibana_discover_app_url ^^^^^^^^^^^^^^^^^^^^^^^ @@ -2236,6 +2250,27 @@ Example mattermost_msg_fields:: ``mattermost_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. +Example mattermost_attach_kibana_discover_url, mattermost_kibana_discover_color, mattermost_kibana_discover_title:: + + # (Required) + generate_kibana_discover_url: True + kibana_discover_app_url: "http://localhost:5601/app/discover#/" + kibana_discover_index_pattern_id: "4babf380-c3b1-11eb-b616-1b59c2feec54" + kibana_discover_version: "7.13" + + # (Optional) + kibana_discover_from_timedelta: + minutes: 10 + kibana_discover_to_timedelta: + minutes: 10 + + # (Required) + mattermost_attach_kibana_discover_url: True + + # (Optional) + mattermost_kibana_discover_color: "#ec4b98" + mattermost_kibana_discover_title: "Discover in Kibana" + Microsoft Teams ~~~~~~~~~~~~~~~ @@ -2318,6 +2353,26 @@ Example usage:: Environment: '$VAR' # environment variable Message: { field: message } # field in the first match +Example opsgenie_details with kibana_discover_url:: + + # (Required) + generate_kibana_discover_url: True + kibana_discover_app_url: "http://localhost:5601/app/discover#/" + kibana_discover_index_pattern_id: "4babf380-c3b1-11eb-b616-1b59c2feec54" + kibana_discover_version: "7.13" + + # (Optional) + kibana_discover_from_timedelta: + minutes: 10 + kibana_discover_to_timedelta: + minutes: 10 + + # (Required) + opsgenie_details: + Kibana Url: { field: kibana_discover_url } + Message: { field: message } + Testing: 'yes' + PagerDuty ~~~~~~~~~ @@ -2422,6 +2477,27 @@ ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . ``rocket_chat_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. 
+Example rocket_chat_attach_kibana_discover_url, rocket_chat_kibana_discover_color, rocket_chat_kibana_discover_title:: + + # (Required) + generate_kibana_discover_url: True + kibana_discover_app_url: "http://localhost:5601/app/discover#/" + kibana_discover_index_pattern_id: "4babf380-c3b1-11eb-b616-1b59c2feec54" + kibana_discover_version: "7.13" + + # (Optional) + kibana_discover_from_timedelta: + minutes: 10 + kibana_discover_to_timedelta: + minutes: 10 + + # (Required) + rocket_chat_attach_kibana_discover_url: True + + # (Optional) + rocket_chat_kibana_discover_color: "#ec4b98" + rocket_chat_kibana_discover_title: "Discover in Kibana" + ```rocket_chat_alert_fields``: You can add additional fields to your Rocket.Chat alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`. Example rocket_chat_alert_fields:: @@ -2559,6 +2635,27 @@ Example slack_alert_fields:: ``slack_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. +Example slack_attach_kibana_discover_url, slack_kibana_discover_color, slack_kibana_discover_title:: + + # (Required) + generate_kibana_discover_url: True + kibana_discover_app_url: "http://localhost:5601/app/discover#/" + kibana_discover_index_pattern_id: "4babf380-c3b1-11eb-b616-1b59c2feec54" + kibana_discover_version: "7.13" + + # (Optional) + kibana_discover_from_timedelta: + minutes: 10 + kibana_discover_to_timedelta: + minutes: 10 + + # (Required) + slack_attach_kibana_discover_url: True + + # (Optional) + slack_kibana_discover_color: "#ec4b98" + slack_kibana_discover_title: "Discover in Kibana" + ``slack_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. ``slack_footer``: Add a static footer text for alert. Defaults to "". From c94ec65cd6048af7a458ce882635c25332fb71bc Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 27 Jun 2021 02:07:03 +0900 Subject: [PATCH 0335/1065] remove boto_profile --- docs/source/elastalert.rst | 2 -- elastalert/util.py | 5 ----- 2 files changed, 7 deletions(-) diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index 3cf1ec242..0c8799a3b 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -213,8 +213,6 @@ unless overwritten in the rule config. The default is "localhost". ``aws_region``: This makes ElastAlert to sign HTTP requests when using Amazon Elasticsearch Service. It'll use instance role keys to sign the requests. The environment variable ``AWS_DEFAULT_REGION`` will override this field. -``boto_profile``: Deprecated! Boto profile to use when signing requests to Amazon Elasticsearch Service, if you don't want to use the instance role keys. - ``profile``: AWS profile to use when signing requests to Amazon Elasticsearch Service, if you don't want to use the instance role keys. The environment variable ``AWS_DEFAULT_PROFILE`` will override this field. 
diff --git a/elastalert/util.py b/elastalert/util.py index 22d881205..a5a4d5f78 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -387,11 +387,6 @@ def build_es_conn_config(conf): if 'aws_region' in conf: parsed_conf['aws_region'] = conf['aws_region'] - # Deprecated - if 'boto_profile' in conf: - elastalert_logger.warning('Found deprecated "boto_profile", use "profile" instead!') - parsed_conf['profile'] = conf['boto_profile'] - if 'profile' in conf: parsed_conf['profile'] = conf['profile'] From 92a809b01553ed999a32ba3599e2154583a42778 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 27 Jun 2021 02:14:20 +0900 Subject: [PATCH 0336/1065] Added slack_alert_fields to schema.yaml --- elastalert/schema.yaml | 13 +++++++++++++ tests/alerters/slack_test.py | 8 ++++---- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index be04883b3..02c4f3f81 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -34,6 +34,18 @@ definitions: milliseconds: {type: number} schedule: {type: string} + slackField: &slackField + type: object + additionalProperties: false + properties: + title: {type: string} + value: {type: string} + short: {type: boolean} + + arrayOfSlackFields: &arrayOfSlackField + type: array + items: *slackField + mattermostField: &mattermostField type: object additionalProperties: false @@ -502,6 +514,7 @@ properties: slack_parse_override: {enum: [none, full]} slack_text_string: {type: string} slack_proxy: {type: string} + slack_alert_fields: *arrayOfSlackField slack_ignore_ssl_errors: {type: boolean} slack_title: {type: string} slack_title_link: {type: string} diff --git a/tests/alerters/slack_test.py b/tests/alerters/slack_test.py index 6e5a6945c..7db351b4e 100644 --- a/tests/alerters/slack_test.py +++ b/tests/alerters/slack_test.py @@ -850,12 +850,12 @@ def test_slack_alert_fields(): { 'title': 'Host', 'value': 'somefield', - 'short': 'true' + 'short': True }, { 'title': 'Sensors', 'value': '@timestamp', - 'short': 'true' + 'short': True } ], 'alert_subject': 'Cool subject', @@ -884,12 +884,12 @@ def test_slack_alert_fields(): 'fields': [ { - 'short': 'true', + 'short': True, 'title': 'Host', 'value': 'foobarbaz' }, { - 'short': 'true', + 'short': True, 'title': 'Sensors', 'value': '2016-01-01T00:00:00' } From 8f9ab97ba6dae522097f1a9327407ca9aec382b3 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 26 Jun 2021 16:38:25 -0400 Subject: [PATCH 0337/1065] Correct the multiple terms fields example. 
--- docs/source/recipes/writing_filters.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/source/recipes/writing_filters.rst b/docs/source/recipes/writing_filters.rst index 1d2959262..8e3189f4b 100644 --- a/docs/source/recipes/writing_filters.rst +++ b/docs/source/recipes/writing_filters.rst @@ -67,11 +67,13 @@ Terms allows for easy combination of multiple term filters:: - terms: field: ["value1", "value2"] # value1 OR value2 -You can also match on multiple fields:: +You can also match on multiple fields (All terms must match at least one of the given values):: - terms: fieldX: ["value1", "value2"] + - terms: fieldY: ["something", "something_else"] + - terms: fieldZ: ["foo", "bar", "baz"] wildcard From 04b472a41f3b254b866b28b798a52b45f3cf72ac Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 27 Jun 2021 19:52:58 +0900 Subject: [PATCH 0338/1065] Fixing Coverage.py warning etc --- Makefile | 2 +- tests/tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 11217658f..49e5ada53 100644 --- a/Makefile +++ b/Makefile @@ -28,7 +28,7 @@ clean: make -C docs clean find . -name '*.pyc' -delete find . -name '__pycache__' -delete - rm -rf virtualenv_run .tox .coverage *.egg-info build + rm -rf virtualenv_run tests/.tox tests/.coverage *.egg-info docs/build %: @: diff --git a/tests/tox.ini b/tests/tox.ini index 0a6ad2355..8b977a3d5 100644 --- a/tests/tox.ini +++ b/tests/tox.ini @@ -6,7 +6,7 @@ setupdir = .. [testenv] deps = -r../requirements-dev.txt commands = - pytest --cov=elastalert --cov-report=term-missing --cov-branch --strict-markers . -n 4 {posargs} + pytest --cov=../elastalert --cov-report=term-missing --cov-branch --strict-markers . -n 4 {posargs} flake8 --config ../setup.cfg . [testenv:lint] From d1a2a4e714f8b5c00c7c47b10dfb6cb7c371655e Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 27 Jun 2021 21:08:16 +0900 Subject: [PATCH 0339/1065] Added Rocket.Chat settings rocket_chat_ca_certs rocket_chat_ignore_ssl_errors rocket_chat_timeout --- docs/source/ruletypes.rst | 12 ++- elastalert/alerters/rocketchat.py | 13 ++- tests/alerters/rocketchat_test.py | 164 +++++++++++++++++++++++++++--- 3 files changed, 170 insertions(+), 19 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 218f9632d..2032622a2 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2462,14 +2462,20 @@ Optional: ``rocket_chat_channel_override``: Incoming webhooks have a default channel, but it can be overridden. A public channel can be specified “#other-channel”, and a Direct Message with “@username”. -``rocket_chat_emoji_override``: By default ElastAlert will use the :ghost: emoji when posting to the channel. You can use a different emoji per +``rocket_chat_emoji_override``: By default ElastAlert 2 will use the :ghost: emoji when posting to the channel. You can use a different emoji per ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . ``rocket_chat_msg_color``: By default the alert will be posted with the ‘danger’ color. You can also use ‘good’ or ‘warning’ colors. ``rocket_chat_text_string``: Notification message you want to add. -``rocket_chat_proxy``: By default ElastAlert will not use a network proxy to send notifications to Rocket.Chat. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. +``rocket_chat_proxy``: By default ElastAlert 2 will not use a network proxy to send notifications to Rocket.Chat. 
Set this option using ``hostname:port`` if you need to use a proxy. only supports https.
+
+``rocket_chat_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate.
+
+``rocket_chat_ignore_ssl_errors``: By default ElastAlert 2 will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors.
+
+``rocket_chat_timeout``: You can specify a timeout value, in seconds, for communicating with Rocket.Chat. The default is 10. If a timeout occurs, the alert will be retried next time ElastAlert 2 cycles.
 
 ``rocket_chat_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the Rocket.Chat notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``.
 
@@ -2498,7 +2504,7 @@ Example rocket_chat_attach_kibana_discover_url, rocket_chat_kibana_discover_colo
     rocket_chat_kibana_discover_color: "#ec4b98"
     rocket_chat_kibana_discover_title: "Discover in Kibana"
 
-```rocket_chat_alert_fields``: You can add additional fields to your Rocket.Chat alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`.
+``rocket_chat_alert_fields``: You can add additional fields to your Rocket.Chat alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`.
 
 Example rocket_chat_alert_fields::
 
diff --git a/elastalert/alerters/rocketchat.py b/elastalert/alerters/rocketchat.py
index a16ccc3fc..6b54ba941 100644
--- a/elastalert/alerters/rocketchat.py
+++ b/elastalert/alerters/rocketchat.py
@@ -31,6 +31,9 @@ def __init__(self, rule):
         self.rocket_chat_attach_kibana_discover_url = self.rule.get('rocket_chat_attach_kibana_discover_url', False)
         self.rocket_chat_kibana_discover_color = self.rule.get('rocket_chat_kibana_discover_color', '#ec4b98')
         self.rocket_chat_kibana_discover_title = self.rule.get('rocket_chat_kibana_discover_title', 'Discover in Kibana')
+        self.rocket_chat_ignore_ssl_errors = self.rule.get('rocket_chat_ignore_ssl_errors', False)
+        self.rocket_chat_timeout = self.rule.get('rocket_chat_timeout', 10)
+        self.rocket_chat_ca_certs = self.rule.get('rocket_chat_ca_certs')
 
     def format_body(self, body):
         return body
@@ -92,11 +95,19 @@ def alert(self, matches):
         for url in self.rocket_chat_webhook_url:
             for channel_override in self.rocket_chat_channel_override:
                 try:
+                    if self.rocket_chat_ca_certs:
+                        verify = self.rocket_chat_ca_certs
+                    else:
+                        verify = not self.rocket_chat_ignore_ssl_errors
+                    if self.rocket_chat_ignore_ssl_errors:
+                        requests.packages.urllib3.disable_warnings()
                     payload['channel'] = channel_override
                     response = requests.post(
                         url, data=json.dumps(payload, cls=DateTimeEncoder),
                         headers=headers,
-                        proxies=proxies)
+                        verify=verify,
+                        proxies=proxies,
+                        timeout=self.rocket_chat_timeout)
                     warnings.resetwarnings()
                     response.raise_for_status()
                 except RequestException as e:
diff --git a/tests/alerters/rocketchat_test.py b/tests/alerters/rocketchat_test.py
index d46cb15fd..67630562d 100644
--- a/tests/alerters/rocketchat_test.py
+++ b/tests/alerters/rocketchat_test.py
@@ -49,7 +49,9 @@ def test_rocketchat_uses_custom_title(caplog):
         rule['rocket_chat_webhook_url'],
         data=mock.ANY,
         headers={'content-type': 'application/json'},
-        proxies=None
+        proxies=None,
+        timeout=10,
+        verify=True
     )
     assert
expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) assert ('elastalert', logging.INFO, 'Alert sent to Rocket.Chat') == caplog.record_tuples[0] @@ -90,7 +92,9 @@ def test_rocketchat_uses_rule_name_when_custom_title_is_not_provided(): rule['rocket_chat_webhook_url'][0], data=mock.ANY, headers={'content-type': 'application/json'}, - proxies=None + proxies=None, + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -132,7 +136,9 @@ def test_rocketchat_username_override(): rule['rocket_chat_webhook_url'], data=mock.ANY, headers={'content-type': 'application/json'}, - proxies=None + proxies=None, + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -173,7 +179,9 @@ def test_rocketchat_chat_channel(): rule['rocket_chat_webhook_url'][0], data=mock.ANY, headers={'content-type': 'application/json'}, - proxies=None + proxies=None, + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -228,7 +236,9 @@ def test_rocketchat_uses_list_of_custom_rocket_chat_channel(): rule['rocket_chat_webhook_url'][0], data=mock.ANY, headers={'content-type': 'application/json'}, - proxies=None + proxies=None, + timeout=10, + verify=True ) assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data2 == json.loads(mock_post_request.call_args_list[1][1]['data']) @@ -270,7 +280,9 @@ def test_rocketchat_emoji_override(): rule['rocket_chat_webhook_url'][0], data=mock.ANY, headers={'content-type': 'application/json'}, - proxies=None + proxies=None, + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -310,7 +322,9 @@ def test_rocketchat_emoji_override_blank(): rule['rocket_chat_webhook_url'][0], data=mock.ANY, headers={'content-type': 'application/json'}, - proxies=None + proxies=None, + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -362,7 +376,9 @@ def test_rocketchat_msg_color(msg_color, except_msg_color): rule['rocket_chat_webhook_url'], data=mock.ANY, headers={'content-type': 'application/json'}, - proxies=None + proxies=None, + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -405,7 +421,9 @@ def test_rocketchat_text_string(): rule['rocket_chat_webhook_url'], data=mock.ANY, headers={'content-type': 'application/json'}, - proxies=None + proxies=None, + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -447,7 +465,9 @@ def test_rocketchat_proxy(): rule['rocket_chat_webhook_url'], data=mock.ANY, headers={'content-type': 'application/json'}, - proxies={'https': rule['rocket_chat_proxy']} + proxies={'https': rule['rocket_chat_proxy']}, + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -513,7 +533,9 @@ def test_rocketchat_alert_fields(): rule['rocket_chat_webhook_url'], data=mock.ANY, headers={'content-type': 'application/json'}, - proxies=None + proxies=None, + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -677,7 +699,9 @@ def test_rocket_chat_attach_kibana_discover_url_when_generated(): rule['rocket_chat_webhook_url'], data=mock.ANY, headers={'content-type': 'application/json'}, - 
proxies=None
+        proxies=None,
+        timeout=10,
+        verify=True
     )
     assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
@@ -722,7 +746,9 @@ def test_rocket_chat_attach_kibana_discover_url_when_not_generated():
         rule['rocket_chat_webhook_url'],
         data=mock.ANY,
         headers={'content-type': 'application/json'},
-        proxies=None
+        proxies=None,
+        timeout=10,
+        verify=True
     )
     assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
@@ -774,7 +800,9 @@ def test_rocket_chat_kibana_discover_title():
         rule['rocket_chat_webhook_url'],
         data=mock.ANY,
         headers={'content-type': 'application/json'},
-        proxies=None
+        proxies=None,
+        timeout=10,
+        verify=True
     )
     assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
@@ -826,6 +854,112 @@ def test_rocket_chat_kibana_discover_color():
         rule['rocket_chat_webhook_url'],
         data=mock.ANY,
         headers={'content-type': 'application/json'},
-        proxies=None
+        proxies=None,
+        timeout=10,
+        verify=True
+    )
+    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
+
+
+@pytest.mark.parametrize('ca_certs, ignore_ssl_errors, expect_verify', [
+    ('', '', True),
+    ('', True, False),
+    ('', False, True),
+    (True, '', True),
+    (True, True, True),
+    (True, False, True),
+    (False, '', True),
+    (False, True, False),
+    (False, False, True)
+])
+def test_rocket_chat_ca_certs(ca_certs, ignore_ssl_errors, expect_verify):
+    rule = {
+        'name': 'Test Rule',
+        'type': 'any',
+        'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat',
+        'alert_subject': 'Cool subject',
+        'alert': []
+    }
+    if ca_certs:
+        rule['rocket_chat_ca_certs'] = ca_certs
+
+    if ignore_ssl_errors:
+        rule['rocket_chat_ignore_ssl_errors'] = ignore_ssl_errors
+
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = RocketChatAlerter(rule)
+    match = {
+        '@timestamp': '2017-01-01T00:00:00',
+        'somefield': 'foobarbaz'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        'username': 'elastalert2',
+        'channel': '',
+        'emoji': ':ghost:',
+        'attachments': [
+            {
+                'color': 'danger',
+                'title': 'Cool subject',
+                'text': BasicMatchString(rule, match).__str__(),
+                'fields': []
+            }
+        ],
+        'text': ''
+    }
+    mock_post_request.assert_called_once_with(
+        rule['rocket_chat_webhook_url'],
+        data=mock.ANY,
+        headers={'content-type': 'application/json'},
+        proxies=None,
+        verify=expect_verify,
+        timeout=10
+    )
+    assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
+
+
+def test_rocket_chat_uses_custom_timeout():
+    rule = {
+        'name': 'Test Rule',
+        'type': 'any',
+        'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat',
+        'alert_subject': 'Cool subject',
+        'alert': [],
+        'rocket_chat_timeout': 20
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = RocketChatAlerter(rule)
+    match = {
+        '@timestamp': '2016-01-01T00:00:00',
+        'somefield': 'foobarbaz'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    expected_data = {
+        'username': 'elastalert2',
+        'channel': '',
+        'emoji': ':ghost:',
+        'attachments': [
+            {
+                'color': 'danger',
+                'title': 'Cool subject',
+                'text': BasicMatchString(rule, match).__str__(),
+                'fields': []
+            }
+        ],
+        'text': ''
+    }
+    mock_post_request.assert_called_once_with(
+        rule['rocket_chat_webhook_url'],
+        data=mock.ANY,
+        headers={'content-type': 'application/json'},
+        proxies=None,
+        verify=True,
+        timeout=20
+    )
+    assert expected_data ==
json.loads(mock_post_request.call_args_list[0][1]['data']) From 1b6dc8c7dfa04b63c2b671a44b8c1431fed10065 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Sun, 27 Jun 2021 21:17:28 +0900 Subject: [PATCH 0340/1065] Added Rocket.Chat settings to schema.yaml rocket_chat_ca_certs rocket_chat_ignore_ssl_errors rocket_chat_timeout --- elastalert/schema.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 02c4f3f81..0bbb22a91 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -490,6 +490,9 @@ properties: rocket_chat_attach_kibana_discover_url {type: boolean} rocket_chat_kibana_discover_color {type: string} rocket_chat_kibana_discover_title {type: string} + rocket_chat_ca_certs: {type: boolean} + rocket_chat_ignore_ssl_errors: {type: boolean} + rocket_chat_timeout: {type: integer} ### ServiceNow servicenow_rest_url: {type: string} From d85e43536be0c54ac8772608226cbfb1a6fd718f Mon Sep 17 00:00:00 2001 From: Jeff Ashton Date: Mon, 28 Jun 2021 11:40:54 -0400 Subject: [PATCH 0341/1065] Reducing the unused calls to elasticsearch_client --- elastalert/elastalert.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 3631ffa37..3116fa871 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -859,6 +859,14 @@ def enhance_filter(self, rule): filters.append({'query': query_str_filter}) elastalert_logger.debug("Enhanced filter with {} terms: {}".format(listname, str(query_str_filter))) + def get_elasticsearch_client(self, rule): + key = rule['name'] + es_client = self.es_clients.get(key) + if es_client is None: + es_client = elasticsearch_client(rule) + self.es_clients[key] = es_client + return es_client + def run_rule(self, rule, endtime, starttime=None): """ Run a rule for a given time period, including querying and alerting on results. @@ -868,7 +876,7 @@ def run_rule(self, rule, endtime, starttime=None): :return: The number of matches that the rule produced. 
""" run_start = time.time() - self.thread_data.current_es = self.es_clients.setdefault(rule['name'], elasticsearch_client(rule)) + self.thread_data.current_es = self.get_elasticsearch_client(rule) # If there are pending aggregate matches, try processing them for x in range(len(rule['agg_matches'])): From 95748220fabceaf79a92ea3d045a36da56feb768 Mon Sep 17 00:00:00 2001 From: Jeff Ashton Date: Mon, 28 Jun 2021 12:25:04 -0400 Subject: [PATCH 0342/1065] Adding tests for get_elasticsearch_client --- tests/base_test.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/base_test.py b/tests/base_test.py index 7bcf3f48c..785befd58 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -1426,3 +1426,20 @@ def test_add_aggregated_alert_error(ea, caplog): exceptd = "[add_aggregated_alert]" exceptd += "Error parsing aggregate send time format unsupported operand type(s) for +: 'datetime.datetime' and 'dict'" assert exceptd in message + + +def test_get_elasticsearch_client_same_rule(ea): + x = ea.get_elasticsearch_client(ea.rules[0]) + y = ea.get_elasticsearch_client(ea.rules[0]) + assert x is y, "Should return same client for the same rule" + + +def test_get_elasticsearch_client_different_rule(ea): + x_rule = ea.rules[0] + x = ea.get_elasticsearch_client(x_rule) + + y_rule = copy.copy(x_rule) + y_rule['name'] = 'different_rule' + y = ea.get_elasticsearch_client(y_rule) + + assert x is not y, 'Should return unique client for each rule' From 048da33330fe8f1d618c60b818c0121068dc3f93 Mon Sep 17 00:00:00 2001 From: Jeff Ashton Date: Mon, 28 Jun 2021 15:12:22 -0400 Subject: [PATCH 0343/1065] Fixing zabbix timezone code --- elastalert/alerters/zabbix.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/elastalert/alerters/zabbix.py b/elastalert/alerters/zabbix.py index 558e9baa4..214bdb2c4 100644 --- a/elastalert/alerters/zabbix.py +++ b/elastalert/alerters/zabbix.py @@ -57,7 +57,7 @@ def __init__(self, *args): self.zbx_key = self.rule.get('zbx_key', None) self.timestamp_field = self.rule.get('timestamp_field', '@timestamp') self.timestamp_type = self.rule.get('timestamp_type', 'iso') - self.timestamp_strptime = self.rule.get('timestamp_strptime', '%Y-%m-%dT%H:%M:%S.%fZ') + self.timestamp_strptime = self.rule.get('timestamp_strptime', '%Y-%m-%dT%H:%M:%S.%f%z') # Alert is called def alert(self, matches): @@ -72,10 +72,10 @@ def alert(self, matches): else: try: ts_epoch = int(datetime.strptime(match[self.timestamp_field], self.timestamp_strptime) - .strftime('%s')) + .timestamp()) except ValueError: - ts_epoch = int(datetime.strptime(match[self.timestamp_field], '%Y-%m-%dT%H:%M:%SZ') - .strftime('%s')) + ts_epoch = int(datetime.strptime(match[self.timestamp_field], '%Y-%m-%dT%H:%M:%S%z') + .timestamp()) zm.append(ZabbixMetric(host=self.zbx_host, key=self.zbx_key, value='1', clock=ts_epoch)) try: From fefec6dd4601a7848e8709485354107364d77dd5 Mon Sep 17 00:00:00 2001 From: Jeff Ashton Date: Tue, 29 Jun 2021 09:29:19 -0400 Subject: [PATCH 0344/1065] Naming background scheduler jobs --- elastalert/elastalert.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 3116fa871..5cc1ab44a 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -1056,6 +1056,7 @@ def init_rule(self, new_rule, new=True): args=[new_rule], seconds=new_rule['run_every'].total_seconds(), id=new_rule['name'], + name="Rule: %s" % (new_rule['name']), max_instances=1, jitter=5) 
job.modify(next_run_time=datetime.datetime.now() + datetime.timedelta(seconds=random.randint(0, 15))) @@ -1188,9 +1189,13 @@ def start(self): self.running = True elastalert_logger.info("Starting up") self.scheduler.add_job(self.handle_pending_alerts, 'interval', - seconds=self.run_every.total_seconds(), id='_internal_handle_pending_alerts') + seconds=self.run_every.total_seconds(), + id='_internal_handle_pending_alerts', + name='Internal: Handle Pending Alerts') self.scheduler.add_job(self.handle_config_change, 'interval', - seconds=self.run_every.total_seconds(), id='_internal_handle_config_change') + seconds=self.run_every.total_seconds(), + id='_internal_handle_config_change', + name='Internal: Handle Config Change') self.scheduler.start() while self.running: next_run = datetime.datetime.utcnow() + self.run_every From c8b23d4e08db0c6d2334e01e537129510a74724e Mon Sep 17 00:00:00 2001 From: Cedric Charest Date: Tue, 29 Jun 2021 16:57:33 -0400 Subject: [PATCH 0345/1065] Fix #310 --- elastalert/alerts.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 07ee6a802..3a699d8d4 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -35,9 +35,9 @@ def _add_custom_alert_text(self): # Top fields are accessible via `{{field_name}}` or `{{jinja_root_name['field_name']}}` # `jinja_root_name` dict is useful when accessing *fields with dots in their keys*, # as Jinja treat dot as a nested field. - alert_text = self.rule.get("jinja_template").render(**self.match, **self.rule, - **{self.rule['jinja_root_name']: {**self.match, - **self.rule}}) + alert_text = self.rule.get("jinja_template").render(**{**self.rule, **self.match}, + **{self.rule['jinja_root_name']: {**self.rule, + **self.match}}) elif 'alert_text_args' in self.rule: alert_text_args = self.rule.get('alert_text_args') alert_text_values = [lookup_es_key(self.match, arg) for arg in alert_text_args] From a36ee3431a8ca58fd18dac78144a3936e77ca514 Mon Sep 17 00:00:00 2001 From: Konstantin Kornienko Date: Wed, 30 Jun 2021 13:01:12 +0300 Subject: [PATCH 0346/1065] [Opsgenie] Added possibility to specify source and entity attrs. Priority attr can now be templated from first match --- docs/source/ruletypes.rst | 6 ++- elastalert/alerters/opsgenie.py | 18 ++++++--- elastalert/schema.yaml | 2 + .../rules/example_opsgenie_frequency.yaml | 10 ++++- tests/alerters/opsgenie_test.py | 38 +++++++++++++++++-- 5 files changed, 62 insertions(+), 12 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 2032622a2..b585a9528 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -2340,12 +2340,16 @@ Optional: ``opsgenie_subject_args``: A list of fields to use to format ``opsgenie_subject`` if it contains formaters. -``opsgenie_priority``: Set the OpsGenie priority level. Possible values are P1, P2, P3, P4, P5. +``opsgenie_priority``: Set the OpsGenie priority level. Possible values are P1, P2, P3, P4, P5. Can be formatted with fields from the first match e.g "P{level}" ``opsgenie_details``: Map of custom key/value pairs to include in the alert's details. The value can sourced from either fields in the first match, environment variables, or a constant value. ``opsgenie_proxy``: By default ElastAlert will not use a network proxy to send notifications to OpsGenie. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. +``opsgenie_source``: Set the OpsGenie source, default is `ElastAlert`. 
Can be formatted with fields from the first match e.g "{source} {region}" + +``opsgenie_entity``: Set the OpsGenie entity. Can be formatted with fields from the first match e.g "{host_name}" + Example usage:: opsgenie_details: diff --git a/elastalert/alerters/opsgenie.py b/elastalert/alerters/opsgenie.py index bc241f364..87e160b58 100644 --- a/elastalert/alerters/opsgenie.py +++ b/elastalert/alerters/opsgenie.py @@ -4,7 +4,7 @@ import requests from elastalert.alerts import Alerter, BasicMatchString -from elastalert.util import EAException, elastalert_logger, lookup_es_key +from elastalert.util import EAException, elastalert_logger, lookup_es_key, resolve_string class OpsGenieAlerter(Alerter): @@ -30,6 +30,8 @@ def __init__(self, *args): self.opsgenie_proxy = self.rule.get('opsgenie_proxy', None) self.priority = self.rule.get('opsgenie_priority') self.opsgenie_details = self.rule.get('opsgenie_details', {}) + self.entity = self.rule.get('opsgenie_entity', None) + self.source = self.rule.get('opsgenie_source', 'ElastAlert') def _parse_responders(self, responders, responder_args, matches, default_responders): if responder_args: @@ -76,17 +78,23 @@ def alert(self, matches): if self.teams: post['teams'] = [{'name': r, 'type': 'team'} for r in self.teams] post['description'] = body - post['source'] = 'ElastAlert' + if self.entity: + post['entity'] = self.entity.format(**matches[0]) + if self.source: + post['source'] = self.source.format(**matches[0]) for i, tag in enumerate(self.tags): self.tags[i] = tag.format(**matches[0]) post['tags'] = self.tags - if self.priority and self.priority not in ('P1', 'P2', 'P3', 'P4', 'P5'): + priority = self.priority + if priority: + priority = self.priority.format(**matches[0]) + if priority and priority not in ('P1', 'P2', 'P3', 'P4', 'P5'): elastalert_logger.warning("Priority level does not appear to be specified correctly. 
\
- Please make sure to set it to a value between P1 and P5")
+ Please make sure to set it to a value between P1 and P5")
         else:
-            post['priority'] = self.priority
+            post['priority'] = priority
 
         if self.alias is not None:
             post['alias'] = self.alias.format(**matches[0])
diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml
index 0bbb22a91..dcf06729f 100644
--- a/elastalert/schema.yaml
+++ b/elastalert/schema.yaml
@@ -443,6 +443,8 @@ properties:
     opsgenie_subject: {type: string}
     opsgenie_priority: {type: string}
     opsgenie_proxy: {type: string}
+    opsgenie_source: {type: string}
+    opsgenie_entity: {type: string}
     opsgenie_details:
         type: object
         minProperties: 1
diff --git a/examples/rules/example_opsgenie_frequency.yaml b/examples/rules/example_opsgenie_frequency.yaml
index 9876f9162..6996207a8 100755
--- a/examples/rules/example_opsgenie_frequency.yaml
+++ b/examples/rules/example_opsgenie_frequency.yaml
@@ -24,7 +24,7 @@ opsgenie_key: ogkey
 # (Optional)
 # OpsGenie recipients with args
 # opsgenie_recipients:
-# - {recipient}
+# - {recipient}
 # opsgenie_recipients_args:
 # team_prefix:'user.email'
 
@@ -36,7 +36,7 @@ opsgenie_key: ogkey
 # (Optional)
 # OpsGenie teams with args
 # opsgenie_teams:
-# - {team_prefix}-Team
+# - {team_prefix}-Team
 # opsgenie_teams_args:
 # team_prefix:'team'
 
@@ -45,6 +45,12 @@ opsgenie_key: ogkey
 opsgenie_tags:
   - "Production"
 
+# (Optional) OpsGenie source
+# opsgenie_source: ElastAlert_EMEA
+
+# (Optional) OpsGenie entity
+# opsgenie_entity: '{hostname}'
+
 # (OptionaL) Connect with SSL to Elasticsearch
 #use_ssl: True
 
diff --git a/tests/alerters/opsgenie_test.py b/tests/alerters/opsgenie_test.py
index 4dc56416c..65ce7bcc0 100644
--- a/tests/alerters/opsgenie_test.py
+++ b/tests/alerters/opsgenie_test.py
@@ -36,7 +36,7 @@ def test_opsgenie_basic(caplog):
     assert mock_post.called
     assert mcal[0][1]['headers']['Authorization'] == 'GenieKey ogkey'
-    assert mcal[0][1]['json']['source'] == 'ElastAlert'
+    # Should be default source 'ElastAlert', because 'opsgenie_source' param isn't set in rule
     assert mcal[0][1]['json']['source'] == 'ElastAlert'
     user, level, message = caplog.record_tuples[0]
     assert "Error response from https://api.opsgenie.com/v2/alerts \n API Response:
Date: Wed, 30 Jun 2021 16:28:33 +0200
Subject: [PATCH 0348/1065] Force observable artifacts to be strings in TheHive

---
 elastalert/alerters/thehive.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/elastalert/alerters/thehive.py b/elastalert/alerters/thehive.py
index e3aa1a029..1d519ddfd 100644
--- a/elastalert/alerters/thehive.py
+++ b/elastalert/alerters/thehive.py
@@ -34,7 +34,7 @@ def load_observable_artifacts(self, match: dict):
         artifacts = []
         for mapping in self.rule.get('hive_observable_data_mapping', []):
             for observable_type, mapping_key in mapping.items():
-                data = self.lookup_field(match, mapping_key, '')
+                data = str(self.lookup_field(match, mapping_key, ''))
                 if len(data) != 0:
                     artifact = {'tlp': 2,
                                 'tags': [],

From 70d09ec81e783293eb265b26089ab9bc6f84cadf Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Wed, 30 Jun 2021 23:34:46 +0900
Subject: [PATCH 0349/1065] Update pylint requirement from <2.9 to <2.10

---
 requirements-dev.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements-dev.txt b/requirements-dev.txt
index 7abc77409..fb849ad4e 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -5,7 +5,7 @@ flake8-absolute-import
 m2r2
 pluggy>=0.12.0
 pre-commit
-pylint<2.9
+pylint<2.10
 pytest==6.2.4
 pytest-cov==2.12.1
 pytest-xdist==2.3.0

From
d0377652cdb55b39fb73f4fa8dfb46650ca69666 Mon Sep 17 00:00:00 2001
From: Andrea Pavan
Date: Wed, 30 Jun 2021 18:03:07 +0200
Subject: [PATCH 0350/1065] Update TheHive alerter unit test to validate the fix

---
 tests/alerters/thehive_test.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/tests/alerters/thehive_test.py b/tests/alerters/thehive_test.py
index 751ee4190..9a14f16ac 100644
--- a/tests/alerters/thehive_test.py
+++ b/tests/alerters/thehive_test.py
@@ -29,7 +29,7 @@ def test_thehive_alerter(caplog):
         'hive_connection': {'hive_apikey': '',
                             'hive_host': 'https://localhost',
                             'hive_port': 9000},
-        'hive_observable_data_mapping': [{'ip': 'test.ip'}],
+        'hive_observable_data_mapping': [{'ip': 'test.ip', 'autonomous-system': 'test.as_number'}],
         'name': 'test-thehive',
         'tags': ['a', 'b'],
         'type': 'any'}
@@ -39,7 +39,8 @@ def test_thehive_alerter(caplog):
     match = {
         "test": {
             "ip": "127.0.0.1",
-            "port": 9876
+            "port": 9876,
+            "as_number": 1234
         },
         "@timestamp": "2021-05-09T14:43:30",
     }
@@ -54,6 +55,13 @@
                 "message": None,
                 "tags": [],
                 "tlp": 2
+            },
+            {
+                "data": "1234",
+                "dataType": "autonomous-system",
+                "message": None,
+                "tags": [],
+                "tlp": 2
             }
         ],
         "customFields": {

From d4822d2dad7439f5bf9a98da1e5fc3f38a4ab010 Mon Sep 17 00:00:00 2001
From: Jeremy Randolph
Date: Wed, 30 Jun 2021 09:35:24 -0700
Subject: [PATCH 0351/1065] Add impact and urgency as optional params for the servicenow alerter

---
 CHANGELOG.md                      |  1 +
 docs/source/ruletypes.rst         |  4 ++++
 elastalert/alerters/servicenow.py |  6 ++++++
 elastalert/schema.yaml            |  2 ++
 tests/alerters/servicenow_test.py | 33 +++++++++++++++++++++++++++++++
 5 files changed, 46 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 252e92c7b..b6df4f8ae 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -35,6 +35,7 @@
 - Improve structure and placement of test-related files in project tree - [#287](https://github.com/jertel/elastalert2/pull/287) - @ferozsalam
 - Only attempt to adjust timezone if timezone is set to a non-empty string - [#288](https://github.com/jertel/elastalert2/pull/288) - @ferozsalam
 - Deprecated `podSecurityPolicy` feature in Helm Chart as [it's deprecated in Kubernetes 1.21](https://kubernetes.io/blog/2021/04/06/podsecuritypolicy-deprecation-past-present-and-future/) - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse
+- Add support for `impact` and `urgency` parameters for Servicenow alerter

 # 2.1.1

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 2032622a2..98b2b5e18 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2566,6 +2566,10 @@ Optional:

 ``servicenow_proxy``: By default ElastAlert will not use a network proxy to send notifications to ServiceNow. Set this option using ``hostname:port`` if you need to use a proxy. only supports https.

+``impact``: An integer 1, 2, or 3 representing high, medium, and low respectively. This measures the effect of an incident on business processes.
+
+``urgency``: An integer 1, 2, or 3 representing high, medium, and low respectively. This measures how long this incident can be delayed until there is a significant business impact.
+
 Example usage::

     alert:
diff --git a/elastalert/alerters/servicenow.py b/elastalert/alerters/servicenow.py
index 811fbecb9..a972ea8c5 100644
--- a/elastalert/alerters/servicenow.py
+++ b/elastalert/alerters/servicenow.py
@@ -26,6 +26,8 @@ def __init__(self, rule):
         super(ServiceNowAlerter, self).__init__(rule)
         self.servicenow_rest_url = self.rule.get('servicenow_rest_url', None)
         self.servicenow_proxy = self.rule.get('servicenow_proxy', None)
+        self.impact = self.rule.get('impact', None)
+        self.urgency = self.rule.get('urgency', None)

     def alert(self, matches):
         for match in matches:
@@ -48,6 +50,10 @@ def alert(self, matches):
             "cmdb_ci": self.rule['cmdb_ci'],
             "caller_id": self.rule["caller_id"]
         }
+        if self.impact is not None:
+            payload["impact"] = self.impact
+        if self.urgency is not None:
+            payload["urgency"] = self.urgency
         try:
             response = requests.post(
                 self.servicenow_rest_url,
diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml
index 0bbb22a91..0941c66bc 100644
--- a/elastalert/schema.yaml
+++ b/elastalert/schema.yaml
@@ -506,6 +506,8 @@ properties:
     cmdb_ci: {type: string}
     caller_id: {type: string}
     servicenow_proxy: {type: string}
+    impact: {type: integer}
+    urgency: {type: integer}

     ### Slack
     slack_webhook_url: *arrayOfString
diff --git a/tests/alerters/servicenow_test.py b/tests/alerters/servicenow_test.py
index 6a873aa5c..055477d5b 100644
--- a/tests/alerters/servicenow_test.py
+++ b/tests/alerters/servicenow_test.py
@@ -117,6 +117,39 @@ def test_service_now_proxy():
     assert expected_data == actual_data


+def test_service_now_impact_and_urgency():
+    rule = {
+        'name': 'Test ServiceNow Rule',
+        'type': 'any',
+        'username': 'ServiceNow username',
+        'password': 'ServiceNow password',
+        'servicenow_rest_url': 'https://xxxxxxxxxx',
+        'short_description': 'ServiceNow short_description',
+        'comments': 'ServiceNow comments',
+        'assignment_group': 'ServiceNow assignment_group',
+        'category': 'ServiceNow category',
+        'subcategory': 'ServiceNow subcategory',
+        'cmdb_ci': 'ServiceNow cmdb_ci',
+        'caller_id': 'ServiceNow caller_id',
+        'impact': '3',
+        'urgency': '1',
+        'alert': []
+    }
+    rules_loader = FileRulesLoader({})
+    rules_loader.load_modules(rule)
+    alert = ServiceNowAlerter(rule)
+    match = {
+        '@timestamp': '2021-01-01T00:00:00',
+        'somefield': 'foobarbaz'
+    }
+    with mock.patch('requests.post') as mock_post_request:
+        alert.alert([match])
+
+    data = json.loads(mock_post_request.call_args_list[0][1]['data'])
+    assert data['impact'] == rule['impact']
+    assert data['urgency'] == rule['urgency']
+
+
 def test_service_now_ea_exception():

From a72cf04c91c7414a436bfcf1914d260edd7a867f Mon Sep 17 00:00:00 2001
From: Jeremy Randolph
Date: Wed, 30 Jun 2021 10:52:41 -0700
Subject: [PATCH 0352/1065] Prefix new parameters with "servicenow"

---
 CHANGELOG.md                      | 2 +-
 docs/source/ruletypes.rst         | 6 ++++--
 elastalert/alerters/servicenow.py | 4 ++--
 elastalert/schema.yaml            | 4 ++--
 tests/alerters/servicenow_test.py | 8 ++++----
 5 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index b6df4f8ae..3937c1717 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -35,7 +35,7 @@
 - Improve structure and placement of test-related files in project tree - [#287](https://github.com/jertel/elastalert2/pull/287) - @ferozsalam
 - Only attempt to adjust timezone if timezone is set to a non-empty string - [#288](https://github.com/jertel/elastalert2/pull/288) - @ferozsalam
 - Deprecated `podSecurityPolicy` feature in Helm Chart as [it's deprecated in
Kubernetes 1.21](https://kubernetes.io/blog/2021/04/06/podsecuritypolicy-deprecation-past-present-and-future/) - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse
-- Add support for `impact` and `urgency` parameters for Servicenow alerter
+- Add support for `servicenow_impact` and `servicenow_urgency` parameters for Servicenow alerter

 # 2.1.1

diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst
index 98b2b5e18..1de30e71d 100644
--- a/docs/source/ruletypes.rst
+++ b/docs/source/ruletypes.rst
@@ -2566,9 +2566,9 @@ Optional:

 ``servicenow_proxy``: By default ElastAlert will not use a network proxy to send notifications to ServiceNow. Set this option using ``hostname:port`` if you need to use a proxy. only supports https.

-``impact``: An integer 1, 2, or 3 representing high, medium, and low respectively. This measures the effect of an incident on business processes.
+``servicenow_impact``: An integer 1, 2, or 3 representing high, medium, and low respectively. This measures the effect of an incident on business processes.

-``urgency``: An integer 1, 2, or 3 representing high, medium, and low respectively. This measures how long this incident can be delayed until there is a significant business impact.
+``servicenow_urgency``: An integer 1, 2, or 3 representing high, medium, and low respectively. This measures how long this incident can be delayed until there is a significant business impact.

 Example usage::

@@ -2584,6 +2584,8 @@ Example usage::
         subcategory: "xxxxxx"
         cmdb_ci: "xxxxxx"
         caller_id: "xxxxxx"
+        servicenow_impact: 1
+        servicenow_urgency: 3

 Slack
 ~~~~~

diff --git a/elastalert/alerters/servicenow.py b/elastalert/alerters/servicenow.py
index a972ea8c5..39d66af75 100644
--- a/elastalert/alerters/servicenow.py
+++ b/elastalert/alerters/servicenow.py
@@ -26,8 +26,8 @@ def __init__(self, rule):
         super(ServiceNowAlerter, self).__init__(rule)
         self.servicenow_rest_url = self.rule.get('servicenow_rest_url', None)
         self.servicenow_proxy = self.rule.get('servicenow_proxy', None)
-        self.impact = self.rule.get('impact', None)
-        self.urgency = self.rule.get('urgency', None)
+        self.impact = self.rule.get('servicenow_impact', None)
+        self.urgency = self.rule.get('servicenow_urgency', None)

     def alert(self, matches):
diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml
index 0941c66bc..cc608a099 100644
--- a/elastalert/schema.yaml
+++ b/elastalert/schema.yaml
@@ -506,8 +506,8 @@ properties:
     cmdb_ci: {type: string}
     caller_id: {type: string}
     servicenow_proxy: {type: string}
-    impact: {type: integer}
-    urgency: {type: integer}
+    servicenow_impact: {type: integer}
+    servicenow_urgency: {type: integer}

     ### Slack
     slack_webhook_url: *arrayOfString
diff --git a/tests/alerters/servicenow_test.py b/tests/alerters/servicenow_test.py
index 055477d5b..123172019 100644
--- a/tests/alerters/servicenow_test.py
+++ b/tests/alerters/servicenow_test.py
@@ -131,8 +131,8 @@ def test_service_now_impact_and_urgency():
         'subcategory': 'ServiceNow subcategory',
         'cmdb_ci': 'ServiceNow cmdb_ci',
         'caller_id': 'ServiceNow caller_id',
-        'impact': '3',
-        'urgency': '1',
+        'servicenow_impact': 3,
+        'servicenow_urgency': 1,
         'alert': []
     }
     rules_loader = FileRulesLoader({})
@@ -146,8 +146,8 @@ def test_service_now_impact_and_urgency():
         alert.alert([match])

     data = json.loads(mock_post_request.call_args_list[0][1]['data'])
-    assert data['impact'] == rule['impact']
-    assert data['urgency'] == rule['urgency']
+    assert data['impact'] == rule['servicenow_impact']
+    assert
data['urgency'] == rule['servicenow_urgency'] def test_service_now_ea_exception(): From b4b0c2a1f8fe7326ff554d0ed70521c193650958 Mon Sep 17 00:00:00 2001 From: Cedric Charest Date: Wed, 30 Jun 2021 14:33:10 -0400 Subject: [PATCH 0353/1065] Jinja 3.0.1, alert_text_jinja tests, fix #310 --- elastalert/alerts.py | 6 +++--- requirements.txt | 2 +- tests/alerts_test.py | 28 +++++++++++++++++++++++++++- 3 files changed, 31 insertions(+), 5 deletions(-) diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 3a699d8d4..773c4c3e7 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -35,9 +35,9 @@ def _add_custom_alert_text(self): # Top fields are accessible via `{{field_name}}` or `{{jinja_root_name['field_name']}}` # `jinja_root_name` dict is useful when accessing *fields with dots in their keys*, # as Jinja treat dot as a nested field. - alert_text = self.rule.get("jinja_template").render(**{**self.rule, **self.match}, - **{self.rule['jinja_root_name']: {**self.rule, - **self.match}}) + template_values = self.rule | self.match + alert_text = self.rule.get("jinja_template").render( + template_values | {self.rule['jinja_root_name']: template_values}) elif 'alert_text_args' in self.rule: alert_text_args = self.rule.get('alert_text_args') alert_text_values = [lookup_es_key(self.match, arg) for arg in alert_text_args] diff --git a/requirements.txt b/requirements.txt index 797f19871..9643e1ebf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,7 +7,7 @@ croniter>=0.3.16 elasticsearch==7.0.0 envparse>=0.2.0 exotel>=0.1.3 -Jinja2==2.11.3 +Jinja2==3.0.1 jira>=2.0.0 jsonschema>=3.0.2 prison>=0.1.2 diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 6bcbf6163..eebd95369 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- import datetime import json - from unittest import mock +from jinja2 import Template from elastalert.alerts import Alerter from elastalert.alerts import BasicMatchString @@ -157,6 +157,32 @@ def test_alert_text_kw_global_substitution(ea): assert 'Abc: abc from match' in alert_text +def test_alert_text_jinja(ea): + rule = ea.rules[0].copy() + rule['foo_rule'] = 'foo from rule' + rule['owner'] = 'the owner from rule' + rule['abc'] = 'abc from rule' + rule['alert_text'] = 'Owner: {{owner}}; Foo: {{_data["foo_rule"]}}; Abc: {{abc}}; Xyz: {{_data["xyz"]}}' + rule['alert_text_type'] = "alert_text_jinja" + rule['jinja_root_name'] = "_data" + rule['jinja_template'] = Template(str(rule['alert_text'])) + + match = { + '@timestamp': '2016-01-01', + 'field': 'field_value', + 'abc': 'abc from match', + 'xyz': 'from match' + } + + alert_text = str(BasicMatchString(rule, match)) + assert 'Owner: the owner from rule' in alert_text + assert 'Foo: foo from rule' in alert_text + assert 'Xyz: from match' in alert_text + + # When the key exists in both places, it will come from the match + assert 'Abc: abc from match' in alert_text + + def test_resolving_rule_references(): rule = { 'name': 'test_rule', From 77365e4f9441971af8f7f428bb19088ebabc64cb Mon Sep 17 00:00:00 2001 From: Cedric Charest Date: Wed, 30 Jun 2021 14:45:08 -0400 Subject: [PATCH 0354/1065] Changelog for #311 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 252e92c7b..461d791cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,6 +35,7 @@ - Improve structure and placement of test-related files in project tree - [#287](https://github.com/jertel/elastalert2/pull/287) - @ferozsalam - Only attempt to adjust 
timezone if timezone is set to a non-empty string - [#288](https://github.com/jertel/elastalert2/pull/288) - @ferozsalam - Deprecated `podSecurityPolicy` feature in Helm Chart as [it's deprecated in Kubernetes 1.21](https://kubernetes.io/blog/2021/04/06/podsecuritypolicy-deprecation-past-present-and-future/) - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse +- Add alert_text_jinja test - [#311](https://github.com/jertel/elastalert2/pull/311) - @mrfroggg # 2.1.1 From 794891e0cf53336dbcc677dc526d299bff0b20c6 Mon Sep 17 00:00:00 2001 From: Jeremy Randolph Date: Wed, 30 Jun 2021 13:46:45 -0700 Subject: [PATCH 0355/1065] Include min and max bounds for impact and urgency --- elastalert/schema.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index cc608a099..94aea12ef 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -506,8 +506,8 @@ properties: cmdb_ci: {type: string} caller_id: {type: string} servicenow_proxy: {type: string} - servicenow_impact: {type: integer} - servicenow_urgency: {type: integer} + servicenow_impact: {type: integer, minimum: 1, maximum: 3} + servicenow_urgency: {type: integer, minimum: 1, maximum: 3} ### Slack slack_webhook_url: *arrayOfString From 87be723453388f491df82811cfc17aba2cb4b667 Mon Sep 17 00:00:00 2001 From: Cedric Charest Date: Wed, 30 Jun 2021 17:17:57 -0400 Subject: [PATCH 0356/1065] Fix #317 - Add support jinja2 support to alert_subject --- docs/source/ruletypes.rst | 2 +- elastalert/alerts.py | 6 +++++- tests/alerts_test.py | 22 ++++++++++++++++++++++ 3 files changed, 28 insertions(+), 2 deletions(-) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index 2032622a2..decabfc06 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -1435,7 +1435,7 @@ Similarly to ``alert_subject``, ``alert_text`` can be further formatted using Ji 1. Jinja Template -By setting ``alert_text_type: alert_text_jinja`` you can use jinja2 templates in ``alert_text``. :: +By setting ``alert_text_type: alert_text_jinja`` you can use jinja2 templates in ``alert_text`` and ``alert_subject``. 
:: alert_text_type: alert_text_jinja diff --git a/elastalert/alerts.py b/elastalert/alerts.py index 773c4c3e7..034e48ae9 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -3,6 +3,7 @@ import json import os +from jinja2 import Template from texttable import Texttable from elastalert.util import EAException, lookup_es_key @@ -210,7 +211,10 @@ def create_custom_title(self, matches): missing = self.rule.get('alert_missing_value', '') alert_subject_values = [missing if val is None else val for val in alert_subject_values] alert_subject = alert_subject.format(*alert_subject_values) - + elif self.rule.get('alert_text_type') == "alert_text_jinja": + title_template = Template(str(self.rule.get('alert_subject', ''))) + template_values = self.rule | matches[0] + alert_subject = title_template.render(template_values | {self.rule['jinja_root_name']: template_values}) if len(alert_subject) > alert_subject_max_len: alert_subject = alert_subject[:alert_subject_max_len] diff --git a/tests/alerts_test.py b/tests/alerts_test.py index eebd95369..ed07f7b5f 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -274,3 +274,25 @@ def test_alert_subject_size_limit_with_args(ea): alert = Alerter(rule) alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) assert 6 == len(alertSubject) + + +def test_alert_subject_with_jinja(): + rule = { + 'name': 'test_rule', + 'type': mock_rule(), + 'owner': 'the_owner', + 'priority': 2, + 'alert_subject': 'Test alert for {{owner}}; field {{field}}; Abc: {{_data["abc"]}}', + 'alert_text_type': "alert_text_jinja", + 'jinja_root_name': "_data" + } + match = { + '@timestamp': '2016-01-01', + 'field': 'field_value', + 'abc': 'abc from match', + } + alert = Alerter(rule) + alertsubject = alert.create_custom_title([match]) + assert "Test alert for the_owner;" in alertsubject + assert "field field_value;" in alertsubject + assert "Abc: abc from match" in alertsubject From 79d8c107399ae5b89a964bd6e999c46781e8a538 Mon Sep 17 00:00:00 2001 From: Cedric Charest Date: Wed, 30 Jun 2021 17:39:28 -0400 Subject: [PATCH 0357/1065] Changelog update for #318 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 461d791cd..6534b0cb1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ ## New features - Add support for generating Kibana Discover URLs to Rocket.Chat alerter - [#260](https://github.com/jertel/elastalert2/pull/260) - @nsanorururu - Provide rule key/values as possible Jinja data inputs - [#281](https://github.com/jertel/elastalert2/pull/281) - @mrfroggg +- Add Jinja support to alert_subject - [#318](https://github.com/jertel/elastalert2/pull/318) - @mrfroggg - Add securityContext and podSecurityContext to Helm chart - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse ## Other changes From aac0dccb74eeb59bd5c5e85cb2886a14060925fa Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 1 Jul 2021 23:48:53 +0900 Subject: [PATCH 0358/1065] Support character limit for chatwork --- elastalert/alerters/chatwork.py | 12 ++++++++--- tests/alerters/chatwork_test.py | 38 +++++++++++++++++++++++++++++++-- 2 files changed, 45 insertions(+), 5 deletions(-) diff --git a/elastalert/alerters/chatwork.py b/elastalert/alerters/chatwork.py index 08bf62d57..acb508ea9 100644 --- a/elastalert/alerters/chatwork.py +++ b/elastalert/alerters/chatwork.py @@ -4,7 +4,7 @@ from requests import RequestException from requests.auth import HTTPProxyAuth -from 
elastalert.alerts import Alerter +from elastalert.alerts import Alerter, BasicMatchString from elastalert.util import EAException, elastalert_logger @@ -22,7 +22,14 @@ def __init__(self, rule): self.chatwork_proxy_pass = self.rule.get('chatwork_proxy_pass', None) def alert(self, matches): - body = self.create_alert_body(matches) + body = '' + for match in matches: + body += str(BasicMatchString(self.rule, match)) + if len(matches) > 1: + body += '\n----------------------------------------\n' + if len(body) > 2047: + body = body[0:1950] + '\n *message was cropped according to chatwork embed description limits!* ' + body += '```' headers = {'X-ChatWorkToken': self.chatwork_apikey} # set https proxy, if it was provided @@ -32,7 +39,6 @@ def alert(self, matches): try: response = requests.post(self.url, params=params, headers=headers, proxies=proxies, auth=auth) - warnings.resetwarnings() response.raise_for_status() except RequestException as e: raise EAException("Error posting to Chattwork: %s. Details: %s" % (e, "" if e.response is None else e.response.text)) diff --git a/tests/alerters/chatwork_test.py b/tests/alerters/chatwork_test.py index 23279e2d3..52f43a098 100644 --- a/tests/alerters/chatwork_test.py +++ b/tests/alerters/chatwork_test.py @@ -30,7 +30,7 @@ def test_chatwork(caplog): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', } mock_post_request.assert_called_once_with( @@ -67,7 +67,7 @@ def test_chatwork_proxy(): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', } mock_post_request.assert_called_once_with( @@ -159,3 +159,37 @@ def test_chatwork_required_error(chatwork_apikey, chatwork_room_id, expected_dat assert expected_data == actual_data except Exception as ea: assert expected_data in str(ea) + + +def test_chatwork_maxlength(): + rule = { + 'name': 'Test Chatwork Rule' + ('a' * 2069), + 'type': 'any', + 'chatwork_apikey': 'xxxx1', + 'chatwork_room_id': 'xxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ChatworkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = { + 'body': 'Test Chatwork Rule' + ('a' * 1932) + + '\n *message was cropped according to chatwork embed description limits!* ```' + } + + mock_post_request.assert_called_once_with( + 'https://api.chatwork.com/v2/rooms/xxxx2/messages', + params=mock.ANY, + headers={'X-ChatWorkToken': 'xxxx1'}, + proxies=None, + auth=None + ) + + actual_data = mock_post_request.call_args_list[0][1]['params'] + assert expected_data == actual_data From 0da477d6dce809ce7630c2b793adaf1af563dce7 Mon Sep 17 00:00:00 2001 From: Konstantin Kornienko Date: Thu, 1 Jul 2021 18:27:29 +0300 Subject: [PATCH 0359/1065] removed unused import --- elastalert/alerters/opsgenie.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/alerters/opsgenie.py b/elastalert/alerters/opsgenie.py index b756c4723..f1610f662 100644 --- a/elastalert/alerters/opsgenie.py +++ 
b/elastalert/alerters/opsgenie.py @@ -4,7 +4,7 @@ import requests from elastalert.alerts import Alerter, BasicMatchString -from elastalert.util import EAException, elastalert_logger, lookup_es_key, resolve_string +from elastalert.util import EAException, elastalert_logger, lookup_es_key class OpsGenieAlerter(Alerter): From 101924df53c5e473a2efd5ba4aac75d44af72c77 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 2 Jul 2021 00:50:58 +0900 Subject: [PATCH 0360/1065] Support character limit for line notify --- elastalert/alerters/line.py | 11 +++++++++-- tests/alerters/line_test.py | 37 ++++++++++++++++++++++++++++++++++++- 2 files changed, 45 insertions(+), 3 deletions(-) diff --git a/elastalert/alerters/line.py b/elastalert/alerters/line.py index 41d1d4c92..7c0887288 100644 --- a/elastalert/alerters/line.py +++ b/elastalert/alerters/line.py @@ -1,7 +1,7 @@ import requests from requests import RequestException -from elastalert.alerts import Alerter +from elastalert.alerts import Alerter, BasicMatchString from elastalert.util import EAException, elastalert_logger @@ -14,7 +14,14 @@ def __init__(self, rule): self.linenotify_access_token = self.rule.get("linenotify_access_token", None) def alert(self, matches): - body = self.create_alert_body(matches) + body = '' + for match in matches: + body += str(BasicMatchString(self.rule, match)) + if len(matches) > 1: + body += '\n----------------------------------------\n' + if len(body) > 999: + body = body[0:900] + '\n *message was cropped according to line notify embed description limits!* ' + body += '```' # post to Line Notify headers = { "Content-Type": "application/x-www-form-urlencoded", diff --git a/tests/alerters/line_test.py b/tests/alerters/line_test.py index 2e8a2bafa..9943a71ea 100644 --- a/tests/alerters/line_test.py +++ b/tests/alerters/line_test.py @@ -29,7 +29,7 @@ def test_line_notify(caplog): alert.alert([match]) expected_data = { - 'message': 'Test LineNotify Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' + 'message': 'Test LineNotify Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```' } mock_post_request.assert_called_once_with( @@ -114,3 +114,38 @@ def test_line_required_error(linenotify_access_token, expected_data): assert expected_data == actual_data except Exception as ea: assert expected_data in str(ea) + + +def test_line_notify_maxlength(): + rule = { + 'name': 'Test LineNotify Rule' + ('a' * 1000), + 'type': 'any', + 'linenotify_access_token': 'xxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = LineNotifyAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'message': 'Test LineNotify Rule' + ('a' * 880) + + '\n *message was cropped according to line notify embed description limits!* ```' + } + + mock_post_request.assert_called_once_with( + 'https://notify-api.line.me/api/notify', + data=mock.ANY, + headers={ + 'Content-Type': 'application/x-www-form-urlencoded', + 'Authorization': 'Bearer {}'.format('xxxxx') + } + ) + + actual_data = mock_post_request.call_args_list[0][1]['data'] + assert expected_data == actual_data From d13ee8a71c66975d761126afd7220b53701ccd97 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 2 Jul 2021 03:01:20 +0900 Subject: [PATCH 0361/1065] Remove unnecessary code --- elastalert/alerters/line.py | 3 +-- tests/alerters/line_test.py | 4 ++-- 2 files changed, 3 
insertions(+), 4 deletions(-) diff --git a/elastalert/alerters/line.py b/elastalert/alerters/line.py index 7c0887288..e5e1a4433 100644 --- a/elastalert/alerters/line.py +++ b/elastalert/alerters/line.py @@ -20,8 +20,7 @@ def alert(self, matches): if len(matches) > 1: body += '\n----------------------------------------\n' if len(body) > 999: - body = body[0:900] + '\n *message was cropped according to line notify embed description limits!* ' - body += '```' + body = body[0:900] + '\n *message was cropped according to line notify embed description limits!*' # post to Line Notify headers = { "Content-Type": "application/x-www-form-urlencoded", diff --git a/tests/alerters/line_test.py b/tests/alerters/line_test.py index 9943a71ea..511bead03 100644 --- a/tests/alerters/line_test.py +++ b/tests/alerters/line_test.py @@ -29,7 +29,7 @@ def test_line_notify(caplog): alert.alert([match]) expected_data = { - 'message': 'Test LineNotify Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```' + 'message': 'Test LineNotify Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n' } mock_post_request.assert_called_once_with( @@ -135,7 +135,7 @@ def test_line_notify_maxlength(): expected_data = { 'message': 'Test LineNotify Rule' + ('a' * 880) + - '\n *message was cropped according to line notify embed description limits!* ```' + '\n *message was cropped according to line notify embed description limits!*' } mock_post_request.assert_called_once_with( From 19a75cec712ce8825fe20199473ecceedfc18d28 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 2 Jul 2021 03:04:17 +0900 Subject: [PATCH 0362/1065] Remove unnecessary code --- elastalert/alerters/chatwork.py | 4 +--- tests/alerters/chatwork_test.py | 6 +++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/elastalert/alerters/chatwork.py b/elastalert/alerters/chatwork.py index acb508ea9..ec1bc8031 100644 --- a/elastalert/alerters/chatwork.py +++ b/elastalert/alerters/chatwork.py @@ -28,9 +28,7 @@ def alert(self, matches): if len(matches) > 1: body += '\n----------------------------------------\n' if len(body) > 2047: - body = body[0:1950] + '\n *message was cropped according to chatwork embed description limits!* ' - body += '```' - + body = body[0:1950] + '\n *message was cropped according to chatwork embed description limits!*' headers = {'X-ChatWorkToken': self.chatwork_apikey} # set https proxy, if it was provided proxies = {'https': self.chatwork_proxy} if self.chatwork_proxy else None diff --git a/tests/alerters/chatwork_test.py b/tests/alerters/chatwork_test.py index 52f43a098..e74532abe 100644 --- a/tests/alerters/chatwork_test.py +++ b/tests/alerters/chatwork_test.py @@ -30,7 +30,7 @@ def test_chatwork(caplog): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', } mock_post_request.assert_called_once_with( @@ -67,7 +67,7 @@ def test_chatwork_proxy(): with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'body': 'Test Chatwork Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', } mock_post_request.assert_called_once_with( @@ -180,7 +180,7 @@ def test_chatwork_maxlength(): alert.alert([match]) expected_data = { 'body': 'Test Chatwork Rule' + 
('a' * 1932) + - '\n *message was cropped according to chatwork embed description limits!* ```' + '\n *message was cropped according to chatwork embed description limits!*' } mock_post_request.assert_called_once_with( From ad439078463417388445b304d7e3159db8cae7d8 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Fri, 2 Jul 2021 21:24:19 +0900 Subject: [PATCH 0363/1065] Remove unnecessary code in discord.py --- elastalert/alerters/discord.py | 4 +--- tests/alerters/discord_test.py | 8 ++++---- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/elastalert/alerters/discord.py b/elastalert/alerters/discord.py index 829653bdf..74caf2702 100644 --- a/elastalert/alerters/discord.py +++ b/elastalert/alerters/discord.py @@ -32,9 +32,7 @@ def alert(self, matches): if len(matches) > 1: body += '\n----------------------------------------\n' if len(body) > 2047: - body = body[0:1950] + '\n *message was cropped according to discord embed description limits!* ' - - body += '```' + body = body[0:1950] + '\n *message was cropped according to discord embed description limits!*' proxies = {'https': self.discord_proxy} if self.discord_proxy else None auth = HTTPProxyAuth(self.discord_proxy_login, self.discord_proxy_password) if self.discord_proxy_login else None diff --git a/tests/alerters/discord_test.py b/tests/alerters/discord_test.py index 634b9b35e..42f7f9687 100644 --- a/tests/alerters/discord_test.py +++ b/tests/alerters/discord_test.py @@ -39,7 +39,7 @@ def test_discord(caplog): 'content': ':warning: Test Discord :warning:', 'embeds': [{ - 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', 'color': 0xffffff, 'footer': { 'text': 'footer', @@ -85,7 +85,7 @@ def test_discord_not_footer(): 'content': ':warning: Test Discord :warning:', 'embeds': [{ - 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', 'color': 0xffffff }] } @@ -129,7 +129,7 @@ def test_discord_proxy(): 'content': ':warning: Test Discord :warning:', 'embeds': [{ - 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n```', + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', 'color': 0xffffff }] } @@ -171,7 +171,7 @@ def test_discord_description_maxlength(): 'embeds': [{ 'description': 'Test Discord Rule' + ('a' * 1933) + - '\n *message was cropped according to discord embed description limits!* ```', + '\n *message was cropped according to discord embed description limits!*', 'color': 0xffffff }] } From 37476cbd29f8cc25c4bd925bda12d020819b8c7e Mon Sep 17 00:00:00 2001 From: Jeff Ashton Date: Fri, 2 Jul 2021 16:58:44 -0400 Subject: [PATCH 0364/1065] Including time_taken in the statsd metrics --- elastalert/elastalert.py | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 5cc1ab44a..a2f73b432 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -976,6 +976,7 @@ def run_rule(self, rule, endtime, starttime=None): rule['previous_endtime'] = endtime time_taken = time.time() - run_start + # Write to ES that we've run this rule against this time period body = {'rule_name': rule['name'], 'endtime': endtime, @@ -986,6 +987,27 @@ def 
run_rule(self, rule, endtime, starttime=None): 'time_taken': time_taken} self.writeback('elastalert_status', body) + # Write metrics about the run to statsd + if self.statsd: + try: + self.statsd.gauge( + 'rule.time_taken', time_taken, + tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge( + 'query.hits', self.thread_data.num_hits, + tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge( + 'already_seen.hits', self.thread_data.num_dupes, + tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge( + 'query.matches', num_matches, + tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + self.statsd.gauge( + 'query.alerts_sent', self.thread_data.alerts_sent, + tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) + except BaseException as e: + elastalert_logger.error("unable to send metrics:\n%s" % str(e)) + return num_matches def init_rule(self, new_rule, new=True): @@ -1319,22 +1341,6 @@ def handle_rule_execution(self, rule): self.thread_data.alerts_sent)) rule_duration = seconds(endtime - rule.get('original_starttime')) elastalert_logger.info("%s range %s" % (rule['name'], rule_duration)) - if self.statsd: - try: - self.statsd.gauge( - 'query.hits', self.thread_data.num_hits, - tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) - self.statsd.gauge( - 'already_seen.hits', self.thread_data.num_dupes, - tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) - self.statsd.gauge( - 'query.matches', num_matches, - tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) - self.statsd.gauge( - 'query.alerts_sent', self.thread_data.alerts_sent, - tags={"elastalert_instance": self.statsd_instance_tag, "rule_name": rule['name']}) - except BaseException as e: - elastalert_logger.error("unable to send metrics:\n%s" % str(e)) self.thread_data.alerts_sent = 0 From b0d7f1168d35fd0173f296557ab40bd38e186af1 Mon Sep 17 00:00:00 2001 From: Jeff Ashton Date: Sat, 3 Jul 2021 12:05:53 -0400 Subject: [PATCH 0365/1065] Redirecting warnings to logging module --- elastalert/util.py | 1 + 1 file changed, 1 insertion(+) diff --git a/elastalert/util.py b/elastalert/util.py index a5a4d5f78..949639bdc 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -14,6 +14,7 @@ from elastalert.auth import Auth logging.basicConfig() +logging.captureWarnings(True) elastalert_logger = logging.getLogger('elastalert') From 02e860fc326589a4b7947d60d451b698778a128d Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sun, 4 Jul 2021 09:16:25 -0400 Subject: [PATCH 0366/1065] Add missing CHANGELOG entries from Jun 24 through Jul 4 --- CHANGELOG.md | 63 ++++++++++++++++++++++++++++++++++------------------ 1 file changed, 42 insertions(+), 21 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 28899ad91..e52affd36 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,35 +9,56 @@ ## Other changes - None -# 2.x.x +# 2.1.2 ## Breaking changes - None ## New features -- Add support for generating Kibana Discover URLs to Rocket.Chat alerter - [#260](https://github.com/jertel/elastalert2/pull/260) - @nsanorururu -- Provide rule key/values as possible Jinja data inputs - [#281](https://github.com/jertel/elastalert2/pull/281) - @mrfroggg +- [Rocket.Chat] Add support for generating Kibana Discover URLs to Rocket.Chat alerter - 
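As a quick illustration of the metrics block moved into run_rule above: a minimal
sketch of the per-rule gauge emission, using a generic statsd-style client whose
gauge() accepts a tags keyword (as the DataDog client does). The client object and
the wrapper function name are assumptions here, not ElastAlert's actual wiring::

    def report_rule_metrics(statsd_client, instance_tag, rule_name,
                            time_taken, num_hits, num_dupes, num_matches,
                            alerts_sent):
        # one gauge per measurement, all tagged with instance and rule name
        tags = {"elastalert_instance": instance_tag, "rule_name": rule_name}
        try:
            statsd_client.gauge('rule.time_taken', time_taken, tags=tags)
            statsd_client.gauge('query.hits', num_hits, tags=tags)
            statsd_client.gauge('already_seen.hits', num_dupes, tags=tags)
            statsd_client.gauge('query.matches', num_matches, tags=tags)
            statsd_client.gauge('query.alerts_sent', alerts_sent, tags=tags)
        except BaseException as e:
            # metric delivery must never break a rule run
            print("unable to send metrics: %s" % e)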
[#260](https://github.com/jertel/elastalert2/pull/260) - @nsano-rururu +- [Jinja] Provide rule key/values as possible Jinja data inputs - [#281](https://github.com/jertel/elastalert2/pull/281) - @mrfroggg +- [Kubernetes] Add securityContext and podSecurityContext to Helm chart - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse +- [Rocket.Chat] Add options: rocket_chat_ca_certs, rocket_chat_ignore_ssl_errors, rocket_chat_timeout - [#302](https://github.com/jertel/elastalert2/pull/302) - @nsano-rururu +- [Jinja] Favor match keys over colliding rule keys when resolving Jinja vars; also add alert_text_jinja unit test - [#311](https://github.com/jertel/elastalert2/pull/311) - @mrfroggg +- [Opsgenie] Added possibility to specify source and entity attrs - [#315](https://github.com/jertel/elastalert2/pull/315) - @konstantin-komienko +- [ServiceNow] Add support for `servicenow_impact` and `servicenow_urgency` parameters for ServiceNow alerter - [#316](https://github.com/jertel/elastalert2/pull/316) - @randolph-esnet - Add Jinja support to alert_subject - [#318](https://github.com/jertel/elastalert2/pull/318) - @mrfroggg -- Add securityContext and podSecurityContext to Helm chart - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse -- Favor match keys over colliding rule keys when resolving Jinja vars; also add alert_text_jinja unit test - [#311](https://github.com/jertel/elastalert2/pull/311) - @mrfroggg -- Add support for `servicenow_impact` and `servicenow_urgency` parameters for ServiceNow alerter - [#316](https://github.com/jertel/elastalert2/pull/316) - @randolph-esnet +@lepouletsuisse +- Metrics will now include time_taken, representing the execution duration of the rule - [#324](https://github.com/jertel/elastalert2/pull/324) - @JeffAshton ## Other changes -- Continue fix for prometheus wrapper writeback function signature - [#256](https://github.com/jertel/elastalert2/pull/256) - @greut -- Improve exception handling in Stomp alerter - [#261](https://github.com/jertel/elastalert2/pull/261) - @nsanorururu -- Improve exception handling in Amazon SES and SNS alerters - [#264](https://github.com/jertel/elastalert2/pull/264) - @nsanorururu -- Clarify documentation for starting ElastAlert 2 - [#265](https://github.com/jertel/elastalert2/pull/265) - @ferozsalam -- Add exception handling for unsupported operand type - [#266](https://github.com/jertel/elastalert2/pull/266) - @nsanorururu -- Improve documentation for Python build requirements - [#267](https://github.com/jertel/elastalert2/pull/267) - @nsanorururu -- Correct DataDog alerter logging - [#268](https://github.com/jertel/elastalert2/pull/268) - @nsanorururu -- Correct parameter code documentation for main ElastAlert runner - [#269](https://github.com/jertel/elastalert2/pull/269) - @ferozsalam -- Command alerter will now fail during init instead of during alert if given invalid command setting - [#270](https://github.com/jertel/elastalert2/pull/270) - @nsanorururu -- Consolidate all examples into a new examples/ sub folder - [#271](https://github.com/jertel/elastalert2/pull/271) - @ferozsalam -- Add TheHive example rule with Kibana Discover URL and query values in alert text - [#276](https://github.com/jertel/elastalert2/pull/276) - @markus-nclose -- Upgrade pytest-xdist from 2.2.1 to 2.3.0; clarify HTTPS support in docs; Add additional logging - [#283](https://github.com/jertel/elastalert2/pull/283) - @nsanorururu -- Add more alerter test coverage - 
[#284](https://github.com/jertel/elastalert2/pull/284) - @nsanorururu -- Improve structure and placement of test-related files in project tree - [#287](https://github.com/jertel/elastalert2/pull/287) - @ferozsalam +- [Prometheus] Continue fix for prometheus wrapper writeback function signature - [#256](https://github.com/jertel/elastalert2/pull/256) - @greut +- [Stomp] Improve exception handling in alerter - [#261](https://github.com/jertel/elastalert2/pull/261) - @nsano-rururu +- [AWS] Improve exception handling in Amazon SES and SNS alerters - [#264](https://github.com/jertel/elastalert2/pull/264) - @nsano-rururu +- [Docs] Clarify documentation for starting ElastAlert 2 - [#265](https://github.com/jertel/elastalert2/pull/265) - @ferozsalam +- Add exception handling for unsupported operand type - [#266](https://github.com/jertel/elastalert2/pull/266) - @nsano-rururu +- [Docs] Improve documentation for Python build requirements - [#267](https://github.com/jertel/elastalert2/pull/267) - @nsano-rururu +- [DataDog] Correct alerter logging - [#268](https://github.com/jertel/elastalert2/pull/268) - @nsano-rururu +- [Docs] Correct parameter code documentation for main ElastAlert runner - [#269](https://github.com/jertel/elastalert2/pull/269) - @ferozsalam +- [Command] alerter will now fail during init instead of during alert if given invalid command setting - [#270](https://github.com/jertel/elastalert2/pull/270) - @nsano-rururu +- [Docs] Consolidate all examples into a new examples/ sub folder - [#271](https://github.com/jertel/elastalert2/pull/271) - @ferozsalam +- [TheHive] Add example rule with Kibana Discover URL and query values in alert text - [#276](https://github.com/jertel/elastalert2/pull/276) - @markus-nclose +- Upgrade pytest-xdist from 2.2.1 to 2.3.0; clarify HTTPS support in docs; Add additional logging - [#283](https://github.com/jertel/elastalert2/pull/283) - @nsano-rururu +- [Tests] Add more alerter test coverage - [#284](https://github.com/jertel/elastalert2/pull/284) - @nsano-rururu +- [Tests] Improve structure and placement of test-related files in project tree - [#287](https://github.com/jertel/elastalert2/pull/287) - @ferozsalam - Only attempt to adjust timezone if timezone is set to a non-empty string - [#288](https://github.com/jertel/elastalert2/pull/288) - @ferozsalam -- Deprecated `podSecurityPolicy` feature in Helm Chart as [it's deprecated in Kubernetes 1.21](https://kubernetes.io/blog/2021/04/06/podsecuritypolicy-deprecation-past-present-and-future/) - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse +- [Kubernetes] Deprecated `podSecurityPolicy` feature in Helm Chart as [it's deprecated in Kubernetes 1.21](https://kubernetes.io/blog/2021/04/06/podsecuritypolicy-deprecation-past-present-and-future/) - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse +- [Slack] Fix slack_channel_override schema - [#291](https://github.com/jertel/elastalert2/pull/291) - @JeffAshton +- [Rocket.Chat] Fix rocket_chat_channel_override schema - [#293](https://github.com/jertel/elastalert2/pull/293) - @nsano-rururu +- [Tests] Increase code coverage - [#294](https://github.com/jertel/elastalert2/pull/294) - @nsano-rururu +- [Docs] Added Kibana Discover sample - [#295](https://github.com/jertel/elastalert2/pull/295) - @nsano-rururu +- [AWS] Remove deprecated boto_profile setting - [#299](https://github.com/jertel/elastalert2/pull/299) - @nsano-rururu +- [Slack] Correct slack_alert_fields schema definition - 
[#300](https://github.com/jertel/elastalert2/pull/300) - @nsano-rururu +- [Tests] Correct code coverage to eliminate warnings - [#301](https://github.com/jertel/elastalert2/pull/301) - @nsano-rururu +- Eliminate unnecessary calls to Elasticsearch - [#303](https://github.com/jertel/elastalert2/pull/303) - @JeffAshton +- [Zabbix] Fix timezone parsing - [#304](https://github.com/jertel/elastalert2/pull/304) - @JeffAshton +- Improve logging of scheduler - [#305](https://github.com/jertel/elastalert2/pull/305) - @JeffAshton +- [Jinja] Update Jinja from 2.11.3 to 3.0.1; Improve handling of colliding variables - [#311](https://github.com/jertel/elastalert2/pull/311) - @mrfroggg +- [TheHive] Force observable artifacts to be strings - [#313](https://github.com/jertel/elastalert2/pull/313) - @pandvan +- Upgrade pylint from <2.9 to <2.10 - [#314](https://github.com/jertel/elastalert2/pull/314) - @nsano-rururu +- [ChatWork] Enforce character limit - [#319](https://github.com/jertel/elastalert2/pull/319) - @nsano-rururu +- [LineNotify] Enforce character limit - [#320](https://github.com/jertel/elastalert2/pull/320) - @nsano-rururu +- [Discord] Remove trailing backticks from alert body - [#321](https://github.com/jertel/elastalert2/pull/321) - @nsano-rururu +- Redirecting warnings to logging module - [#325](https://github.com/jertel/elastalert2/pull/325) - @JeffAshton # 2.1.1 From 4896c0f532528bfad5073a8cc3e0ed4e828e2eba Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sun, 4 Jul 2021 09:22:01 -0400 Subject: [PATCH 0367/1065] Prepare release 2.1.2 --- CHANGELOG.md | 2 +- chart/elastalert2/Chart.yaml | 4 ++-- chart/elastalert2/README.md | 2 +- chart/elastalert2/values.yaml | 2 +- docs/source/running_elastalert.rst | 2 +- setup.py | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e52affd36..c6b18b17e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,7 +21,7 @@ - [Jinja] Favor match keys over colliding rule keys when resolving Jinja vars; also add alert_text_jinja unit test - [#311](https://github.com/jertel/elastalert2/pull/311) - @mrfroggg - [Opsgenie] Added possibility to specify source and entity attrs - [#315](https://github.com/jertel/elastalert2/pull/315) - @konstantin-komienko - [ServiceNow] Add support for `servicenow_impact` and `servicenow_urgency` parameters for ServiceNow alerter - [#316](https://github.com/jertel/elastalert2/pull/316) - @randolph-esnet -- Add Jinja support to alert_subject - [#318](https://github.com/jertel/elastalert2/pull/318) - @mrfroggg +- [Jinja] Add Jinja support to alert_subject - [#318](https://github.com/jertel/elastalert2/pull/318) - @mrfroggg @lepouletsuisse - Metrics will now include time_taken, representing the execution duration of the rule - [#324](https://github.com/jertel/elastalert2/pull/324) - @JeffAshton diff --git a/chart/elastalert2/Chart.yaml b/chart/elastalert2/Chart.yaml index 9ec21f9b3..b00b5d85d 100644 --- a/chart/elastalert2/Chart.yaml +++ b/chart/elastalert2/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: Automated rule-based alerting for Elasticsearch name: elastalert2 -version: 2.1.1 -appVersion: 2.1.1 +version: 2.1.2 +appVersion: 2.1.2 home: https://github.com/jertel/elastalert2 sources: - https://github.com/jertel/elastalert2 diff --git a/chart/elastalert2/README.md b/chart/elastalert2/README.md index 9d1817d6b..cae82faa4 100644 --- a/chart/elastalert2/README.md +++ b/chart/elastalert2/README.md @@ -47,7 +47,7 @@ The command removes all the Kubernetes components associated with the 
chart and | Parameter | Description | Default | |----------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------| | `image.repository` | docker image | jertel/elastalert2 | -| `image.tag` | docker image tag | 2.1.1 | +| `image.tag` | docker image tag | 2.1.2 | | `image.pullPolicy` | image pull policy | IfNotPresent | | `podAnnotations` | Annotations to be added to pods | {} | | `podSecurityContext` | Configurable podSecurityContext for pod execution environment | {"runAsUser": 1000, "runAsGroup": 1000, "fsGroup": 1000} | diff --git a/chart/elastalert2/values.yaml b/chart/elastalert2/values.yaml index e7a9f0780..b0488cf8d 100644 --- a/chart/elastalert2/values.yaml +++ b/chart/elastalert2/values.yaml @@ -25,7 +25,7 @@ image: # docker image repository: jertel/elastalert2 # docker image tag - tag: 2.1.1 + tag: 2.1.2 pullPolicy: IfNotPresent resources: {} diff --git a/docs/source/running_elastalert.rst b/docs/source/running_elastalert.rst index cf78f1086..97b63d8be 100644 --- a/docs/source/running_elastalert.rst +++ b/docs/source/running_elastalert.rst @@ -77,7 +77,7 @@ If you're interested in a pre-built Docker image check out the Be aware that the ``latest`` tag of the image represents the latest commit into the master branch. If you prefer to upgrade more slowly you will need utilize a -versioned tag, such as ``2.1.1`` instead, or ``2`` if you are comfortable with +versioned tag, such as ``2.1.2`` instead, or ``2`` if you are comfortable with always using the latest released version of ElastAlert 2. A properly configured config.yaml file must be mounted into the container during diff --git a/setup.py b/setup.py index 2b304755e..f1ce6db00 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ base_dir = os.path.dirname(__file__) setup( name='elastalert2', - version='2.1.1', + version='2.1.2', description='Automated rule-based alerting for Elasticsearch', setup_requires='setuptools', license='Apache 2.0', From 3df7643c51f891cf463a5916403af3b2d0cc7d97 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sun, 4 Jul 2021 09:30:09 -0400 Subject: [PATCH 0368/1065] Correct author username for PR #315 --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c6b18b17e..1403ec3ca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,7 +19,7 @@ - [Kubernetes] Add securityContext and podSecurityContext to Helm chart - [#289](https://github.com/jertel/elastalert2/pull/289) - @lepouletsuisse - [Rocket.Chat] Add options: rocket_chat_ca_certs, rocket_chat_ignore_ssl_errors, rocket_chat_timeout - [#302](https://github.com/jertel/elastalert2/pull/302) - @nsano-rururu - [Jinja] Favor match keys over colliding rule keys when resolving Jinja vars; also add alert_text_jinja unit test - [#311](https://github.com/jertel/elastalert2/pull/311) - @mrfroggg -- [Opsgenie] Added possibility to specify source and entity attrs - [#315](https://github.com/jertel/elastalert2/pull/315) - @konstantin-komienko +- [Opsgenie] Added possibility to specify source and entity attrs - [#315](https://github.com/jertel/elastalert2/pull/315) - @konstantin-kornienko - [ServiceNow] Add support for `servicenow_impact` and `servicenow_urgency` parameters for ServiceNow alerter - [#316](https://github.com/jertel/elastalert2/pull/316) - @randolph-esnet - [Jinja] Add Jinja support to alert_subject - 
[#318](https://github.com/jertel/elastalert2/pull/318) - @mrfroggg @lepouletsuisse

From 99d70b7fc9d909648ea27f1af6ef39c0179ab45a Mon Sep 17 00:00:00 2001
From: Krzysztof Szyper <45788587+ChristophShyper@users.noreply.github.com>
Date: Mon, 5 Jul 2021 22:57:02 +0200
Subject: [PATCH 0369/1065] VictorOps: Set state_message and entity_display_name from rule

This change adds all data gathered from Elasticsearch to the VictorOps payload,
which can then be used easily with transmogrifier.
If `victorops_entity_display_name` is not set, `entity_display_name` takes the
value of `alert_subject`, or is randomized by VictorOps.
---
 elastalert/alerters/victorops.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/elastalert/alerters/victorops.py b/elastalert/alerters/victorops.py
index f0b39cfec..b9a7ecb82 100644
--- a/elastalert/alerters/victorops.py
+++ b/elastalert/alerters/victorops.py
@@ -17,7 +17,7 @@ def __init__(self, rule):
         self.victorops_routing_key = self.rule.get('victorops_routing_key', None)
         self.victorops_message_type = self.rule.get('victorops_message_type', None)
         self.victorops_entity_id = self.rule.get('victorops_entity_id', None)
-        self.victorops_entity_display_name = self.rule.get('victorops_entity_display_name', 'no entity display name')
+        self.victorops_entity_display_name = self.rule.get('victorops_entity_display_name', None)  # set entity_display_name from alert_subject by default
         self.url = 'https://alert.victorops.com/integrations/generic/20131114/alert/%s/%s' % (
             self.victorops_api_key, self.victorops_routing_key)
         self.victorops_proxy = self.rule.get('victorops_proxy', None)
@@ -29,12 +29,17 @@ def alert(self, matches):
         headers = {'content-type': 'application/json'}
         # set https proxy, if it was provided
         proxies = {'https': self.victorops_proxy} if self.victorops_proxy else None
+        # set title with alert_subject
+        self.victorops_entity_display_name = self.create_title(matches) if \
+            self.victorops_entity_display_name is None else self.victorops_entity_display_name
         payload = {
             "message_type": self.victorops_message_type,
             "entity_display_name": self.victorops_entity_display_name,
             "monitoring_tool": "ElastAlert",
             "state_message": body
         }
+        # add all data from event payload
+        payload.update(matches[0])
         if self.victorops_entity_id:
             payload["entity_id"] = self.victorops_entity_id

From e0b5e75d3b0a42d6c7feec3899fdc7b8bdd0700f Mon Sep 17 00:00:00 2001
From: nsano-rururu
Date: Wed, 7 Jul 2021 22:19:26 +0900
Subject: [PATCH 0370/1065] =?UTF-8?q?Improved=20test=20code=20coverage=207?=
 =?UTF-8?q?0%=20=E2=86=92=2072%?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 tests/alerters/chatwork_test.py |  44 +++++++
 tests/alerters/discord_test.py  |  58 +++++++
 tests/alerters/line_test.py     |  45 +++++++
 tests/alerters/opsgenie_test.py |  52 ++++++
 tests/alerters/telegram_test.py |  49 ++++++
 tests/alerters/thehive_test.py  |  91 ++++++++++
 tests/alerts_test.py            |  15 +++
 tests/util_test.py              | 212 +++++++++++++++++++++++++++++++-
 8 files changed, 561 insertions(+), 5 deletions(-)

diff --git a/tests/alerters/chatwork_test.py b/tests/alerters/chatwork_test.py
index e74532abe..9e8a8969a 100644
--- a/tests/alerters/chatwork_test.py
+++ b/tests/alerters/chatwork_test.py
@@ -193,3 +193,47 @@ def test_chatwork_maxlength():
 
     actual_data = mock_post_request.call_args_list[0][1]['params']
     assert expected_data == actual_data
+
+
+def test_chatwork_matchs():
+    rule = {
+        'name': 'Test Chatwork Rule',
+        'type': 'any',
+        'chatwork_apikey': 'xxxx1',
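To make the new VictorOps payload behavior concrete: a minimal sketch of the
assembly step, assuming a single match dict. The helper name
build_victorops_payload is hypothetical, and the HTTP posting, proxy, and
entity_id handling are omitted::

    def build_victorops_payload(message_type, entity_display_name, body, match):
        payload = {
            "message_type": message_type,
            # falls back to the rendered alert_subject when not set in the rule
            "entity_display_name": entity_display_name,
            "monitoring_tool": "ElastAlert",
            "state_message": body,
        }
        # merge every field of the first match event into the payload so
        # downstream VictorOps routing/transmogrifier rules can reference them
        payload.update(match)
        return payload

    payload = build_victorops_payload(
        'CRITICAL', 'Test rule', 'state message',
        {'@timestamp': '2021-07-05T00:00:00', 'somefield': 'foobarbaz'})
    # payload now also carries @timestamp and somefield at the top level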
+ 'chatwork_room_id': 'xxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = ChatworkAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match, match]) + expected_data = { + 'body': 'Test Chatwork Rule\n' + + '\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'somefield: foobarbaz\n' + + '\n' + + '----------------------------------------\n' + + 'Test Chatwork Rule\n' + + '\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'somefield: foobarbaz\n' + + '\n' + + '----------------------------------------\n', + } + + mock_post_request.assert_called_once_with( + 'https://api.chatwork.com/v2/rooms/xxxx2/messages', + params=mock.ANY, + headers={'X-ChatWorkToken': 'xxxx1'}, + proxies=None, + auth=None + ) + + actual_data = mock_post_request.call_args_list[0][1]['params'] + assert expected_data == actual_data diff --git a/tests/alerters/discord_test.py b/tests/alerters/discord_test.py index 42f7f9687..bec8b67fb 100644 --- a/tests/alerters/discord_test.py +++ b/tests/alerters/discord_test.py @@ -260,3 +260,61 @@ def test_discord_required_error(discord_webhook_url, expected_data): assert expected_data == actual_data except Exception as ea: assert expected_data in str(ea) + + +def test_discord_matches(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'discord_embed_footer': 'footer', + 'discord_embed_icon_url': 'http://xxxx/image.png', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match, match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n' + + '\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'somefield: foobarbaz\n' + + '\n' + + '----------------------------------------\n' + + 'Test Discord Rule\n' + + '\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'somefield: foobarbaz\n' + + '\n' + + '----------------------------------------\n', + 'color': 0xffffff, + 'footer': { + 'text': 'footer', + 'icon_url': 'http://xxxx/image.png' + } + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data diff --git a/tests/alerters/line_test.py b/tests/alerters/line_test.py index 511bead03..8e4ba3ccf 100644 --- a/tests/alerters/line_test.py +++ b/tests/alerters/line_test.py @@ -149,3 +149,48 @@ def test_line_notify_maxlength(): actual_data = mock_post_request.call_args_list[0][1]['data'] assert expected_data == actual_data + + +def test_line_notify_matchs(): + rule = { + 'name': 'Test LineNotify Rule', + 'type': 'any', + 'linenotify_access_token': 'xxxxx', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = LineNotifyAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match, match]) + + 
expected_data = { + 'message': 'Test LineNotify Rule\n' + '\n' + '@timestamp: 2021-01-01T00:00:00\n' + 'somefield: foobarbaz\n' + '\n' + '----------------------------------------\n' + 'Test LineNotify Rule\n' + '\n' + '@timestamp: 2021-01-01T00:00:00\n' + 'somefield: foobarbaz\n' + '\n' + '----------------------------------------\n' + } + + mock_post_request.assert_called_once_with( + 'https://notify-api.line.me/api/notify', + data=mock.ANY, + headers={ + 'Content-Type': 'application/x-www-form-urlencoded', + 'Authorization': 'Bearer {}'.format('xxxxx') + } + ) + + actual_data = mock_post_request.call_args_list[0][1]['data'] + assert expected_data == actual_data diff --git a/tests/alerters/opsgenie_test.py b/tests/alerters/opsgenie_test.py index 65ce7bcc0..10947a958 100644 --- a/tests/alerters/opsgenie_test.py +++ b/tests/alerters/opsgenie_test.py @@ -908,3 +908,55 @@ def test_opsgenie_substitution(opsgenie_entity, expected_entity, opsgenie_priori assert mcal[0][1]['json']['entity'] == expected_entity assert mcal[0][1]['json']['priority'] == expected_priority + + +def test_opsgenie_details_with_constant_value_matchs(): + rule = { + 'name': 'Opsgenie Details', + 'type': mock_rule(), + 'opsgenie_account': 'genies', + 'opsgenie_key': 'ogkey', + 'opsgenie_details': {'Foo': 'Bar'} + } + match = { + '@timestamp': '2014-10-31T00:00:00' + } + alert = OpsGenieAlerter(rule) + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match, match]) + + mock_post_request.assert_called_once_with( + 'https://api.opsgenie.com/v2/alerts', + headers={ + 'Content-Type': 'application/json', + 'Authorization': 'GenieKey ogkey' + }, + json=mock.ANY, + proxies=None + ) + + expected_json = { + 'description': 'Opsgenie Details\n' + '\n' + "{'@timestamp': '2014-10-31T00:00:00'}\n" + '\n' + '@timestamp: 2014-10-31T00:00:00\n' + '\n' + '----------------------------------------\n' + 'Opsgenie Details\n' + '\n' + "{'@timestamp': '2014-10-31T00:00:00'}\n" + '\n' + '@timestamp: 2014-10-31T00:00:00\n' + '\n' + '----------------------------------------\n', + 'details': {'Foo': 'Bar'}, + 'message': 'ElastAlert: Opsgenie Details', + 'priority': None, + 'source': 'ElastAlert', + 'tags': ['ElastAlert', 'Opsgenie Details'], + 'user': 'genies' + } + actual_json = mock_post_request.call_args_list[0][1]['json'] + assert expected_json == actual_json diff --git a/tests/alerters/telegram_test.py b/tests/alerters/telegram_test.py index fb1ef459e..1831832d1 100644 --- a/tests/alerters/telegram_test.py +++ b/tests/alerters/telegram_test.py @@ -200,3 +200,52 @@ def test_telegram_required_error(telegram_bot_token, telegram_room_id, expected_ assert expected_data == actual_data except Exception as ea: assert expected_data in str(ea) + + +def test_telegram_matchs(): + rule = { + 'name': 'Test Telegram Rule', + 'type': 'any', + 'telegram_bot_token': 'xxxxx1', + 'telegram_room_id': 'xxxxx2', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TelegramAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match, match]) + expected_data = { + 'chat_id': rule['telegram_room_id'], + 'text': '⚠ *Test Telegram Rule* ⚠ ```\n' + + 'Test Telegram Rule\n' + + '\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'somefield: foobarbaz\n' + + '\n' + + '----------------------------------------\n' + + 'Test Telegram Rule\n' + + '\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'somefield: 
foobarbaz\n' + + '\n' + + '----------------------------------------\n' + + ' ```', + 'parse_mode': 'markdown', + 'disable_web_page_preview': True + } + + mock_post_request.assert_called_once_with( + 'https://api.telegram.org/botxxxxx1/sendMessage', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data diff --git a/tests/alerters/thehive_test.py b/tests/alerters/thehive_test.py index 9a14f16ac..2663fe913 100644 --- a/tests/alerters/thehive_test.py +++ b/tests/alerters/thehive_test.py @@ -172,3 +172,94 @@ def test_thehive_getinfo(hive_host, expect): expected_data = expect actual_data = alert.get_info() assert expected_data == actual_data + + +def test_thehive_alerter2(): + rule = {'alert': [], + 'alert_text': '', + 'alert_text_type': 'alert_text_only', + 'description': 'test', + 'hive_alert_config': {'customFields': [{'name': 'test', + 'type': 'string', + 'value': 2}], + 'follow': True, + 'severity': 2, + 'source': 'elastalert', + 'status': 'New', + 'tags': ['test.port'], + 'tlp': 3, + 'type': 'external'}, + 'hive_connection': {'hive_apikey': '', + 'hive_host': 'https://localhost', + 'hive_port': 9000}, + 'hive_observable_data_mapping': [{'ip': 'test.ip', 'autonomous-system': 'test.as_number'}], + 'name': 'test-thehive', + 'tags': ['a', 'b'], + 'type': 'any'} + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = HiveAlerter(rule) + match = { + "test": { + "ip": "127.0.0.1", + "port": 9876, + "as_number": 1234 + }, + "@timestamp": "2021-05-09T14:43:30", + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + "artifacts": [ + { + "data": "127.0.0.1", + "dataType": "ip", + "message": None, + "tags": [], + "tlp": 2 + }, + { + "data": "1234", + "dataType": "autonomous-system", + "message": None, + "tags": [], + "tlp": 2 + } + ], + "customFields": { + "test": { + "order": 0, + "string": 2 + } + }, + "description": "\n\n", + "follow": True, + "severity": 2, + "source": "elastalert", + "status": "New", + "tags": [ + "9876" + ], + "title": "test-thehive", + "tlp": 3, + "type": "external" + } + + conn_config = rule['hive_connection'] + alert_url = f"{conn_config['hive_host']}:{conn_config['hive_port']}/api/alert" + mock_post_request.assert_called_once_with( + alert_url, + data=mock.ANY, + headers={'Content-Type': 'application/json', + 'Authorization': 'Bearer '}, + verify=False, + proxies={'http': '', 'https': ''} + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + # The date and sourceRef are autogenerated, so we can't expect them to be a particular value + del actual_data['date'] + del actual_data['sourceRef'] + + assert expected_data == actual_data diff --git a/tests/alerts_test.py b/tests/alerts_test.py index ed07f7b5f..2c1e25abd 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -296,3 +296,18 @@ def test_alert_subject_with_jinja(): assert "Test alert for the_owner;" in alertsubject assert "field field_value;" in alertsubject assert "Abc: abc from match" in alertsubject + + +def test_alert_getinfo(): + rule = { + 'name': 'test_rule', + 'type': mock_rule(), + 'owner': 'the_owner', + 'priority': 2, + 'alert_subject': 'A very long subject', + 'alert_subject_max_len': 5 + } + alert = Alerter(rule) + actual_data = alert.get_info() + expected_data = {'type': 'Unknown'} + assert expected_data == actual_data diff --git 
a/tests/util_test.py b/tests/util_test.py index 893278b06..ffffb1889 100644 --- a/tests/util_test.py +++ b/tests/util_test.py @@ -1,15 +1,24 @@ # -*- coding: utf-8 -*- +import logging +import os +import pytest + from datetime import datetime from datetime import timedelta -from unittest import mock -import pytest from dateutil.parser import parse as dt +from unittest import mock + from elastalert.util import add_raw_postfix +from elastalert.util import build_es_conn_config +from elastalert.util import dt_to_ts from elastalert.util import dt_to_ts_with_format +from elastalert.util import EAException +from elastalert.util import elasticsearch_client from elastalert.util import flatten_dict from elastalert.util import format_index +from elastalert.util import get_module from elastalert.util import lookup_es_key from elastalert.util import parse_deadline from elastalert.util import parse_duration @@ -19,6 +28,7 @@ from elastalert.util import set_es_key from elastalert.util import should_scrolling_continue from elastalert.util import ts_to_dt_with_format +from elastalert.util import ts_utc_to_tz @pytest.mark.parametrize('spec, expected_delta', [ @@ -234,20 +244,212 @@ def test_should_scrolling_continue(): assert should_scrolling_continue(rule_over_max_scrolling) is False -def test_ts_to_dt_with_format(): +def test_ts_to_dt_with_format1(): assert ts_to_dt_with_format('2021/02/01 12:30:00', '%Y/%m/%d %H:%M:%S') == dt('2021-02-01 12:30:00+00:00') + + +def test_ts_to_dt_with_format2(): assert ts_to_dt_with_format('01/02/2021 12:30:00', '%d/%m/%Y %H:%M:%S') == dt('2021-02-01 12:30:00+00:00') -def test_dt_to_ts_with_format(): +def test_ts_to_dt_with_format3(): + date = datetime(2021, 7, 6, hour=0, minute=0, second=0) + assert ts_to_dt_with_format(date, '') == dt('2021-7-6 00:00') + + +def test_ts_to_dt_with_format4(): + assert ts_to_dt_with_format('01/02/2021 12:30:00 +0900', '%d/%m/%Y %H:%M:%S %z') == dt('2021-02-01 12:30:00+09:00') + + +def test_dt_to_ts_with_format1(): assert dt_to_ts_with_format(dt('2021-02-01 12:30:00+00:00'), '%Y/%m/%d %H:%M:%S') == '2021/02/01 12:30:00' + + +def test_dt_to_ts_with_format2(): assert dt_to_ts_with_format(dt('2021-02-01 12:30:00+00:00'), '%d/%m/%Y %H:%M:%S') == '01/02/2021 12:30:00' +def test_dt_to_ts_with_format3(): + assert dt_to_ts_with_format('2021-02-01 12:30:00+00:00', '%d/%m/%Y %H:%M:%S') == '2021-02-01 12:30:00+00:00' + + def test_flatten_dict(): assert flatten_dict({'test': 'value1', 'test2': 'value2'}) == {'test': 'value1', 'test2': 'value2'} -def test_pytzfy(): +def test_pytzfy1(): assert pytzfy(dt('2021-02-01 12:30:00+00:00')) == dt('2021-02-01 12:30:00+00:00') + + +def test_pytzfy2(): assert pytzfy(datetime(2018, 12, 31, 5, 0, 30, 1000)) == dt('2018-12-31 05:00:30.001000') + + +def test_get_module(): + with pytest.raises(EAException) as ea: + get_module('test') + assert 'Could not import module' in str(ea) + + +def test_dt_to_ts(caplog): + caplog.set_level(logging.WARNING) + dt_to_ts('a') + user, level, message = caplog.record_tuples[0] + assert 'elastalert' == user + assert logging.WARNING == level + assert 'Expected datetime, got' in message + + +def test_ts_utc_to_tz(): + date = datetime(2021, 7, 6, hour=0, minute=0, second=0) + actual_data = ts_utc_to_tz(date, 'Europe/Istanbul') + assert '2021-07-06 03:00:00+03:00' == str(actual_data) + + +test_build_es_conn_config_param = 'es_host, es_port, es_conn_timeout, es_send_get_body_as, ssl_show_warn, es_username, ' +test_build_es_conn_config_param += 'es_password, es_api_key, es_bearer, aws_region, 
profile, use_ssl, verify_certs, ' +test_build_es_conn_config_param += 'ca_certs, client_cert,client_key,es_url_prefix, expected_data' + + +@pytest.mark.parametrize(test_build_es_conn_config_param, [ + ('', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', True), + ('localhost', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', True), + ('localhost', 9200, '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', + { + 'use_ssl': False, + 'verify_certs': True, + 'ca_certs': None, + 'client_cert': None, + 'client_key': None, + 'http_auth': None, + 'es_username': None, + 'es_password': None, + 'es_api_key': None, + 'es_bearer': None, + 'aws_region': None, + 'profile': None, + 'headers': None, + 'es_host': 'localhost', + 'es_port': 9200, + 'es_url_prefix': '', + 'es_conn_timeout': 20, + 'send_get_body_as': 'GET', + 'ssl_show_warn': True + }), + ('localhost', 9200, 30, 'POST', False, 'user', 'pass', 'key', 'bearer', 'us-east-1', 'default', + True, False, '/path/to/cacert.pem', '/path/to/client_cert.pem', '/path/to/client_key.key', 'elasticsearch', + { + 'use_ssl': True, + 'verify_certs': False, + 'ca_certs': '/path/to/cacert.pem', + 'client_cert': '/path/to/client_cert.pem', + 'client_key': '/path/to/client_key.key', + 'http_auth': None, + 'es_username': 'user', + 'es_password': 'pass', + 'es_api_key': 'key', + 'es_bearer': 'bearer', + 'aws_region': 'us-east-1', + 'profile': 'default', + 'headers': None, + 'es_host': 'localhost', + 'es_port': 9200, + 'es_url_prefix': 'elasticsearch', + 'es_conn_timeout': 30, + 'send_get_body_as': 'POST', + 'ssl_show_warn': False + }), +]) +def test_build_es_conn_config(es_host, es_port, es_conn_timeout, es_send_get_body_as, ssl_show_warn, es_username, + es_password, es_api_key, es_bearer, aws_region, profile, use_ssl, verify_certs, + ca_certs, client_cert, client_key, es_url_prefix, expected_data): + try: + conf = {} + if es_host: + conf['es_host'] = es_host + if es_port: + conf['es_port'] = es_port + if es_conn_timeout: + conf['es_conn_timeout'] = es_conn_timeout + if es_send_get_body_as: + conf['es_send_get_body_as'] = es_send_get_body_as + if ssl_show_warn != '': + conf['ssl_show_warn'] = ssl_show_warn + if es_username: + conf['es_username'] = es_username + if es_password: + conf['es_password'] = es_password + if es_api_key: + conf['es_api_key'] = es_api_key + if es_bearer: + conf['es_bearer'] = es_bearer + if aws_region: + conf['aws_region'] = aws_region + if profile: + conf['profile'] = profile + if use_ssl != '': + conf['use_ssl'] = use_ssl + if verify_certs != '': + conf['verify_certs'] = verify_certs + if ca_certs: + conf['ca_certs'] = ca_certs + if client_cert: + conf['client_cert'] = client_cert + if client_key: + conf['client_key'] = client_key + if es_url_prefix: + conf['es_url_prefix'] = es_url_prefix + actual = build_es_conn_config(conf) + assert expected_data == actual + except KeyError: + assert expected_data + + +@mock.patch.dict(os.environ, {'ES_USERNAME': 'USER', + 'ES_PASSWORD': 'PASS', + 'ES_API_KEY': 'KEY', + 'ES_BEARER': 'BEARE'}) +def test_build_es_conn_config2(): + conf = {} + conf['es_host'] = 'localhost' + conf['es_port'] = 9200 + expected = { + 'use_ssl': False, + 'verify_certs': True, + 'ca_certs': None, + 'client_cert': None, + 'client_key': None, + 'http_auth': None, + 'es_username': 'USER', + 'es_password': 'PASS', + 'es_api_key': 'KEY', + 'es_bearer': 'BEARE', + 'aws_region': None, + 'profile': None, + 'headers': None, + 'es_host': 'localhost', + 'es_port': 9200, + 'es_url_prefix': '', + 
'es_conn_timeout': 20, + 'send_get_body_as': 'GET', + 'ssl_show_warn': True + } + actual = build_es_conn_config(conf) + assert expected == actual + + +@pytest.mark.parametrize('es_host, es_port, es_bearer, es_api_key', [ + ('localhost', 9200, '', ''), + ('localhost', 9200, 'bearer', 'bearer') +]) +@mock.patch.dict(os.environ, {'AWS_DEFAULT_REGION': ''}) +def test_elasticsearch_client(es_host, es_port, es_bearer, es_api_key): + conf = {} + conf['es_host'] = es_host + conf['es_port'] = es_port + if es_bearer: + conf['es_bearer'] = es_bearer + if es_api_key: + conf['es_api_key'] = es_api_key + acutual = elasticsearch_client(conf) + assert None is not acutual From 2c77da6b0abf91c5bc914a16fab39626dcfd47ab Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Wed, 7 Jul 2021 22:31:32 +0900 Subject: [PATCH 0371/1065] Bump sphinx from 4.0.2 to 4.0.3 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index fb849ad4e..f295ef5b8 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -10,6 +10,6 @@ pytest==6.2.4 pytest-cov==2.12.1 pytest-xdist==2.3.0 setuptools -sphinx==4.0.2 +sphinx==4.0.3 sphinx_rtd_theme tox==3.23.1 From b9ff8b8ff2efcac9e0746a167297f6d64bfdbcb3 Mon Sep 17 00:00:00 2001 From: Jeff Ashton Date: Wed, 7 Jul 2021 09:35:32 -0400 Subject: [PATCH 0372/1065] Fixing get_hits_count for es 7.x --- elastalert/elastalert.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index a2f73b432..77f16b2bc 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -456,8 +456,21 @@ def get_hits_count(self, rule, starttime, endtime, index): five=rule['five'] ) + es_client = self.thread_data.current_es try: - res = self.thread_data.current_es.count(index=index, doc_type=rule['doc_type'], body=query, ignore_unavailable=True) + if es_client.is_atleastsixsix(): + res = es_client.count( + index=index, + body=query, + ignore_unavailable=True + ) + else: + res = es_client.count( + index=index, + doc_type=rule['doc_type'], + body=query, + ignore_unavailable=True + ) except ElasticsearchException as e: # Elasticsearch sometimes gives us GIGANTIC error messages # (so big that they will fill the entire terminal buffer) From 901b5d0f2545506b79c5ed4cab7298c725190486 Mon Sep 17 00:00:00 2001 From: Jeff Ashton Date: Wed, 7 Jul 2021 12:21:13 -0400 Subject: [PATCH 0373/1065] Switching to check is_atleastsixtwo to align with other similar checks --- elastalert/elastalert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 77f16b2bc..b1a577ce7 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -458,7 +458,7 @@ def get_hits_count(self, rule, starttime, endtime, index): es_client = self.thread_data.current_es try: - if es_client.is_atleastsixsix(): + if es_client.is_atleastsixtwo(): res = es_client.count( index=index, body=query, From 81a6b5fa45cce01b7ccf3b6a9e62d7a59943c9fb Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Wed, 7 Jul 2021 15:47:57 -0400 Subject: [PATCH 0374/1065] Add complex filter example --- docs/source/recipes/writing_filters.rst | 27 +++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/docs/source/recipes/writing_filters.rst b/docs/source/recipes/writing_filters.rst index 8e3189f4b..e8e7361e7 100644 --- a/docs/source/recipes/writing_filters.rst +++ b/docs/source/recipes/writing_filters.rst @@ 
-116,12 +116,31 @@ For Elasticsearch 2.X, any of the filters can be embedded in ``not``, ``and``, a term: _type: "something" -For Elasticsearch 5.x, this will not work and to implement boolean logic use query strings:: +Below is a more complex example for Elasticsearch 7.x, provided by a `community user. `_:: filter: - - query: - query_string: - query: "somefield: somevalue OR foo: bar" + - term: + action: order + - terms: + dining: + - pickup + - delivery + - bool: + #exclude common/expected orders + must_not: + #Alice usually gets a pizza + - bool: + must: [ {term: {uid: alice}}, {term: {menu_item: pizza}} ] + #Bob loves his hoagies + - bool: + must: [ {term: {uid: bob}}, {term: {menu_item: sandwich}} ] + #Charlie has a few favorites + - bool: + must: + - term: + uid: charlie + - match: + menu_item: "burrito pasta salad pizza" Loading Filters Directly From Kibana 3 From 1010f70434eea5d6789a57c82d0f2f1eba84826f Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Wed, 14 Jul 2021 19:40:14 +0900 Subject: [PATCH 0375/1065] Improved test coverage MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit config.py 69% → 86% create_index.py 16% → 18% enhancements.py 82% → 100% kibana.py 82% → 100% loaders.py 70% → 71% --- tests/base_test.py | 22 +++ tests/config_test.py | 170 +++++++++++++++++- tests/create_index_test.py | 93 ++++++++++ tests/example.config.logging.yaml | 70 ++++++++ .../example.config.not_found.buffer_time.yaml | 15 ++ tests/example.config.not_found.es_host.yaml | 17 ++ tests/example.config.not_found.es_port.yaml | 17 ++ tests/example.config.not_found.run_every.yaml | 15 ++ ...mple.config.not_found.writeback_index.yaml | 13 ++ tests/example.config.old_query_limit.yaml | 21 +++ ...example.config.type_error.buffer_time.yaml | 17 ++ .../example.config.type_error.run_every.yaml | 17 ++ tests/kibana_test.py | 136 +++++++++++++- tests/loaders_test.py | 33 ++++ tests/rules_test.py | 55 ++++++ 15 files changed, 708 insertions(+), 3 deletions(-) create mode 100644 tests/example.config.logging.yaml create mode 100644 tests/example.config.not_found.buffer_time.yaml create mode 100644 tests/example.config.not_found.es_host.yaml create mode 100644 tests/example.config.not_found.es_port.yaml create mode 100644 tests/example.config.not_found.run_every.yaml create mode 100644 tests/example.config.not_found.writeback_index.yaml create mode 100644 tests/example.config.old_query_limit.yaml create mode 100644 tests/example.config.type_error.buffer_time.yaml create mode 100644 tests/example.config.type_error.run_every.yaml diff --git a/tests/base_test.py b/tests/base_test.py index 785befd58..12db4d96b 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -12,6 +12,7 @@ from elastalert.enhancements import BaseEnhancement from elastalert.enhancements import DropMatchException +from elastalert.enhancements import TimeEnhancement from elastalert.kibana import dashboard_temp from elastalert.util import dt_to_ts from elastalert.util import dt_to_unix @@ -1443,3 +1444,24 @@ def test_get_elasticsearch_client_different_rule(ea): y = ea.get_elasticsearch_client(y_rule) assert x is not y, 'Should return unique client for each rule' + + +def test_base_enhancement_process_error(ea): + try: + be = BaseEnhancement(ea.rules[0]) + be.process('') + assert False + except NotImplementedError: + assert True + + +def test_time_enhancement(ea): + be = BaseEnhancement(ea.rules[0]) + te = TimeEnhancement(be) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + 
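The enhancement tests above exercise the BaseEnhancement interface: a custom
enhancement is a small subclass whose process() mutates each match in place
before alerting. A minimal sketch follows; MyEnhancement and the hostname field
are hypothetical, not part of the code under test::

    from elastalert.enhancements import BaseEnhancement

    class MyEnhancement(BaseEnhancement):
        # self.rule is populated by BaseEnhancement.__init__(rule)
        def process(self, match):
            # normalize a field of the match before the alerters see it
            if 'hostname' in match:
                match['hostname'] = match['hostname'].lower()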
} + te.process(match) + excepted = '2021-01-01 00:00 +0000' + assert match['@timestamp'] == excepted diff --git a/tests/config_test.py b/tests/config_test.py index 36266b91a..a6d43c9ce 100644 --- a/tests/config_test.py +++ b/tests/config_test.py @@ -1,9 +1,13 @@ # -*- coding: utf-8 -*- -import os -from unittest import mock import datetime +import logging +import os +import pytest from elastalert.config import load_conf +from elastalert.util import EAException + +from unittest import mock def test_config_loads(): @@ -31,3 +35,165 @@ def test_config_loads(): assert conf['writeback_index'] == 'elastalert_status' assert conf['alert_time_limit'] == datetime.timedelta(days=2) + + +def test_config_loads_ea_execption(): + with pytest.raises(EAException) as ea: + os.environ['ELASTIC_PASS'] = 'password_from_env' + + test_args = mock.Mock() + test_args.config = '' + test_args.rule = None + test_args.debug = False + test_args.es_debug_trace = None + + load_conf(test_args) + + assert 'No --config or config.yaml found' in str(ea) + + +@pytest.mark.parametrize('config, expected', [ + ('/example.config.type_error.run_every.yaml', 'Invalid time format used: '), + ('/example.config.type_error.buffer_time.yaml', 'Invalid time format used: ') +]) +def test_config_loads_type_error(config, expected): + with pytest.raises(EAException) as ea: + os.environ['ELASTIC_PASS'] = 'password_from_env' + dir_path = os.path.dirname(os.path.realpath(__file__)) + + test_args = mock.Mock() + test_args.config = dir_path + config + test_args.rule = None + test_args.debug = False + test_args.es_debug_trace = None + + load_conf(test_args) + + assert expected in str(ea) + + +@pytest.mark.parametrize('config, expected', [ + ('/example.config.not_found.run_every.yaml', 'must contain '), + ('/example.config.not_found.es_host.yaml', 'must contain '), + ('/example.config.not_found.es_port.yaml', 'must contain '), + ('/example.config.not_found.writeback_index.yaml', 'must contain '), + ('/example.config.not_found.buffer_time.yaml', 'must contain ') +]) +def test_config_loads_required_globals_error(config, expected): + with pytest.raises(EAException) as ea: + os.environ['ELASTIC_PASS'] = 'password_from_env' + dir_path = os.path.dirname(os.path.realpath(__file__)) + + test_args = mock.Mock() + test_args.config = dir_path + config + test_args.rule = None + test_args.debug = False + test_args.verbose = None + test_args.es_debug_trace = None + + load_conf(test_args) + + assert expected in str(ea) + + +def test_config_loads_debug(caplog): + caplog.set_level(logging.INFO) + os.environ['ELASTIC_PASS'] = 'password_from_env' + dir_path = os.path.dirname(os.path.realpath(__file__)) + + test_args = mock.Mock() + test_args.config = dir_path + '/example.config.yaml' + test_args.rule = None + test_args.debug = True + test_args.verbose = None + test_args.es_debug_trace = None + + load_conf(test_args) + + expected_msg = 'Note: In debug mode, alerts will be logged to console but NOT actually sent.\n' + expected_msg += ' To send them but remain verbose, use --verbose instead.' 
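A minimal sketch of driving load_conf by hand the way these tests do, mirroring
the fixtures above; the config path and ELASTIC_PASS value are assumptions, and
only the argument fields the tests set are shown::

    import os
    from unittest import mock
    from elastalert.config import load_conf

    os.environ['ELASTIC_PASS'] = 'password_from_env'

    test_args = mock.Mock()
    test_args.config = 'tests/example.config.yaml'
    test_args.rule = None
    test_args.debug = False
    test_args.verbose = None
    test_args.es_debug_trace = None

    conf = load_conf(test_args)
    assert conf['es_port'] == 9200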
+ assert ('elastalert', logging.INFO, expected_msg) == caplog.record_tuples[0] + + +def test_config_loads_debug_and_verbose(caplog): + caplog.set_level(logging.INFO) + os.environ['ELASTIC_PASS'] = 'password_from_env' + dir_path = os.path.dirname(os.path.realpath(__file__)) + + test_args = mock.Mock() + test_args.config = dir_path + '/example.config.yaml' + test_args.rule = None + test_args.debug = True + test_args.verbose = True + test_args.es_debug_trace = None + + load_conf(test_args) + + expected_msg = 'Note: --debug and --verbose flags are set. --debug takes precedent.' + assert ('elastalert', logging.INFO, expected_msg) == caplog.record_tuples[0] + + +def test_config_loads_old_query_limit(): + os.environ['ELASTIC_PASS'] = 'password_from_env' + dir_path = os.path.dirname(os.path.realpath(__file__)) + + test_args = mock.Mock() + test_args.config = dir_path + '/example.config.old_query_limit.yaml' + test_args.rule = None + test_args.debug = False + test_args.es_debug_trace = None + + conf = load_conf(test_args) + + assert conf['rules_folder'] == '/opt/elastalert/rules' + assert conf['run_every'] == datetime.timedelta(seconds=10) + assert conf['buffer_time'] == datetime.timedelta(minutes=15) + assert conf['es_host'] == 'elasticsearch' + assert conf['es_port'] == 9200 + assert conf['es_username'] == 'elastic' + assert conf['es_password'] == 'password_from_env' + assert conf['writeback_index'] == 'elastalert_status' + assert conf['alert_time_limit'] == datetime.timedelta(days=2) + assert conf['old_query_limit'] == datetime.timedelta(days=3) + + +def test_config_loads_logging(capfd): + os.environ['ELASTIC_PASS'] = 'password_from_env' + dir_path = os.path.dirname(os.path.realpath(__file__)) + + test_args = mock.Mock() + test_args.config = dir_path + '/example.config.logging.yaml' + test_args.rule = None + test_args.debug = True + test_args.verbose = True + test_args.es_debug_trace = None + + load_conf(test_args) + + expected1 = 'Note: --debug and --verbose flags are set. --debug takes precedent.' + expected2 = 'Note: In debug mode, alerts will be logged to console but NOT actually sent.\n' + expected3 = ' To send them but remain verbose, use --verbose instead.\n' + out, err = capfd.readouterr() + assert expected1 in err + assert expected2 in err + assert expected3 in err + + +def test_config_loads_logging2(caplog): + os.environ['ELASTIC_PASS'] = 'password_from_env' + dir_path = os.path.dirname(os.path.realpath(__file__)) + + test_args = mock.Mock() + test_args.config = dir_path + '/example.config.yaml' + test_args.rule = None + test_args.debug = True + test_args.verbose = False + test_args.es_debug_trace = None + + load_conf(test_args) + + expected1 = 'Note: In debug mode, alerts will be logged to console but NOT actually sent.' + expected2 = ' To send them but remain verbose, use --verbose instead.' 
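+    # unpack the first captured record: (logger name, level, message)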
+ user, level, message = caplog.record_tuples[0] + assert expected1 in message + assert expected2 in message diff --git a/tests/create_index_test.py b/tests/create_index_test.py index 47a6247dc..03790ae50 100644 --- a/tests/create_index_test.py +++ b/tests/create_index_test.py @@ -51,3 +51,96 @@ def test_read_es_6_index_mappings(): mappings = elastalert.create_index.read_es_index_mappings(6) assert len(mappings) == len(es_mappings) print((json.dumps(mappings, indent=2))) + + +@pytest.mark.parametrize('es_version, expected', [ + ('5.6.0', False), + ('6.0.0', True), + ('6.1.0', True), + ('6.2.0', True), + ('6.3.0', True), + ('6.4.0', True), + ('6.5.0', True), + ('6.6.0', True), + ('6.7.0', True), + ('6.8.0', True), + ('7.0.0', True), + ('7.1.0', True), + ('7.2.0', True), + ('7.3.0', True), + ('7.4.0', True), + ('7.5.0', True), + ('7.6.0', True), + ('7.7.0', True), + ('7.8.0', True), + ('7.9.0', True), + ('7.10.0', True), + ('7.11.0', True), + ('7.12.0', True), + ('7.13.0', True) +]) +def test_is_atleastsix(es_version, expected): + result = elastalert.create_index.is_atleastsix(es_version) + assert result == expected + + +@pytest.mark.parametrize('es_version, expected', [ + ('5.6.0', False), + ('6.0.0', False), + ('6.1.0', False), + ('6.2.0', True), + ('6.3.0', True), + ('6.4.0', True), + ('6.5.0', True), + ('6.6.0', True), + ('6.7.0', True), + ('6.8.0', True), + ('7.0.0', True), + ('7.1.0', True), + ('7.2.0', True), + ('7.3.0', True), + ('7.4.0', True), + ('7.5.0', True), + ('7.6.0', True), + ('7.7.0', True), + ('7.8.0', True), + ('7.9.0', True), + ('7.10.0', True), + ('7.11.0', True), + ('7.12.0', True), + ('7.13.0', True) +]) +def test_is_atleastsixtwo(es_version, expected): + result = elastalert.create_index.is_atleastsixtwo(es_version) + assert result == expected + + +@pytest.mark.parametrize('es_version, expected', [ + ('5.6.0', False), + ('6.0.0', False), + ('6.1.0', False), + ('6.2.0', False), + ('6.3.0', False), + ('6.4.0', False), + ('6.5.0', False), + ('6.6.0', False), + ('6.7.0', False), + ('6.8.0', False), + ('7.0.0', True), + ('7.1.0', True), + ('7.2.0', True), + ('7.3.0', True), + ('7.4.0', True), + ('7.5.0', True), + ('7.6.0', True), + ('7.7.0', True), + ('7.8.0', True), + ('7.9.0', True), + ('7.10.0', True), + ('7.11.0', True), + ('7.12.0', True), + ('7.13.0', True) +]) +def test_is_atleastseven(es_version, expected): + result = elastalert.create_index.is_atleastseven(es_version) + assert result == expected diff --git a/tests/example.config.logging.yaml b/tests/example.config.logging.yaml new file mode 100644 index 000000000..01b7532c3 --- /dev/null +++ b/tests/example.config.logging.yaml @@ -0,0 +1,70 @@ +rules_folder: /opt/elastalert/rules + +run_every: + seconds: 10 + +buffer_time: + minutes: 15 + +es_host: elasticsearch +es_port: 9200 + +es_username: elastic +es_password: $ELASTIC_PASS + +writeback_index: elastalert_status + +alert_time_limit: + days: 2 + +logging: + version: 1 + incremental: false + disable_existing_loggers: false + formatters: + logline: + format: '%(asctime)s %(levelname)+8s %(name)+20s %(message)s' + + handlers: + console: + class: logging.StreamHandler + formatter: logline + level: INFO + stream: ext://sys.stderr + + logging: + version: 1 + incremental: false + disable_existing_loggers: false + formatters: + logline: + format: '%(asctime)s %(levelname)+8s %(name)+20s %(message)s' + + handlers: + console: + class: logging.StreamHandler + formatter: logline + level: INFO + stream: ext://sys.stderr + + loggers: + elastalert: + level: INFO + handlers: [] 
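+      # no handlers of its own; records propagate up to the root logger's console handler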
+ propagate: true + + elasticsearch: + level: INFO + handlers: [] + propagate: true + + elasticsearch.trace: + level: INFO + handlers: [] + propagate: true + + '': # root logger + level: INFO + handlers: + - console + propagate: false \ No newline at end of file diff --git a/tests/example.config.not_found.buffer_time.yaml b/tests/example.config.not_found.buffer_time.yaml new file mode 100644 index 000000000..1503207e8 --- /dev/null +++ b/tests/example.config.not_found.buffer_time.yaml @@ -0,0 +1,15 @@ +rules_folder: /opt/elastalert/rules + +run_every: + seconds: 10 + +es_host: elasticsearch +es_port: 9200 + +es_username: elastic +es_password: $ELASTIC_PASS + +writeback_index: elastalert_status + +alert_time_limit: + days: 2 diff --git a/tests/example.config.not_found.es_host.yaml b/tests/example.config.not_found.es_host.yaml new file mode 100644 index 000000000..113493b69 --- /dev/null +++ b/tests/example.config.not_found.es_host.yaml @@ -0,0 +1,17 @@ +rules_folder: /opt/elastalert/rules + +run_every: + seconds: 10 + +buffer_time: + minutes: 15 + +es_port: 9200 + +es_username: elastic +es_password: $ELASTIC_PASS + +writeback_index: elastalert_status + +alert_time_limit: + days: 2 diff --git a/tests/example.config.not_found.es_port.yaml b/tests/example.config.not_found.es_port.yaml new file mode 100644 index 000000000..d6c783b06 --- /dev/null +++ b/tests/example.config.not_found.es_port.yaml @@ -0,0 +1,17 @@ +rules_folder: /opt/elastalert/rules + +run_every: + seconds: 10 + +buffer_time: + minutes: 15 + +es_host: elasticsearch + +es_username: elastic +es_password: $ELASTIC_PASS + +writeback_index: elastalert_status + +alert_time_limit: + days: 2 diff --git a/tests/example.config.not_found.run_every.yaml b/tests/example.config.not_found.run_every.yaml new file mode 100644 index 000000000..d61208933 --- /dev/null +++ b/tests/example.config.not_found.run_every.yaml @@ -0,0 +1,15 @@ +rules_folder: /opt/elastalert/rules + +buffer_time: + minutes: 15 + +es_host: elasticsearch +es_port: 9200 + +es_username: elastic +es_password: $ELASTIC_PASS + +writeback_index: elastalert_status + +alert_time_limit: + days: 2 diff --git a/tests/example.config.not_found.writeback_index.yaml b/tests/example.config.not_found.writeback_index.yaml new file mode 100644 index 000000000..1b779e0e7 --- /dev/null +++ b/tests/example.config.not_found.writeback_index.yaml @@ -0,0 +1,13 @@ +rules_folder: /opt/elastalert/rules + +run_every: + seconds: 10 + +es_host: elasticsearch +es_port: 9200 + +es_username: elastic +es_password: $ELASTIC_PASS + +alert_time_limit: + days: 2 diff --git a/tests/example.config.old_query_limit.yaml b/tests/example.config.old_query_limit.yaml new file mode 100644 index 000000000..f9235b70e --- /dev/null +++ b/tests/example.config.old_query_limit.yaml @@ -0,0 +1,21 @@ +rules_folder: /opt/elastalert/rules + +run_every: + seconds: 10 + +buffer_time: + minutes: 15 + +es_host: elasticsearch +es_port: 9200 + +es_username: elastic +es_password: $ELASTIC_PASS + +writeback_index: elastalert_status + +alert_time_limit: + days: 2 + +old_query_limit: + days: 3 \ No newline at end of file diff --git a/tests/example.config.type_error.buffer_time.yaml b/tests/example.config.type_error.buffer_time.yaml new file mode 100644 index 000000000..40474cf62 --- /dev/null +++ b/tests/example.config.type_error.buffer_time.yaml @@ -0,0 +1,17 @@ +rules_folder: /opt/elastalert/rules + +run_every: + seconds: 10 + +buffer_time: + +es_host: elasticsearch +es_port: 9200 + +es_username: elastic +es_password: $ELASTIC_PASS + 
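+# the empty buffer_time above makes load_conf raise "Invalid time format used"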
+writeback_index: elastalert_status + +alert_time_limit: + days: 2 diff --git a/tests/example.config.type_error.run_every.yaml b/tests/example.config.type_error.run_every.yaml new file mode 100644 index 000000000..088e27d4b --- /dev/null +++ b/tests/example.config.type_error.run_every.yaml @@ -0,0 +1,17 @@ +rules_folder: /opt/elastalert/rules + +run_every: + +buffer_time: + minutes: 15 + +es_host: elasticsearch +es_port: 9200 + +es_username: elastic +es_password: $ELASTIC_PASS + +writeback_index: elastalert_status + +alert_time_limit: + days: 2 diff --git a/tests/kibana_test.py b/tests/kibana_test.py index 646c569e9..941bdb006 100644 --- a/tests/kibana_test.py +++ b/tests/kibana_test.py @@ -5,6 +5,7 @@ from elastalert.kibana import dashboard_temp from elastalert.kibana import filters_from_dashboard from elastalert.kibana import kibana4_dashboard_link +from elastalert.util import EAException # Dashboard schema with only filters section @@ -39,23 +40,96 @@ "active": true, "alias": "", "id": 2 + }, + "3": { + "type": "range", + "field": "@timestamp", + "mandate": "must", + "active": true, + "alias": "", + "from": "2014-09-27T12:34:45Z", + "to": "2014-09-26T12:34:45Z", + "id": 3 + }, + "4": { + "field": "@timestamp", + "alias": "", + "mandate": "mustNot", + "active": true, + "query": "that", + "type": "field", + "id": 4 + }, + "5": { + "field": "@timestamp", + "alias": "", + "mandate": "either", + "active": true, + "query": "that", + "type": "field", + "id": 5 } }, "ids": [ 0, 1, - 2 + 2, + 3, + 4, + 5 ] } } }''' test_dashboard = json.loads(test_dashboard) +test_dashboard2 = '''{ + "title": "AD Lock Outs", + "services": { + "filter": { + "list": { + "0": { + "type": "time", + "field": "@timestamp", + "from": "now-7d", + "to": "now", + "mandate": "must", + "active": true, + "alias": "", + "id": 0 + }, + "1": { + "type": "field", + "field": "_log_type", + "query": "\\"active_directory\\"", + "mandate": "must", + "active": true, + "alias": "", + "id": 1 + } + }, + "ids": [ + 0, + 1 + ] + } + } +}''' +test_dashboard2 = json.loads(test_dashboard2) + def test_filters_from_dashboard(): filters = filters_from_dashboard(test_dashboard) assert {'term': {'_log_type': '"active_directory"'}} in filters assert {'query': {'query_string': {'query': 'ad.security_auditing_code:4740'}}} in filters + assert {'range': {'@timestamp': {'from': '2014-09-27T12:34:45Z', 'to': '2014-09-26T12:34:45Z'}}} in filters + assert {'not': {'term': {'@timestamp': 'that'}}} in filters + assert {'or': [{'term': {'@timestamp': 'that'}}]} in filters + + +def test_filters_from_dashboard2(): + filters = filters_from_dashboard(test_dashboard2) + assert {'term': {'_log_type': '"active_directory"'}} in filters def test_add_filter(): @@ -85,6 +159,66 @@ def test_add_filter(): 'id': 1 } + not_filter = {'not': {'term': {'this': 'that'}}} + db = copy.deepcopy(dashboard_temp) + add_filter(db, not_filter) + assert db['services']['filter']['list']['1'] == { + 'field': 'this', + 'alias': '', + 'mandate': 'mustNot', + 'active': True, + 'query': '"that"', + 'type': 'field', + 'id': 1 + } + + START_TIMESTAMP = '2014-09-26T12:34:45Z' + END_TIMESTAMP = '2014-09-27T12:34:45Z' + range_filter = {'range': {'@timestamp': {'lte': END_TIMESTAMP, 'gt': START_TIMESTAMP}}} + db = copy.deepcopy(dashboard_temp) + add_filter(db, range_filter) + assert db['services']['filter']['list']['1'] == { + 'field': '@timestamp', + 'alias': '', + 'mandate': 'must', + 'active': True, + 'lte': '2014-09-27T12:34:45Z', + 'gt': '2014-09-26T12:34:45Z', + 'type': 'range', + 'id': 1 
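+        # add_filter copies the range bounds (lte/gt) verbatim onto the Kibana filter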
+    }
+
+    query_filter = {'query': {'wildcard': 'this*that'}}
+    db = copy.deepcopy(dashboard_temp)
+    add_filter(db, query_filter)
+    assert db['services']['filter']['list']['1'] == {
+        'alias': '',
+        'mandate': 'must',
+        'active': True,
+        'id': 1
+    }
+
+    query_string_filter = {'query': {'query_string': {'query': 'ad.security_auditing_code:4740'}}}
+    db = copy.deepcopy(dashboard_temp)
+    add_filter(db, query_string_filter)
+    assert db['services']['filter']['list']['1'] == {
+        'alias': '',
+        'mandate': 'must',
+        'active': True,
+        'query': 'ad.security_auditing_code:4740',
+        'type': 'querystring',
+        'id': 1
+    }
+
+    try:
+        error_filter = {'bool': {'must': [{'range': {'@timestamp': {'lte': END_TIMESTAMP, 'gt': START_TIMESTAMP}}}]}}
+        db = copy.deepcopy(dashboard_temp)
+        add_filter(db, error_filter)
+    except EAException as ea:
+        expected = "Could not parse filter {'bool': {'must': [{'range': {'@timestamp': "
+        expected += "{'lte': '2014-09-27T12:34:45Z', 'gt': '2014-09-26T12:34:45Z'}}}]}} for Kibana"
+        assert expected == str(ea)
+

 def test_url_encoded():
     url = kibana4_dashboard_link('example.com/#/Dashboard', '2015-01-01T00:00:00Z', '2017-01-01T00:00:00Z')
diff --git a/tests/loaders_test.py b/tests/loaders_test.py
index 262b01253..51184aee2 100644
--- a/tests/loaders_test.py
+++ b/tests/loaders_test.py
@@ -11,6 +11,7 @@
 from elastalert.alerters.email import EmailAlerter
 from elastalert.config import load_conf
 from elastalert.loaders import FileRulesLoader
+from elastalert.loaders import RulesLoader
 from elastalert.util import EAException
 
 test_config = {'rules_folder': 'test_folder',
@@ -445,3 +446,35 @@ def test_kibana_discover_to_timedelta():
     rules_loader.load_options(test_rule_copy, test_config, 'filename.yaml')
     assert isinstance(test_rule_copy['kibana_discover_to_timedelta'], datetime.timedelta)
     assert test_rule_copy['kibana_discover_to_timedelta'] == datetime.timedelta(minutes=2)
+
+
+def test_rulesloader_get_names():
+    try:
+        RulesLoader.get_names('', '')
+        assert False
+    except NotImplementedError:
+        assert True
+
+
+def test_rulesloader_get_hashes():
+    try:
+        RulesLoader.get_hashes('', '')
+        assert False
+    except NotImplementedError:
+        assert True
+
+
+def test_rulesloader_get_yaml():
+    try:
+        RulesLoader.get_yaml('', '')
+        assert False
+    except NotImplementedError:
+        assert True
+
+
+def test_get_import_rule():
+    rule = {
+        'import': 'a'
+    }
+    result = RulesLoader.get_import_rule('', rule)
+    assert 'a' == result
diff --git a/tests/rules_test.py b/tests/rules_test.py
index e9b275089..8e1401d37 100644
--- a/tests/rules_test.py
+++ b/tests/rules_test.py
@@ -10,12 +10,14 @@
 from elastalert.ruletypes import BlacklistRule
 from elastalert.ruletypes import CardinalityRule
 from elastalert.ruletypes import ChangeRule
+from elastalert.ruletypes import CompareRule
 from elastalert.ruletypes import EventWindow
 from elastalert.ruletypes import FlatlineRule
 from elastalert.ruletypes import FrequencyRule
 from elastalert.ruletypes import MetricAggregationRule
 from elastalert.ruletypes import NewTermsRule
 from elastalert.ruletypes import PercentageMatchRule
+from elastalert.ruletypes import RuleType
 from elastalert.ruletypes import SpikeRule
 from elastalert.ruletypes import WhitelistRule
 from elastalert.util import dt_to_ts
@@ -132,6 +134,14 @@ def test_freq_count():
     rule.add_count_data({ts_to_dt('2014-10-10T01:00:00'): 75})
     assert len(rule.matches) == 1
 
+    # add_count_data should raise EAException on anything but a single {ts: count} entry
+    try:
+        rule = FrequencyRule(rules)
+        rule.add_count_data('aaaa')
+        assert False
+    except EAException as ea:
+        assert 'add_count_data can only accept 
one count at a time' in str(ea) + def test_freq_out_of_order(): events = hits(60, timestamp_field='blah', username='qlo') @@ -1273,3 +1282,49 @@ def test_percentage_match(): assert '76.1589403974' in rule.get_match_str(rule.matches[0]) rules['percentage_format_string'] = '%.2f' assert '76.16' in rule.get_match_str(rule.matches[0]) + + +def test_ruletype_add_data(): + try: + RuleType.garbage_collect('', '') + RuleType.add_data('', '') + assert False + except NotImplementedError: + assert True + + +def test_ruletype_garbage_collect(): + RuleType.garbage_collect('', '') + assert True + + +def test_ruletype_add_count_data(): + try: + RuleType.add_count_data('', '') + assert False + except NotImplementedError: + assert True + + +def test_ruletype_add_terms_data(): + try: + RuleType.add_terms_data('', '') + assert False + except NotImplementedError: + assert True + + +def test_ruletype_add_aggregation_data(): + try: + RuleType.add_aggregation_data('', '') + assert False + except NotImplementedError: + assert True + + +def test_comparerule_compare(): + try: + CompareRule.compare('', '') + assert False + except NotImplementedError: + assert True From 403823ca71e42e2f104bfdeb48987c2da87fc871 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Wed, 14 Jul 2021 19:48:46 +0900 Subject: [PATCH 0376/1065] Bump sphinx from 4.0.3 to 4.1.0 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index f295ef5b8..0bf907c13 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -10,6 +10,6 @@ pytest==6.2.4 pytest-cov==2.12.1 pytest-xdist==2.3.0 setuptools -sphinx==4.0.3 +sphinx==4.1.0 sphinx_rtd_theme tox==3.23.1 From ae90e18bae7c45068306fe02be3824f1aedd40df Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 15 Jul 2021 22:21:18 +0900 Subject: [PATCH 0377/1065] Bump sphinx from 4.1.0 to 4.1.1 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 0bf907c13..b09179b5c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -10,6 +10,6 @@ pytest==6.2.4 pytest-cov==2.12.1 pytest-xdist==2.3.0 setuptools -sphinx==4.1.0 +sphinx==4.1.1 sphinx_rtd_theme tox==3.23.1 From 186af392cd3e06414311581fa6a5474c67cbea07 Mon Sep 17 00:00:00 2001 From: nsano-rururu Date: Thu, 15 Jul 2021 22:29:02 +0900 Subject: [PATCH 0378/1065] Bump tox from 3.23.1 to 3.24.0 --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 0bf907c13..e74a666ee 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -12,4 +12,4 @@ pytest-xdist==2.3.0 setuptools sphinx==4.1.0 sphinx_rtd_theme -tox==3.23.1 +tox==3.24.0 From 00801be058d3f7d97fbafda4e28c861a38f456c1 Mon Sep 17 00:00:00 2001 From: AntoineBlaud Date: Fri, 16 Jul 2021 15:03:37 +0200 Subject: [PATCH 0379/1065] #340 fix match["query_key"] malformation --- elastalert/ruletypes.py | 6 +++--- elastalert/util.py | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 3 deletions(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index fce33f50b..08d02b986 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -7,7 +7,7 @@ from elastalert.util import (add_raw_postfix, dt_to_ts, EAException, elastalert_logger, elasticsearch_client, format_index, hashable, lookup_es_key, new_get_event_ts, pretty_ts, total_seconds, - ts_now, ts_to_dt) + ts_now, ts_to_dt, expand_string_into_dict) 
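+# Illustrative example: expand_string_into_dict() turns a dotted query_key such as
+# 'geo.city' into nested dicts, e.g.
+#     expand_string_into_dict({}, 'geo.city', 'Boston') -> {'geo': {'city': 'Boston'}}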
 class RuleType(object):
@@ -1096,7 +1096,7 @@ def check_matches(self, timestamp, query_key, aggregation_data):
         match = {self.rules['timestamp_field']: timestamp,
                  self.metric_key: metric_val}
         if query_key is not None:
-            match[self.rules['query_key']] = query_key
+            match = expand_string_into_dict(match, self.rules['query_key'], query_key)
         self.add_match(match)
 
     def check_matches_recursive(self, timestamp, query_key, aggregation_data, compound_keys, match_data):
@@ -1286,7 +1286,7 @@ def check_matches(self, timestamp, query_key, aggregation_data):
         if self.percentage_violation(match_percentage):
             match = {self.rules['timestamp_field']: timestamp, 'percentage': match_percentage, 'denominator': total_count}
             if query_key is not None:
-                match[self.rules['query_key']] = query_key
+                match = expand_string_into_dict(match, self.rules['query_key'], query_key)
             self.add_match(match)
 
     def percentage_violation(self, match_percentage):
diff --git a/elastalert/util.py b/elastalert/util.py
index 949639bdc..4549e6ea0 100644
--- a/elastalert/util.py
+++ b/elastalert/util.py
@@ -484,3 +484,40 @@ def should_scrolling_continue(rule_conf):
     stop_the_scroll = 0 < max_scrolling <= rule_conf.get('scrolling_cycle')
 
     return not stop_the_scroll
+
+
+def _expand_string_into_dict(string, value, sep='.'):
+    """
+    Converts a dotted string key into nested dicts, splitting on the separator (default '.').
+    Example:
+        string1.string2.string3 : value -> {string1: {string2: {string3: value}}}
+
+    :param string: The dotted string key
+    :param value: Value assigned to the innermost key
+    :param sep: Separator character. Default: '.'
+    :rtype: dict
+    """
+    if sep not in string:
+        return {string: value}
+    key, val = string.split(sep, 1)
+    return {key: _expand_string_into_dict(val, value)}
+
+
+def expand_string_into_dict(dictionary, string, value, sep='.'):
+    """
+    "Compiles" a dotted string key, as used in metric and percentage rules, into nested dicts inside an existing dictionary.
+
+    :param dictionary: The dictionary to update
+    :param string: The dotted string key
+    :param value: Value assigned to the innermost key
+    :param sep: Separator character. Default: '.'
+    :rtype: dict
+    """
+
+    if sep not in string:
+        dictionary[string] = value
+        return dictionary
+    else:
+        field1, new_string = string.split(sep, 1)
+        dictionary[field1] = _expand_string_into_dict(new_string, value)
+        return dictionary
From 45beaae20736b97b8c291f24a8dade2adb0fc43b Mon Sep 17 00:00:00 2001
From: Jason Ertel
Date: Sat, 17 Jul 2021 10:48:10 -0400
Subject: [PATCH 0380/1065] Add PR template

---
 .github/pull_request_template.md | 30 ++++++++++++++++++++++++++++++
 CONTRIBUTING.md                  |  2 +-
 2 files changed, 31 insertions(+), 1 deletion(-)
 create mode 100644 .github/pull_request_template.md

diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 000000000..2f33f1ef6
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,30 @@
+## Description
+
+
+
+## Checklist
+
+
+- [ ] I have reviewed the [contributing guidelines](../blob/master/CONTRIBUTING.md).
+- [ ] I have included unit tests for my changes or additions.
+- [ ] I have successfully run `make test-docker` with my changes
+- [ ] I have updated the [documentation](docs/docs) (if applicable).
+- [ ] I have updated the [changelog](../blob/master/CHANGELOG.md) (if applicable).
+ + +## Questions or Comments + + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5b0b2997a..1a3c9c9d4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,6 +1,6 @@ # Contributing to ElastAlert 2 -## Introduction +## Guidelines PRs are welcome, but must include tests, when possible. PRs will not be merged if they do not pass the automated CI workflows. To test your changes before creating a PR, run From 58c36b7e5fb195610c92d51a559efda4df5dfddc Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 17 Jul 2021 10:53:44 -0400 Subject: [PATCH 0381/1065] Add PR template --- .github/pull_request_template.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 2f33f1ef6..504c5d5d3 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -10,11 +10,11 @@ Provide a description for your pull request. Note any breaking changes. The following checklist items must be completed before PRs can be merged. --> -- [ ] I have reviewed the [contributing guidelines](../blob/master/CONTRIBUTING.md). +- [ ] I have reviewed the [contributing guidelines](CONTRIBUTING.md). - [ ] I have included unit tests for my changes or additions. - [ ] I have successfully run `make test-docker` with my changes -- [ ] I have updated the [documentation](docs/docs) (if applicable). -- [ ] I have updated the [changelog](../blob/master/CHANGELOG.md) (if applicable). +- [ ] I have updated the [documentation](docs) (if applicable). +- [ ] I have updated the [changelog](CHANGELOG.md) (if applicable). ## Questions or Comments From d57b615a45e1e3dfe9861689e2553bf61bd64f9d Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 17 Jul 2021 10:56:34 -0400 Subject: [PATCH 0382/1065] Add PR template --- .github/pull_request_template.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 504c5d5d3..4f17158cd 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -10,11 +10,11 @@ Provide a description for your pull request. Note any breaking changes. The following checklist items must be completed before PRs can be merged. --> -- [ ] I have reviewed the [contributing guidelines](CONTRIBUTING.md). +- [ ] I have reviewed the [contributing guidelines](/jertel/elastalert2/blob/master/CONTRIBUTING.md). - [ ] I have included unit tests for my changes or additions. - [ ] I have successfully run `make test-docker` with my changes -- [ ] I have updated the [documentation](docs) (if applicable). -- [ ] I have updated the [changelog](CHANGELOG.md) (if applicable). +- [ ] I have updated the [documentation](https://elastalert2.readthedocs.io) (if applicable). +- [ ] I have updated the [changelog](/jertel/elastalert2/blob/master/CHANGELOG.md) (if applicable). ## Questions or Comments From 2adba89f6f2374ca0a09ca5a06b3b7cd6fdf56d6 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 17 Jul 2021 10:57:30 -0400 Subject: [PATCH 0383/1065] Add PR template --- .github/pull_request_template.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 4f17158cd..a791a1386 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -10,11 +10,11 @@ Provide a description for your pull request. Note any breaking changes. The following checklist items must be completed before PRs can be merged. 
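 Mark each completed item by replacing [ ] with [x] in the raw markdown.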
--> -- [ ] I have reviewed the [contributing guidelines](/jertel/elastalert2/blob/master/CONTRIBUTING.md). +- [ ] I have reviewed the [contributing guidelines](https://github.com/jertel/elastalert2/blob/master/CONTRIBUTING.md). - [ ] I have included unit tests for my changes or additions. - [ ] I have successfully run `make test-docker` with my changes - [ ] I have updated the [documentation](https://elastalert2.readthedocs.io) (if applicable). -- [ ] I have updated the [changelog](/jertel/elastalert2/blob/master/CHANGELOG.md) (if applicable). +- [ ] I have updated the [changelog](https://github.com/jertel/elastalert2/blob/master/CHANGELOG.md) (if applicable). ## Questions or Comments From f14e4ed9152b3783fd11140584478aae8c2a7e80 Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Sat, 17 Jul 2021 10:59:23 -0400 Subject: [PATCH 0384/1065] Add PR template --- .github/pull_request_template.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index a791a1386..5d37975df 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -26,5 +26,5 @@ items are not checked. If you have questions about completing this PR, or about the process, note them here. -If you are not ready for this PR to be reviewed please mention that here.. +If you are not ready for this PR to be reviewed please mention that here. --> From 8aa27c7f8c8791379c3ff0fa0455221a676b6e52 Mon Sep 17 00:00:00 2001 From: Cedric Charest Date: Mon, 19 Jul 2021 11:30:04 -0400 Subject: [PATCH 0385/1065] Bump Jinja2 in setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index f1ce6db00..039dcca31 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,7 @@ 'envparse>=0.2.0', 'exotel>=0.1.3', 'jira>=2.0.0', - 'Jinja2==2.11.3', + 'Jinja2==3.0.1', 'jsonschema>=3.0.2', 'prison>=0.1.2', 'prometheus_client>=0.10.1', From b52860e33ee0813d4faf24c98df68d7b3398813d Mon Sep 17 00:00:00 2001 From: Jason Ertel Date: Mon, 19 Jul 2021 13:13:39 -0400 Subject: [PATCH 0386/1065] Clarified PR requirements --- .github/pull_request_template.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 5d37975df..c28b37316 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -12,9 +12,9 @@ The following checklist items must be completed before PRs can be merged. - [ ] I have reviewed the [contributing guidelines](https://github.com/jertel/elastalert2/blob/master/CONTRIBUTING.md). - [ ] I have included unit tests for my changes or additions. -- [ ] I have successfully run `make test-docker` with my changes -- [ ] I have updated the [documentation](https://elastalert2.readthedocs.io) (if applicable). -- [ ] I have updated the [changelog](https://github.com/jertel/elastalert2/blob/master/CHANGELOG.md) (if applicable). +- [ ] I have successfully run `make test-docker` with my changes. +- [ ] I have updated the [documentation](https://elastalert2.readthedocs.io). +- [ ] I have updated the [changelog](https://github.com/jertel/elastalert2/blob/master/CHANGELOG.md). ## Questions or Comments @@ -22,7 +22,7 @@ The following checklist items must be completed before PRs can be merged. 
'None' + key_value = unicode(key_value) + except KeyError: + # Some matches may not have the specified key + # use a special token for these + key_value = '_missing' + else: + key_value = None + + return key_value + + def enhance_filter(self, rule): + """ If there is a blacklist or whitelist in rule then we add it to the filter. + It adds it as a query_string. If there is already an query string its is appended + with blacklist or whitelist. + + :param rule: + :return: + """ + if not rule.get('filter_by_list', True): + return + if 'blacklist' in rule: + listname = 'blacklist' + elif 'whitelist' in rule: + listname = 'whitelist' + else: + return + + filters = rule['filter'] + additional_terms = [(rule['compare_key'] + ':"' + term + '"') for term in rule[listname]] + if listname == 'whitelist': + query = "NOT " + " AND NOT ".join(additional_terms) + else: + query = " OR ".join(additional_terms) + query_str_filter = {'query_string': {'query': query}} + if self.is_atleastfive(): + filters.append(query_str_filter) + else: + filters.append({'query': query_str_filter}) + logging.debug("Enhanced filter with {} terms: {}".format(listname, str(query_str_filter))) + + def run_rule(self, rule, endtime, starttime=None): + """ Run a rule for a given time period, including querying and alerting on results. + + :param rule: The rule configuration. + :param starttime: The earliest timestamp to query. + :param endtime: The latest timestamp to query. + :return: The number of matches that the rule produced. + """ + run_start = time.time() + + self.current_es = kibana_adapter_client(rule) + self.current_es_addr = (rule['es_host'], rule['es_port']) + + # If there are pending aggregate matches, try processing them + for x in range(len(rule['agg_matches'])): + match = rule['agg_matches'].pop() + self.add_aggregated_alert(match, rule) + + # Start from provided time if it's given + if starttime: + rule['starttime'] = starttime + else: + self.set_starttime(rule, endtime) + + rule['original_starttime'] = rule['starttime'] + + # Don't run if starttime was set to the future + if ts_now() <= rule['starttime']: + logging.warning("Attempted to use query start time in the future (%s), sleeping instead" % (starttime)) + return 0 + + # Run the rule. 
If querying over a large time period, split it up into segments + self.num_hits = 0 + self.num_dupes = 0 + self.cumulative_hits = 0 + segment_size = self.get_segment_size(rule) + + tmp_endtime = rule['starttime'] + + while endtime - rule['starttime'] > segment_size: + tmp_endtime = tmp_endtime + segment_size + if not self.run_query(rule, rule['starttime'], tmp_endtime): + return 0 + self.cumulative_hits += self.num_hits + self.num_hits = 0 + rule['starttime'] = tmp_endtime + rule['type'].garbage_collect(tmp_endtime) + + if rule.get('aggregation_query_element'): + if endtime - tmp_endtime == segment_size: + self.run_query(rule, tmp_endtime, endtime) + self.cumulative_hits += self.num_hits + elif total_seconds(rule['original_starttime'] - tmp_endtime) == 0: + rule['starttime'] = rule['original_starttime'] + return 0 + else: + endtime = tmp_endtime + else: + if not self.run_query(rule, rule['starttime'], endtime): + return 0 + self.cumulative_hits += self.num_hits + rule['type'].garbage_collect(endtime) + + # Process any new matches + num_matches = len(rule['type'].matches) + while rule['type'].matches: + match = rule['type'].matches.pop(0) + match['num_hits'] = self.cumulative_hits + match['num_matches'] = num_matches + + # If realert is set, silence the rule for that duration + # Silence is cached by query_key, if it exists + # Default realert time is 0 seconds + silence_cache_key = rule['name'] + query_key_value = self.get_query_key_value(rule, match) + if query_key_value is not None: + silence_cache_key += '.' + query_key_value + + if self.is_silenced(rule['name'] + "._silence") or self.is_silenced(silence_cache_key): + elastalert_logger.info('Ignoring match for silenced rule %s' % (silence_cache_key,)) + continue + + if rule['realert']: + next_alert, exponent = self.next_alert_time(rule, silence_cache_key, ts_now()) + self.set_realert(silence_cache_key, next_alert, exponent) + + if rule.get('run_enhancements_first'): + try: + for enhancement in rule['match_enhancements']: + try: + enhancement.process(match) + except EAException as e: + self.handle_error("Error running match enhancement: %s" % (e), {'rule': rule['name']}) + except DropMatchException: + continue + + # If no aggregation, alert immediately + if not rule['aggregation']: + self.alert([match], rule) + continue + + # Add it as an aggregated match + self.add_aggregated_alert(match, rule) + + # Mark this endtime for next run's start + rule['previous_endtime'] = endtime + + time_taken = time.time() - run_start + # Write to ES that we've run this rule against this time period + body = {'rule_name': rule['name'], + 'endtime': endtime, + 'starttime': rule['original_starttime'], + 'matches': num_matches, + 'hits': max(self.num_hits, self.cumulative_hits), + '@timestamp': ts_now(), + 'time_taken': time_taken} + self.writeback('elastalert_status', body) + + return num_matches + + def init_rule(self, new_rule, new=True): + ''' Copies some necessary non-config state from an exiting rule to a new rule. ''' + try: + self.modify_rule_for_ES5(new_rule) + except TransportError as e: + elastalert_logger.warning('Error connecting to Elasticsearch for rule {}. 
' + 'The rule has been disabled.'.format(new_rule['name'])) + self.send_notification_email(exception=e, rule=new_rule) + return False + + self.enhance_filter(new_rule) + + # Change top_count_keys to .raw + if 'top_count_keys' in new_rule and new_rule.get('raw_count_keys', True): + if self.string_multi_field_name: + string_multi_field_name = self.string_multi_field_name + elif self.is_atleastfive(): + string_multi_field_name = '.keyword' + else: + string_multi_field_name = '.raw' + + for i, key in enumerate(new_rule['top_count_keys']): + if not key.endswith(string_multi_field_name): + new_rule['top_count_keys'][i] += string_multi_field_name + + if 'download_dashboard' in new_rule['filter']: + # Download filters from Kibana and set the rules filters to them + db_filters = self.filters_from_kibana(new_rule, new_rule['filter']['download_dashboard']) + if db_filters is not None: + new_rule['filter'] = db_filters + else: + raise EAException("Could not download filters from %s" % (new_rule['filter']['download_dashboard'])) + + blank_rule = {'agg_matches': [], + 'aggregate_alert_time': {}, + 'current_aggregate_id': {}, + 'processed_hits': {}} + rule = blank_rule + + # Set rule to either a blank template or existing rule with same name + if not new: + for rule in self.rules: + if rule['name'] == new_rule['name']: + break + else: + rule = blank_rule + + copy_properties = ['agg_matches', + 'current_aggregate_id', + 'aggregate_alert_time', + 'processed_hits', + 'starttime', + 'minimum_starttime'] + for prop in copy_properties: + if prop not in rule: + continue + new_rule[prop] = rule[prop] + + return new_rule + + @staticmethod + def modify_rule_for_ES5(new_rule): + new_rule['five'] = True + # return + # Get ES version per rule + # rule_es = elasticsearch_client(new_rule) + # if int(rule_es.info()['version']['number'].split(".")[0]) >= 5: + # new_rule['five'] = True + # else: + # new_rule['five'] = False + # return + + # In ES5, filters starting with 'query' should have the top wrapper removed + new_filters = [] + for es_filter in new_rule.get('filter', []): + if es_filter.get('query'): + new_filters.append(es_filter['query']) + else: + new_filters.append(es_filter) + new_rule['filter'] = new_filters + + def load_rule_changes(self): + ''' Using the modification times of rule config files, syncs the running rules + to match the files in rules_folder by removing, adding or reloading rules. ''' + new_rule_hashes = get_rule_hashes(self.conf, self.args.rule) + + # Check each current rule for changes + for rule_file, hash_value in self.rule_hashes.iteritems(): + if rule_file not in new_rule_hashes: + # Rule file was deleted + elastalert_logger.info('Rule file %s not found, stopping rule execution' % (rule_file)) + self.rules = [rule for rule in self.rules if rule['rule_file'] != rule_file] + continue + if hash_value != new_rule_hashes[rule_file]: + # Rule file was changed, reload rule + try: + new_rule = load_configuration(rule_file, self.conf) + if 'is_enabled' in new_rule and not new_rule['is_enabled']: + elastalert_logger.info('Rule file %s is now disabled.' % (rule_file)) + # Remove this rule if it's been disabled + self.rules = [rule for rule in self.rules if rule['rule_file'] != rule_file] + continue + except EAException as e: + message = 'Could not load rule %s: %s' % (rule_file, e) + self.handle_error(message) + # Want to send email to address specified in the rule. Try and load the YAML to find it. 
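+                # (if the YAML itself fails to parse, the notification is sent without rule details)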
+ with open(rule_file) as f: + try: + rule_yaml = yaml.load(f) + except yaml.scanner.ScannerError: + self.send_notification_email(exception=e) + continue + + self.send_notification_email(exception=e, rule=rule_yaml) + continue + elastalert_logger.info("Reloading configuration for rule %s" % (rule_file)) + + # Re-enable if rule had been disabled + for disabled_rule in self.disabled_rules: + if disabled_rule['name'] == new_rule['name']: + self.rules.append(disabled_rule) + self.disabled_rules.remove(disabled_rule) + break + + # Initialize the rule that matches rule_file + new_rule = self.init_rule(new_rule, False) + self.rules = [rule for rule in self.rules if rule['rule_file'] != rule_file] + if new_rule: + self.rules.append(new_rule) + + # Load new rules + if not self.args.rule: + for rule_file in set(new_rule_hashes.keys()) - set(self.rule_hashes.keys()): + try: + new_rule = load_configuration(rule_file, self.conf) + if 'is_enabled' in new_rule and not new_rule['is_enabled']: + continue + if new_rule['name'] in [rule['name'] for rule in self.rules]: + raise EAException("A rule with the name %s already exists" % (new_rule['name'])) + except EAException as e: + self.handle_error('Could not load rule %s: %s' % (rule_file, e)) + self.send_notification_email(exception=e, rule_file=rule_file) + continue + if self.init_rule(new_rule): + elastalert_logger.info('Loaded new rule %s' % (rule_file)) + self.rules.append(new_rule) + + self.rule_hashes = new_rule_hashes + + def start(self): + """ Periodically go through each rule and run it """ + if self.starttime: + if self.starttime == 'NOW': + self.starttime = ts_now() + else: + try: + self.starttime = ts_to_dt(self.starttime) + except (TypeError, ValueError): + self.handle_error("%s is not a valid ISO8601 timestamp (YYYY-MM-DDTHH:MM:SS+XX:00)" % (self.starttime)) + exit(1) + self.wait_until_responsive(timeout=self.args.timeout) + self.running = True + elastalert_logger.info("Starting up") + while self.running: + next_run = datetime.datetime.utcnow() + self.run_every + + self.run_all_rules() + + # Quit after end_time has been reached + if self.args.end: + endtime = ts_to_dt(self.args.end) + + if next_run.replace(tzinfo=dateutil.tz.tzutc()) > endtime: + exit(0) + + if next_run < datetime.datetime.utcnow(): + continue + + # Wait before querying again + sleep_duration = total_seconds(next_run - datetime.datetime.utcnow()) + self.sleep_for(sleep_duration) + + def wait_until_responsive(self, timeout, clock=timeit.default_timer): + """Wait until ElasticSearch becomes responsive (or too much time passes).""" + + # Elapsed time is a floating point number of seconds. + timeout = timeout.total_seconds() + + # Don't poll unless we're asked to. + if timeout <= 0.0: + return + + # Periodically poll ElasticSearch. Keep going until ElasticSearch is + # responsive *and* the writeback index exists. 
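+        # take a reference time, then probe once per second until the deadline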
+ ref = clock() + while (clock() - ref) < timeout: + try: + if self.writeback_es.indices.exists(self.writeback_index): + return + except ConnectionError: + pass + time.sleep(1.0) + + if self.writeback_es.ping(): + logging.error( + 'Writeback index "%s" does not exist, did you run `elastalert-create-index`?', + self.writeback_index, + ) + else: + logging.error( + 'Could not reach ElasticSearch at "%s:%d".', + self.conf['es_host'], + self.conf['es_port'], + ) + exit(1) + + def run_all_rules(self): + """ Run each rule one time """ + self.send_pending_alerts() + + next_run = datetime.datetime.utcnow() + self.run_every + + for rule in self.rules: + # Set endtime based on the rule's delay + delay = rule.get('query_delay') + if hasattr(self.args, 'end') and self.args.end: + endtime = ts_to_dt(self.args.end) + elif delay: + endtime = ts_now() - delay + else: + endtime = ts_now() + + try: + num_matches = self.run_rule(rule, endtime, self.starttime) + except EAException as e: + self.handle_error("Error running rule %s: %s" % (rule['name'], e), {'rule': rule['name']}) + except Exception as e: + self.handle_uncaught_exception(e, rule) + else: + old_starttime = pretty_ts(rule.get('original_starttime'), rule.get('use_local_time')) + total_hits = max(self.num_hits, self.cumulative_hits) + elastalert_logger.info("Ran %s from %s to %s: %s query hits (%s already seen), %s matches," + " %s alerts sent" % (rule['name'], old_starttime, pretty_ts(endtime, rule.get('use_local_time')), + total_hits, self.num_dupes, num_matches, self.alerts_sent)) + self.alerts_sent = 0 + + if next_run < datetime.datetime.utcnow(): + # We were processing for longer than our refresh interval + # This can happen if --start was specified with a large time period + # or if we are running too slow to process events in real time. + logging.warning( + "Querying from %s to %s took longer than %s!" % ( + old_starttime, + pretty_ts(endtime, rule.get('use_local_time')), + self.run_every + ) + ) + + self.remove_old_events(rule) + + # Only force starttime once + self.starttime = None + + if not self.args.pin_rules: + self.load_rule_changes() + + def stop(self): + """ Stop an ElastAlert runner that's been started """ + self.running = False + + def sleep_for(self, duration): + """ Sleep for a set duration """ + elastalert_logger.info("Sleeping for %s seconds" % (duration)) + time.sleep(duration) + + def generate_kibana4_db(self, rule, match): + ''' Creates a link for a kibana4 dashboard which has time set to the match. ''' + db_name = rule.get('use_kibana4_dashboard') + start = ts_add( + lookup_es_key(match, rule['timestamp_field']), + -rule.get('kibana4_start_timedelta', rule.get('timeframe', datetime.timedelta(minutes=10))) + ) + end = ts_add( + lookup_es_key(match, rule['timestamp_field']), + rule.get('kibana4_end_timedelta', rule.get('timeframe', datetime.timedelta(minutes=10))) + ) + return kibana.kibana4_dashboard_link(db_name, start, end) + + def generate_kibana_db(self, rule, match): + ''' Uses a template dashboard to upload a temp dashboard showing the match. + Returns the url to the dashboard. 
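+        The template comes from kibana.dashboard_temp; filters and the index name are filled in from the rule.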
''' + db = copy.deepcopy(kibana.dashboard_temp) + + # Set timestamp fields to match our rule especially if + # we have configured something other than @timestamp + kibana.set_timestamp_field(db, rule['timestamp_field']) + + # Set filters + for filter in rule['filter']: + if filter: + kibana.add_filter(db, filter) + kibana.set_included_fields(db, rule['include']) + + # Set index + index = self.get_index(rule) + kibana.set_index_name(db, index) + + return self.upload_dashboard(db, rule, match) + + def upload_dashboard(self, db, rule, match): + ''' Uploads a dashboard schema to the kibana-int Elasticsearch index associated with rule. + Returns the url to the dashboard. ''' + # Set time range + start = ts_add(lookup_es_key(match, rule['timestamp_field']), -rule.get('timeframe', datetime.timedelta(minutes=10))) + end = ts_add(lookup_es_key(match, rule['timestamp_field']), datetime.timedelta(minutes=10)) + kibana.set_time(db, start, end) + + # Set dashboard name + db_name = 'ElastAlert - %s - %s' % (rule['name'], end) + kibana.set_name(db, db_name) + + # Add filter for query_key value + if 'query_key' in rule: + for qk in rule.get('compound_query_key', [rule['query_key']]): + if qk in match: + term = {'term': {qk: match[qk]}} + kibana.add_filter(db, term) + + # Add filter for aggregation_key value + if 'aggregation_key' in rule: + for qk in rule.get('compound_aggregation_key', [rule['aggregation_key']]): + if qk in match: + term = {'term': {qk: match[qk]}} + kibana.add_filter(db, term) + + # Convert to json + db_js = json.dumps(db) + db_body = {'user': 'guest', + 'group': 'guest', + 'title': db_name, + 'dashboard': db_js} + + # Upload + es = elasticsearch_client(rule) + + res = es.index(index='kibana-int', + doc_type='temp', + body=db_body) + + # Return dashboard URL + kibana_url = rule.get('kibana_url') + if not kibana_url: + kibana_url = 'http://%s:%s/_plugin/kibana/' % (rule['es_host'], + rule['es_port']) + return kibana_url + '#/dashboard/temp/%s' % (res['_id']) + + def get_dashboard(self, rule, db_name): + """ Download dashboard which matches use_kibana_dashboard from Elasticsearch. """ + es = elasticsearch_client(rule) + if not db_name: + raise EAException("use_kibana_dashboard undefined") + query = {'query': {'term': {'_id': db_name}}} + try: + res = es.search(index='kibana-int', doc_type='dashboard', body=query, _source_include=['dashboard']) + except ElasticsearchException as e: + raise EAException("Error querying for dashboard: %s" % (e)), None, sys.exc_info()[2] + + if res['hits']['hits']: + return json.loads(res['hits']['hits'][0]['_source']['dashboard']) + else: + raise EAException("Could not find dashboard named %s" % (db_name)) + + def use_kibana_link(self, rule, match): + """ Uploads an existing dashboard as a temp dashboard modified for match time. + Returns the url to the dashboard. """ + # Download or get cached dashboard + dashboard = rule.get('dashboard_schema') + if not dashboard: + db_name = rule.get('use_kibana_dashboard') + dashboard = self.get_dashboard(rule, db_name) + if dashboard: + rule['dashboard_schema'] = dashboard + else: + return None + dashboard = copy.deepcopy(dashboard) + return self.upload_dashboard(dashboard, rule, match) + + def filters_from_kibana(self, rule, db_name): + """ Downloads a dashboard from Kibana and returns corresponding filters, None on error. 
""" + try: + db = rule.get('dashboard_schema') + if not db: + db = self.get_dashboard(rule, db_name) + filters = kibana.filters_from_dashboard(db) + except EAException: + return None + return filters + + def alert(self, matches, rule, alert_time=None, retried=False): + """ Wraps alerting, Kibana linking and enhancements in an exception handler """ + try: + return self.send_alert(matches, rule, alert_time=alert_time, retried=retried) + except Exception as e: + self.handle_uncaught_exception(e, rule) + + def send_alert(self, matches, rule, alert_time=None, retried=False): + """ Send out an alert. + + :param matches: A list of matches. + :param rule: A rule configuration. + """ + if not matches: + return + + if alert_time is None: + alert_time = ts_now() + + # Compute top count keys + if rule.get('top_count_keys'): + for match in matches: + if 'query_key' in rule and rule['query_key'] in match: + qk = match[rule['query_key']] + else: + qk = None + + if isinstance(rule['type'], FlatlineRule): + # flatline rule triggers when there have been no events from now()-timeframe to now(), + # so using now()-timeframe will return no results. for now we can just mutliple the timeframe + # by 2, but this could probably be timeframe+run_every to prevent too large of a lookup? + timeframe = datetime.timedelta(seconds=2 * rule.get('timeframe').total_seconds()) + else: + timeframe = rule.get('timeframe', datetime.timedelta(minutes=10)) + + start = ts_to_dt(lookup_es_key(match, rule['timestamp_field'])) - timeframe + end = ts_to_dt(lookup_es_key(match, rule['timestamp_field'])) + datetime.timedelta(minutes=10) + keys = rule.get('top_count_keys') + counts = self.get_top_counts(rule, start, end, keys, qk=qk) + match.update(counts) + + # Generate a kibana3 dashboard for the first match + if rule.get('generate_kibana_link') or rule.get('use_kibana_dashboard'): + try: + if rule.get('generate_kibana_link'): + kb_link = self.generate_kibana_db(rule, matches[0]) + else: + kb_link = self.use_kibana_link(rule, matches[0]) + except EAException as e: + self.handle_error("Could not generate Kibana dash for %s match: %s" % (rule['name'], e)) + else: + if kb_link: + matches[0]['kibana_link'] = kb_link + + if rule.get('use_kibana4_dashboard'): + kb_link = self.generate_kibana4_db(rule, matches[0]) + if kb_link: + matches[0]['kibana_link'] = kb_link + + # Enhancements were already run at match time if + # run_enhancements_first is set or + # retried==True, which means this is a retry of a failed alert + if not rule.get('run_enhancements_first') and not retried: + for enhancement in rule['match_enhancements']: + valid_matches = [] + for match in matches: + try: + enhancement.process(match) + valid_matches.append(match) + except DropMatchException as e: + pass + except EAException as e: + self.handle_error("Error running match enhancement: %s" % (e), {'rule': rule['name']}) + matches = valid_matches + if not matches: + return None + + # Don't send real alerts in debug mode + if self.debug: + alerter = DebugAlerter(rule) + alerter.alert(matches) + return None + + # Run the alerts + alert_sent = False + alert_exception = None + # Alert.pipeline is a single object shared between every alerter + # This allows alerters to pass objects and data between themselves + alert_pipeline = {"alert_time": alert_time} + for alert in rule['alert']: + alert.pipeline = alert_pipeline + try: + alert.alert(matches) + except EAException as e: + self.handle_error('Error while running alert %s: %s' % (alert.get_info()['type'], e), {'rule': 
rule['name']}) + alert_exception = str(e) + else: + self.alerts_sent += 1 + alert_sent = True + + # Write the alert(s) to ES + agg_id = None + for match in matches: + alert_body = self.get_alert_body(match, rule, alert_sent, alert_time, alert_exception) + # Set all matches to aggregate together + if agg_id: + alert_body['aggregate_id'] = agg_id + res = self.writeback('elastalert', alert_body) + if res and not agg_id: + agg_id = res['_id'] + + def get_alert_body(self, match, rule, alert_sent, alert_time, alert_exception=None): + body = { + 'match_body': match, + 'rule_name': rule['name'], + 'alert_info': rule['alert'][0].get_info() if not self.debug else {}, + 'alert_sent': alert_sent, + 'alert_time': alert_time + } + + match_time = lookup_es_key(match, rule['timestamp_field']) + if match_time is not None: + body['match_time'] = match_time + + # TODO record info about multiple alerts + + # If the alert failed to send, record the exception + if not alert_sent: + body['alert_exception'] = alert_exception + return body + + def writeback(self, doc_type, body): + writeback_index = self.writeback_index + if(self.is_atleastsix()): + writeback_index = self.get_six_index(doc_type) + + # ES 2.0 - 2.3 does not support dots in field names. + if self.replace_dots_in_field_names: + writeback_body = replace_dots_in_field_names(body) + else: + writeback_body = body + + for key in writeback_body.keys(): + # Convert any datetime objects to timestamps + if isinstance(writeback_body[key], datetime.datetime): + writeback_body[key] = dt_to_ts(writeback_body[key]) + + if self.debug: + elastalert_logger.info("Skipping writing to ES: %s" % (writeback_body)) + return None + + if '@timestamp' not in writeback_body: + writeback_body['@timestamp'] = dt_to_ts(ts_now()) + + try: + res = self.writeback_es.index(index=writeback_index, + doc_type=doc_type, body=body) + return res + except ElasticsearchException as e: + logging.exception("Error writing alert info to Elasticsearch: %s" % (e)) + + def find_recent_pending_alerts(self, time_limit): + """ Queries writeback_es to find alerts that did not send + and are newer than time_limit """ + + # XXX only fetches 1000 results. If limit is reached, next loop will catch them + # unless there is constantly more than 1000 alerts to send. + + # Fetch recent, unsent alerts that aren't part of an aggregate, earlier alerts first. 
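+        # '!_exists_:aggregate_id AND alert_sent:false' is Lucene query_string syntax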
+ inner_query = {'query_string': {'query': '!_exists_:aggregate_id AND alert_sent:false'}} + time_filter = {'range': {'alert_time': {'from': dt_to_ts(ts_now() - time_limit), + 'to': dt_to_ts(ts_now())}}} + sort = {'sort': {'alert_time': {'order': 'asc'}}} + if self.is_atleastfive(): + query = {'query': {'bool': {'must': inner_query, 'filter': time_filter}}} + else: + query = {'query': inner_query, 'filter': time_filter} + query.update(sort) + try: + res = self.writeback_es.search(index=self.writeback_index, + doc_type='elastalert', + body=query, + size=1000) + if res['hits']['hits']: + return res['hits']['hits'] + except ElasticsearchException as e: + logging.exception("Error finding recent pending alerts: %s %s" % (e, query)) + return [] + + def send_pending_alerts(self): + pending_alerts = self.find_recent_pending_alerts(self.alert_time_limit) + for alert in pending_alerts: + _id = alert['_id'] + alert = alert['_source'] + try: + rule_name = alert.pop('rule_name') + alert_time = alert.pop('alert_time') + match_body = alert.pop('match_body') + except KeyError: + # Malformed alert, drop it + continue + + # Find original rule + for rule in self.rules: + if rule['name'] == rule_name: + break + else: + # Original rule is missing, keep alert for later if rule reappears + continue + + # Set current_es for top_count_keys query + self.current_es = elasticsearch_client(rule) + self.current_es_addr = (rule['es_host'], rule['es_port']) + + # Send the alert unless it's a future alert + if ts_now() > ts_to_dt(alert_time): + aggregated_matches = self.get_aggregated_matches(_id) + if aggregated_matches: + matches = [match_body] + [agg_match['match_body'] for agg_match in aggregated_matches] + self.alert(matches, rule, alert_time=alert_time) + else: + # If this rule isn't using aggregation, this must be a retry of a failed alert + retried = False + if not rule.get('aggregation'): + retried = True + self.alert([match_body], rule, alert_time=alert_time, retried=retried) + + if rule['current_aggregate_id']: + for qk, agg_id in rule['current_aggregate_id'].iteritems(): + if agg_id == _id: + rule['current_aggregate_id'].pop(qk) + break + + # Delete it from the index + try: + self.writeback_es.delete(index=self.writeback_index, + doc_type='elastalert', + id=_id) + except ElasticsearchException: # TODO: Give this a more relevant exception, try:except: is evil. + self.handle_error("Failed to delete alert %s at %s" % (_id, alert_time)) + + # Send in memory aggregated alerts + for rule in self.rules: + if rule['agg_matches']: + for aggregation_key_value, aggregate_alert_time in rule['aggregate_alert_time'].iteritems(): + if ts_now() > aggregate_alert_time: + alertable_matches = [ + agg_match + for agg_match + in rule['agg_matches'] + if self.get_aggregation_key_value(rule, agg_match) == aggregation_key_value + ] + self.alert(alertable_matches, rule) + rule['agg_matches'] = [ + agg_match + for agg_match + in rule['agg_matches'] + if self.get_aggregation_key_value(rule, agg_match) != aggregation_key_value + ] + + def get_aggregated_matches(self, _id): + """ Removes and returns all matches from writeback_es that have aggregate_id == _id """ + + # XXX if there are more than self.max_aggregation matches, you have big alerts and we will leave entries in ES. 
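+        # fetch up to max_aggregation matches for this aggregate_id, oldest first, deleting each as it is collected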
+ query = {'query': {'query_string': {'query': 'aggregate_id:%s' % (_id)}}, 'sort': {'@timestamp': 'asc'}} + matches = [] + try: + res = self.writeback_es.search(index=self.writeback_index, + doc_type='elastalert', + body=query, + size=self.max_aggregation) + for match in res['hits']['hits']: + matches.append(match['_source']) + self.writeback_es.delete(index=self.writeback_index, + doc_type='elastalert', + id=match['_id']) + except (KeyError, ElasticsearchException) as e: + self.handle_error("Error fetching aggregated matches: %s" % (e), {'id': _id}) + return matches + + def find_pending_aggregate_alert(self, rule, aggregation_key_value=None): + query = {'filter': {'bool': {'must': [{'term': {'rule_name': rule['name']}}, + {'range': {'alert_time': {'gt': ts_now()}}}, + {'term': {'alert_sent': 'false'}}], + 'must_not': [{'exists': {'field': 'aggregate_id'}}]}}} + if aggregation_key_value: + query['filter']['bool']['must'].append({'term': {'aggregation_key': aggregation_key_value}}) + if self.is_atleastfive(): + query = {'query': {'bool': query}} + query['sort'] = {'alert_time': {'order': 'desc'}} + try: + res = self.writeback_es.search(index=self.writeback_index, + doc_type='elastalert', + body=query, + size=1) + if len(res['hits']['hits']) == 0: + return None + except (KeyError, ElasticsearchException) as e: + self.handle_error("Error searching for pending aggregated matches: %s" % (e), {'rule_name': rule['name']}) + return None + + return res['hits']['hits'][0] + + def add_aggregated_alert(self, match, rule): + """ Save a match as a pending aggregate alert to Elasticsearch. """ + + # Optionally include the 'aggregation_key' as a dimension for aggregations + aggregation_key_value = self.get_aggregation_key_value(rule, match) + + if (not rule['current_aggregate_id'].get(aggregation_key_value) or + ('aggregate_alert_time' in rule and aggregation_key_value in rule['aggregate_alert_time'] and rule[ + 'aggregate_alert_time'].get(aggregation_key_value) < ts_to_dt(lookup_es_key(match, rule['timestamp_field'])))): + + # ElastAlert may have restarted while pending alerts exist + pending_alert = self.find_pending_aggregate_alert(rule, aggregation_key_value) + if pending_alert: + alert_time = ts_to_dt(pending_alert['_source']['alert_time']) + rule['aggregate_alert_time'][aggregation_key_value] = alert_time + agg_id = pending_alert['_id'] + rule['current_aggregate_id'] = {aggregation_key_value: agg_id} + elastalert_logger.info( + 'Adding alert for %s to aggregation(id: %s, aggregation_key: %s), next alert at %s' % ( + rule['name'], + agg_id, + aggregation_key_value, + alert_time + ) + ) + else: + # First match, set alert_time + alert_time = '' + if isinstance(rule['aggregation'], dict) and rule['aggregation'].get('schedule'): + croniter._datetime_to_timestamp = cronite_datetime_to_timestamp # For Python 2.6 compatibility + try: + iter = croniter(rule['aggregation']['schedule'], ts_now()) + alert_time = unix_to_dt(iter.get_next()) + except Exception as e: + self.handle_error("Error parsing aggregate send time Cron format %s" % (e), rule['aggregation']['schedule']) + else: + if rule.get('aggregate_by_match_time', False): + match_time = ts_to_dt(lookup_es_key(match, rule['timestamp_field'])) + alert_time = match_time + rule['aggregation'] + else: + alert_time = ts_now() + rule['aggregation'] + + rule['aggregate_alert_time'][aggregation_key_value] = alert_time + agg_id = None + elastalert_logger.info( + 'New aggregation for %s, aggregation_key: %s. next alert at %s.' 
% (rule['name'], aggregation_key_value, alert_time) + ) + else: + # Already pending aggregation, use existing alert_time + alert_time = rule['aggregate_alert_time'].get(aggregation_key_value) + agg_id = rule['current_aggregate_id'].get(aggregation_key_value) + elastalert_logger.info( + 'Adding alert for %s to aggregation(id: %s, aggregation_key: %s), next alert at %s' % ( + rule['name'], + agg_id, + aggregation_key_value, + alert_time + ) + ) + + alert_body = self.get_alert_body(match, rule, False, alert_time) + if agg_id: + alert_body['aggregate_id'] = agg_id + if aggregation_key_value: + alert_body['aggregation_key'] = aggregation_key_value + res = self.writeback('elastalert', alert_body) + + # If new aggregation, save _id + if res and not agg_id: + rule['current_aggregate_id'][aggregation_key_value] = res['_id'] + + # Couldn't write the match to ES, save it in memory for now + if not res: + rule['agg_matches'].append(match) + + return res + + def silence(self, silence_cache_key=None): + """ Silence an alert for a period of time. --silence and --rule must be passed as args. """ + if self.debug: + logging.error('--silence not compatible with --debug') + exit(1) + + if not self.args.rule: + logging.error('--silence must be used with --rule') + exit(1) + + # With --rule, self.rules will only contain that specific rule + if not silence_cache_key: + silence_cache_key = self.rules[0]['name'] + "._silence" + + try: + silence_ts = parse_deadline(self.args.silence) + except (ValueError, TypeError): + logging.error('%s is not a valid time period' % (self.args.silence)) + exit(1) + + if not self.set_realert(silence_cache_key, silence_ts, 0): + logging.error('Failed to save silence command to Elasticsearch') + exit(1) + + elastalert_logger.info('Success. %s will be silenced until %s' % (silence_cache_key, silence_ts)) + + def set_realert(self, silence_cache_key, timestamp, exponent): + """ Write a silence to Elasticsearch for silence_cache_key until timestamp. """ + body = {'exponent': exponent, + 'rule_name': silence_cache_key, + '@timestamp': ts_now(), + 'until': timestamp} + + self.silence_cache[silence_cache_key] = (timestamp, exponent) + return self.writeback('silence', body) + + def is_silenced(self, rule_name): + """ Checks if rule_name is currently silenced. Returns false on exception. 
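The scheduling branch in `add_aggregated_alert` accepts either form of the `aggregation` option: a cron expression under `schedule`, evaluated with croniter, or a plain timedelta added to the match time or to now. A small sketch of how the next aggregate alert time falls out of each form (croniter's `get_next(datetime.datetime)` is equivalent to the `unix_to_dt(iter.get_next())` round trip used above):

import datetime
from croniter import croniter

now = datetime.datetime(2019, 1, 14, 11, 37)

# aggregation: {schedule: '*/10 * * * *'} -- wait for the next cron tick
cron_alert_time = croniter('*/10 * * * *', now).get_next(datetime.datetime)
# -> 2019-01-14 11:40:00

# aggregation: {minutes: 30} -- fixed delay after the first match (or ts_now())
delay_alert_time = now + datetime.timedelta(minutes=30)
# -> 2019-01-14 12:07:00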
""" + if rule_name in self.silence_cache: + if ts_now() < self.silence_cache[rule_name][0]: + return True + + if self.debug: + return False + query = {'term': {'rule_name': rule_name}} + sort = {'sort': {'until': {'order': 'desc'}}} + if self.is_atleastfive(): + query = {'query': query} + else: + query = {'filter': query} + query.update(sort) + + try: + index = self.get_six_index('silence') + res = self.writeback_es.search(index=index, doc_type='silence', + size=1, body=query, _source_include=['until', 'exponent']) + except ElasticsearchException as e: + self.handle_error("Error while querying for alert silence status: %s" % (e), {'rule': rule_name}) + + return False + if res['hits']['hits']: + until_ts = res['hits']['hits'][0]['_source']['until'] + exponent = res['hits']['hits'][0]['_source'].get('exponent', 0) + if rule_name not in self.silence_cache.keys(): + self.silence_cache[rule_name] = (ts_to_dt(until_ts), exponent) + else: + self.silence_cache[rule_name] = (ts_to_dt(until_ts), self.silence_cache[rule_name][1]) + if ts_now() < ts_to_dt(until_ts): + return True + return False + + def handle_error(self, message, data=None): + ''' Logs message at error level and writes message, data and traceback to Elasticsearch. ''' + logging.error(message) + body = {'message': message} + tb = traceback.format_exc() + body['traceback'] = tb.strip().split('\n') + if data: + body['data'] = data + self.writeback('elastalert_error', body) + + def handle_uncaught_exception(self, exception, rule): + """ Disables a rule and sends a notification. """ + logging.error(traceback.format_exc()) + self.handle_error('Uncaught exception running rule %s: %s' % (rule['name'], exception), {'rule': rule['name']}) + if self.disable_rules_on_error: + self.rules = [running_rule for running_rule in self.rules if running_rule['name'] != rule['name']] + self.disabled_rules.append(rule) + elastalert_logger.info('Rule %s disabled', rule['name']) + if self.notify_email: + self.send_notification_email(exception=exception, rule=rule) + + def send_notification_email(self, text='', exception=None, rule=None, subject=None, rule_file=None): + email_body = text + rule_name = None + if rule: + rule_name = rule['name'] + elif rule_file: + rule_name = rule_file + if exception and rule_name: + if not subject: + subject = 'Uncaught exception in ElastAlert - %s' % (rule_name) + email_body += '\n\n' + email_body += 'The rule %s has raised an uncaught exception.\n\n' % (rule_name) + if self.disable_rules_on_error: + modified = ' or if the rule config file has been modified' if not self.args.pin_rules else '' + email_body += 'It has been disabled and will be re-enabled when ElastAlert restarts%s.\n\n' % (modified) + tb = traceback.format_exc() + email_body += tb + + if isinstance(self.notify_email, basestring): + self.notify_email = [self.notify_email] + email = MIMEText(email_body) + email['Subject'] = subject if subject else 'ElastAlert notification' + recipients = self.notify_email + if rule and rule.get('notify_email'): + if isinstance(rule['notify_email'], basestring): + rule['notify_email'] = [rule['notify_email']] + recipients = recipients + rule['notify_email'] + recipients = list(set(recipients)) + email['To'] = ', '.join(recipients) + email['From'] = self.from_addr + email['Reply-To'] = self.conf.get('email_reply_to', email['To']) + + try: + smtp = SMTP(self.smtp_host) + smtp.sendmail(self.from_addr, recipients, email.as_string()) + except (SMTPException, error) as e: + self.handle_error('Error connecting to SMTP host: %s' % (e), 
{'email_body': email_body}) + + def get_top_counts(self, rule, starttime, endtime, keys, number=None, qk=None): + """ Counts the number of events for each unique value for each key field. + Returns a dictionary with top_events_ mapped to the top 5 counts for each key. """ + all_counts = {} + if not number: + number = rule.get('top_count_number', 5) + for key in keys: + index = self.get_index(rule, starttime, endtime) + + hits_terms = self.get_hits_terms(rule, starttime, endtime, index, key, qk, number) + if hits_terms is None: + top_events_count = {} + else: + buckets = hits_terms.values()[0] + + # get_hits_terms adds to num_hits, but we don't want to count these + self.num_hits -= len(buckets) + terms = {} + for bucket in buckets: + terms[bucket['key']] = bucket['doc_count'] + counts = terms.items() + counts.sort(key=lambda x: x[1], reverse=True) + top_events_count = dict(counts[:number]) + + # Save a dict with the top 5 events by key + all_counts['top_events_%s' % (key)] = top_events_count + + return all_counts + + def next_alert_time(self, rule, name, timestamp): + """ Calculate an 'until' time and exponent based on how much past the last 'until' we are. """ + if name in self.silence_cache: + last_until, exponent = self.silence_cache[name] + else: + # If this isn't cached, this is the first alert or writeback_es is down, normal realert + return timestamp + rule['realert'], 0 + + if not rule.get('exponential_realert'): + return timestamp + rule['realert'], 0 + diff = seconds(timestamp - last_until) + # Increase exponent if we've alerted recently + if diff < seconds(rule['realert']) * 2 ** exponent: + exponent += 1 + else: + # Continue decreasing exponent the longer it's been since the last alert + while diff > seconds(rule['realert']) * 2 ** exponent and exponent > 0: + diff -= seconds(rule['realert']) * 2 ** exponent + exponent -= 1 + + wait = datetime.timedelta(seconds=seconds(rule['realert']) * 2 ** exponent) + if wait >= rule['exponential_realert']: + return timestamp + rule['exponential_realert'], exponent - 1 + return timestamp + wait, exponent + + +def handle_signal(signal, frame): + elastalert_logger.info('SIGINT received, stopping ElastAlert...') + # use os._exit to exit immediately and avoid someone catching SystemExit + os._exit(0) + + +def main(args=None): + signal.signal(signal.SIGINT, handle_signal) + if not args: + args = sys.argv[1:] + client = ElastAlerter(args) + if not client.debug: + p = PrometheusWrapper(client) + p.start() + if not client.args.silence: + client.start() + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) \ No newline at end of file diff --git a/comparisonFile2 b/comparisonFile2 new file mode 100644 index 000000000..1f19780fc --- /dev/null +++ b/comparisonFile2 @@ -0,0 +1,517 @@ +import collections +import datetime +import logging +import os +import types + +import dateutil.parser +import dateutil.tz +from auth import Auth +from elasticsearch import RequestsHttpConnection +from elasticsearch.client import Elasticsearch +from six import string_types + +logging.basicConfig() +elastalert_logger = logging.getLogger('elastalert') + + +def new_get_event_ts(ts_field): + """ Constructs a lambda that may be called to extract the timestamp field + from a given event. + + :returns: A callable function that takes an event and outputs that event's + timestamp field. 
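The doubling in `next_alert_time` is easiest to see with concrete numbers. A worked sketch with hypothetical settings of `realert: minutes=10` and an `exponential_realert` cap of 4 hours:

import datetime

realert = datetime.timedelta(minutes=10)
cap = datetime.timedelta(hours=4)

def silence_for(exponent):
    # Each consecutive alert doubles the wait, up to the configured cap
    return min(realert * 2 ** exponent, cap)

for exponent in range(6):
    print(exponent, silence_for(exponent))
# 0 0:10:00, 1 0:20:00, 2 0:40:00, 3 1:20:00, 4 2:40:00, 5 4:00:00 (capped)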
+ """ + return lambda event: lookup_es_key(event[0], ts_field) + + +def _find_es_dict_by_key(lookup_dict, term): + """ Performs iterative dictionary search based upon the following conditions: + + 1. Subkeys may either appear behind a full stop (.) or at one lookup_dict level lower in the tree. + 2. No wildcards exist within the provided ES search terms (these are treated as string literals) + + This is necessary to get around inconsistencies in ES data. + + For example: + {'ad.account_name': 'bob'} + Or: + {'csp_report': {'blocked_uri': 'bob.com'}} + And even: + {'juniper_duo.geoip': {'country_name': 'Democratic People's Republic of Korea'}} + + We want a search term of form "key.subkey.subsubkey" to match in all cases. + :returns: A tuple with the first element being the dict that contains the key and the second + element which is the last subkey used to access the target specified by the term. None is + returned for both if the key can not be found. + """ + if term in lookup_dict: + return lookup_dict, term + # If the term does not match immediately, perform iterative lookup: + # 1. Split the search term into tokens + # 2. Recurrently concatenate these together to traverse deeper into the dictionary, + # clearing the subkey at every successful lookup. + # + # This greedy approach is correct because subkeys must always appear in order, + # preferring full stops and traversal interchangeably. + # + # Subkeys will NEVER be duplicated between an alias and a traversal. + # + # For example: + # {'foo.bar': {'bar': 'ray'}} to look up foo.bar will return {'bar': 'ray'}, not 'ray' + dict_cursor = lookup_dict + subkeys = term.split('.') + subkey = '' + + while len(subkeys) > 0: + if not dict_cursor: + return {}, None + + subkey += subkeys.pop(0) + + if subkey in dict_cursor: + if len(subkeys) == 0: + break + + dict_cursor = dict_cursor[subkey] + subkey = '' + elif len(subkeys) == 0: + # If there are no keys left to match, return None values + dict_cursor = None + subkey = None + else: + subkey += '.' + + return dict_cursor, subkey + + +def set_es_key(lookup_dict, term, value): + """ Looks up the location that the term maps to and sets it to the given value. + :returns: True if the value was set successfully, False otherwise. + """ + value_dict, value_key = _find_es_dict_by_key(lookup_dict, term) + + if value_dict is not None: + value_dict[value_key] = value + return True + + return False + + +def lookup_es_key(lookup_dict, term): + """ Performs iterative dictionary search for the given term. + :returns: The value identified by term or None if it cannot be found. 
+ """ + value_dict, value_key = _find_es_dict_by_key(lookup_dict, term) + return None if value_key is None else value_dict[value_key] + + +def ts_to_dt(timestamp): + if isinstance(timestamp, datetime.datetime): + return timestamp + dt = dateutil.parser.parse(timestamp) + # Implicitly convert local timestamps to UTC + if dt.tzinfo is None: + dt = dt.replace(tzinfo=dateutil.tz.tzutc()) + return dt + + +def dt_to_ts(dt): + if not isinstance(dt, datetime.datetime): + logging.warning('Expected datetime, got %s' % (type(dt))) + return dt + ts = dt.isoformat() + # Round microseconds to milliseconds + if dt.tzinfo is None: + # Implicitly convert local times to UTC + return ts + 'Z' + # isoformat() uses microsecond accuracy and timezone offsets + # but we should try to use millisecond accuracy and Z to indicate UTC + return ts.replace('000+00:00', 'Z').replace('+00:00', 'Z') + + +def ts_to_dt_with_format(timestamp, ts_format): + if isinstance(timestamp, datetime.datetime): + return timestamp + dt = datetime.datetime.strptime(timestamp, ts_format) + # Implicitly convert local timestamps to UTC + if dt.tzinfo is None: + dt = dt.replace(tzinfo=dateutil.tz.tzutc()) + return dt + + +def dt_to_ts_with_format(dt, ts_format): + if not isinstance(dt, datetime.datetime): + logging.warning('Expected datetime, got %s' % (type(dt))) + return dt + ts = dt.strftime(ts_format) + return ts + + +def ts_now(): + return datetime.datetime.utcnow().replace(tzinfo=dateutil.tz.tzutc()) + + +def inc_ts(timestamp, milliseconds=1): + """Increment a timestamp by milliseconds.""" + dt = ts_to_dt(timestamp) + dt += datetime.timedelta(milliseconds=milliseconds) + return dt_to_ts(dt) + + +def pretty_ts(timestamp, tz=True): + """Pretty-format the given timestamp (to be printed or logged hereafter). + If tz, the timestamp will be converted to local time. + Format: YYYY-MM-DD HH:MM TZ""" + dt = timestamp + if not isinstance(timestamp, datetime.datetime): + dt = ts_to_dt(timestamp) + if tz: + dt = dt.astimezone(dateutil.tz.tzlocal()) + return dt.strftime('%Y-%m-%d %H:%M %Z') + + +def ts_add(ts, td): + """ Allows a timedelta (td) add operation on a string timestamp (ts) """ + return dt_to_ts(ts_to_dt(ts) + td) + + +def hashable(obj): + """ Convert obj to a hashable obj. + We use the value of some fields from Elasticsearch as keys for dictionaries. This means + that whatever Elasticsearch returns must be hashable, and it sometimes returns a list or dict.""" + if not obj.__hash__: + return str(obj) + return obj + + +def format_index(index, start, end, add_extra=False): + """ Takes an index, specified using strftime format, start and end time timestamps, + and outputs a wildcard based index string to match all possible timestamps. 
""" + # Convert to UTC + start -= start.utcoffset() + end -= end.utcoffset() + original_start = start + indices = set() + while start.date() <= end.date(): + indices.add(start.strftime(index)) + start += datetime.timedelta(days=1) + num = len(indices) + if add_extra: + while len(indices) == num: + original_start -= datetime.timedelta(days=1) + new_index = original_start.strftime(index) + assert new_index != index, "You cannot use a static index with search_extra_index" + indices.add(new_index) + + return ','.join(indices) + + +class EAException(Exception): + pass + + +def seconds(td): + return td.seconds + td.days * 24 * 3600 + + +def total_seconds(dt): + # For python 2.6 compatability + if dt is None: + return 0 + elif hasattr(dt, 'total_seconds'): + return dt.total_seconds() + else: + return (dt.microseconds + (dt.seconds + dt.days * 24 * 3600) * 10**6) / 10**6 + + +def dt_to_int(dt): + dt = dt.replace(tzinfo=None) + return int(total_seconds((dt - datetime.datetime.utcfromtimestamp(0))) * 1000) + + +def unixms_to_dt(ts): + return unix_to_dt(float(ts) / 1000) + +def unix_to_dt(ts): + if(type(ts) == types.UnicodeType): + dt = datetime.datetime.strptime(ts, '%Y-%m-%d %H:%M:%S.%f') + else: + dt = datetime.datetime.utcfromtimestamp(float(ts)) + dt = dt.replace(tzinfo=dateutil.tz.tzutc()) + return dt + + +def dt_to_unix(dt): + return int(total_seconds(dt - datetime.datetime(1970, 1, 1, tzinfo=dateutil.tz.tzutc()))) + + +def dt_to_unixms(dt): + return int(dt_to_unix(dt) * 1000) + + +def cronite_datetime_to_timestamp(self, d): + """ + Converts a `datetime` object `d` into a UNIX timestamp. + """ + if d.tzinfo is not None: + d = d.replace(tzinfo=None) - d.utcoffset() + + return total_seconds((d - datetime.datetime(1970, 1, 1))) + + +def add_raw_postfix(field, is_five_or_above): + if is_five_or_above: + end = '.keyword' + else: + end = '.raw' + if not field.endswith(end): + field += end + return field + + +def replace_dots_in_field_names(document): + """ This method destructively modifies document by replacing any dots in + field names with an underscore. 
""" + for key, value in list(document.items()): + if isinstance(value, dict): + value = replace_dots_in_field_names(value) + if isinstance(key, string_types) and key.find('.') != -1: + del document[key] + document[key.replace('.', '_')] = value + return document + + +def elasticsearch_client(conf): + """ returns an Elasticsearch instance configured using an es_conn_config """ + es_conn_conf = build_es_conn_config(conf) + auth = Auth() + es_conn_conf['http_auth'] = auth(host=es_conn_conf['es_host'], + username=es_conn_conf['es_username'], + password=es_conn_conf['es_password'], + aws_region=es_conn_conf['aws_region'], + profile_name=es_conn_conf['profile']) + + return Elasticsearch(host=es_conn_conf['es_host'], + port=es_conn_conf['es_port'], + url_prefix=es_conn_conf['es_url_prefix'], + use_ssl=es_conn_conf['use_ssl'], + verify_certs=es_conn_conf['verify_certs'], + ca_certs=es_conn_conf['ca_certs'], + connection_class=RequestsHttpConnection, + http_auth=es_conn_conf['http_auth'], + timeout=es_conn_conf['es_conn_timeout'], + send_get_body_as=es_conn_conf['send_get_body_as'], + client_cert=es_conn_conf['client_cert'], + client_key=es_conn_conf['client_key']) + +def kibana_adapter_client(conf): + """ returns an Elasticsearch instance configured using an es_conn_config """ + es_conn_conf = build_adapter_conn_config(conf) + auth = Auth() + es_conn_conf['http_auth'] = auth(host=es_conn_conf['es_host'], + username=es_conn_conf['es_username'], + password=es_conn_conf['es_password'], + aws_region=es_conn_conf['aws_region'], + profile_name=es_conn_conf['profile']) + + return Elasticsearch(host=es_conn_conf['es_host'], + port=es_conn_conf['es_port'], + url_prefix=es_conn_conf['es_url_prefix'], + use_ssl=es_conn_conf['use_ssl'], + verify_certs=es_conn_conf['verify_certs'], + ca_certs=es_conn_conf['ca_certs'], + connection_class=RequestsHttpConnection, + http_auth=es_conn_conf['http_auth'], + timeout=es_conn_conf['es_conn_timeout'], + send_get_body_as=es_conn_conf['send_get_body_as'], + client_cert=es_conn_conf['client_cert'], + client_key=es_conn_conf['client_key']) + +def build_adapter_conn_config(conf): + """ Given a conf dictionary w/ raw config properties 'use_ssl', 'es_host', 'es_port' + 'es_username' and 'es_password', this will return a new dictionary + with properly initialized values for 'es_host', 'es_port', 'use_ssl' and 'http_auth' which + will be a basicauth username:password formatted string """ + parsed_conf = {} + parsed_conf['use_ssl'] = os.environ.get('ES_USE_SSL', False) + parsed_conf['verify_certs'] = True + parsed_conf['ca_certs'] = None + parsed_conf['client_cert'] = None + parsed_conf['client_key'] = None + parsed_conf['http_auth'] = None + parsed_conf['es_username'] = None + parsed_conf['es_password'] = None + parsed_conf['es_api_key'] = None + parsed_conf['es_bearer'] = None + parsed_conf['aws_region'] = None + parsed_conf['profile'] = None + parsed_conf['headers'] = None + parsed_conf['es_host'] = os.environ.get('ES_HOST', conf['es_host']) + parsed_conf['es_port'] = int(os.environ.get('ES_PORT', conf['es_port'])) + + es_hosts = os.environ.get('ES_HOSTS') + es_hosts = parse_hosts(es_hosts, parsed_conf.get('es_port')) if es_hosts else conf.get('es_hosts') + parsed_conf['es_hosts'] = es_hosts + + parsed_conf['es_url_prefix'] = '' + parsed_conf['es_conn_timeout'] = conf.get('es_conn_timeout', 20) + parsed_conf['send_get_body_as'] = conf.get('es_send_get_body_as', 'GET') + parsed_conf['ssl_show_warn'] = conf.get('ssl_show_warn', True) + + if os.environ.get('ES_USERNAME'): + 
parsed_conf['es_username'] = os.environ.get('ES_USERNAME') + parsed_conf['es_password'] = os.environ.get('ES_PASSWORD') + elif 'es_username' in conf: + parsed_conf['es_username'] = conf['es_username'] + parsed_conf['es_password'] = conf['es_password'] + + if os.environ.get('ES_API_KEY'): + parsed_conf['es_api_key'] = os.environ.get('ES_API_KEY') + elif 'es_api_key' in conf: + parsed_conf['es_api_key'] = conf['es_api_key'] + + if os.environ.get('ES_BEARER'): + parsed_conf['es_bearer'] = os.environ.get('ES_BEARER') + elif 'es_bearer' in conf: + parsed_conf['es_bearer'] = conf['es_bearer'] + + if 'aws_region' in conf: + parsed_conf['aws_region'] = conf['aws_region'] + + if 'profile' in conf: + parsed_conf['profile'] = conf['profile'] + + if 'use_ssl' in conf: + parsed_conf['use_ssl'] = conf['use_ssl'] + + if 'verify_certs' in conf: + parsed_conf['verify_certs'] = conf['verify_certs'] + + if 'ca_certs' in conf: + parsed_conf['ca_certs'] = conf['ca_certs'] + + if 'client_cert' in conf: + parsed_conf['client_cert'] = conf['client_cert'] + + if 'client_key' in conf: + parsed_conf['client_key'] = conf['client_key'] + + if 'es_url_prefix' in conf: + parsed_conf['es_url_prefix'] = conf['es_url_prefix'] + + return parsed_conf + + + +def build_es_conn_config(conf): + """ Given a conf dictionary w/ raw config properties 'use_ssl', 'es_host', 'es_port' + 'es_username' and 'es_password', this will return a new dictionary + with properly initialized values for 'es_host', 'es_port', 'use_ssl' and 'http_auth' which + will be a basicauth username:password formatted string """ + parsed_conf = {} + parsed_conf['use_ssl'] = os.environ.get('ES_USE_SSL', False) + parsed_conf['verify_certs'] = True + parsed_conf['ca_certs'] = None + parsed_conf['client_cert'] = None + parsed_conf['client_key'] = None + parsed_conf['http_auth'] = None + parsed_conf['es_username'] = None + parsed_conf['es_password'] = None + parsed_conf['aws_region'] = None + parsed_conf['profile'] = None + parsed_conf['es_host'] = os.environ.get('ES_HOST', conf['es_host']) + parsed_conf['es_port'] = int(os.environ.get('ES_PORT', conf['es_port'])) + parsed_conf['es_url_prefix'] = '' + parsed_conf['es_conn_timeout'] = conf.get('es_conn_timeout', 20) + parsed_conf['send_get_body_as'] = conf.get('es_send_get_body_as', 'GET') + + if os.environ.get('ES_USERNAME'): + parsed_conf['es_username'] = os.environ.get('ES_USERNAME') + parsed_conf['es_password'] = os.environ.get('ES_PASSWORD') + elif 'es_username' in conf: + parsed_conf['es_username'] = conf['es_username'] + parsed_conf['es_password'] = conf['es_password'] + + if 'aws_region' in conf: + parsed_conf['aws_region'] = conf['aws_region'] + + # Deprecated + if 'boto_profile' in conf: + logging.warning('Found deprecated "boto_profile", use "profile" instead!') + parsed_conf['profile'] = conf['boto_profile'] + + if 'profile' in conf: + parsed_conf['profile'] = conf['profile'] + + if 'use_ssl' in conf: + parsed_conf['use_ssl'] = conf['use_ssl'] + + if 'verify_certs' in conf: + parsed_conf['verify_certs'] = conf['verify_certs'] + + if 'ca_certs' in conf: + parsed_conf['ca_certs'] = conf['ca_certs'] + + if 'client_cert' in conf: + parsed_conf['client_cert'] = conf['client_cert'] + + if 'client_key' in conf: + parsed_conf['client_key'] = conf['client_key'] + + if 'es_url_prefix' in conf: + parsed_conf['es_url_prefix'] = conf['es_url_prefix'] + + return parsed_conf + + +def parse_duration(value): + """Convert ``unit=num`` spec into a ``timedelta`` object.""" + unit, num = value.split('=') + return 
datetime.timedelta(**{unit: int(num)}) + + +def parse_deadline(value): + """Convert ``unit=num`` spec into a ``datetime`` object.""" + duration = parse_duration(value) + return ts_now() + duration + + +def flatten_dict(dct, delim='.', prefix=''): + ret = {} + for key, val in dct.items(): + if type(val) == dict: + ret.update(flatten_dict(val, prefix=prefix + key + delim)) + else: + ret[prefix + key] = val + return ret + + +def resolve_string(string, match, missing_text=''): + """ + Given a python string that may contain references to fields on the match dictionary, + the strings are replaced using the corresponding values. + However, if the referenced field is not found on the dictionary, + it is replaced by a default string. + Strings can be formatted using the old-style format ('%(field)s') or + the new-style format ('{match[field]}'). + + :param string: A string that may contain references to values of the 'match' dictionary. + :param match: A dictionary with the values to replace where referenced by keys in the string. + :param missing_text: The default text to replace a formatter with if the field doesnt exist. + """ + flat_match = flatten_dict(match) + flat_match.update(match) + dd_match = collections.defaultdict(lambda: missing_text, flat_match) + dd_match['_missing_value'] = missing_text + while True: + try: + string = string % dd_match + string = string.format(**dd_match) + break + except KeyError as e: + if '{%s}' % e.message not in string: + break + string = string.replace('{%s}' % e.message, '{_missing_value}') + + return string \ No newline at end of file diff --git a/elastalert/alerters/alertmanager.py b/elastalert/alerters/alertmanager.py index 70eb1598c..c5f2784f4 100644 --- a/elastalert/alerters/alertmanager.py +++ b/elastalert/alerters/alertmanager.py @@ -30,7 +30,7 @@ def __init__(self, rule): self.timeout = self.rule.get('alertmanager_timeout', 10) self.alertmanager_basic_auth_login = self.rule.get('alertmanager_basic_auth_login', None) self.alertmanager_basic_auth_password = self.rule.get('alertmanager_basic_auth_password', None) - + self.tenant = self.rule.get('tenant', "haystack") @staticmethod def _json_or_string(obj): @@ -41,6 +41,7 @@ def _json_or_string(obj): def alert(self, matches): headers = {'content-type': 'application/json'} + headers.update({"X-Scope-OrgID": self.tenant}) proxies = {'https': self.proxies} if self.proxies else None auth = HTTPBasicAuth(self.alertmanager_basic_auth_login, self.alertmanager_basic_auth_password) if self.alertmanager_basic_auth_login else None @@ -60,7 +61,7 @@ def alert(self, matches): for host in self.hosts: try: - url = '{}/api/{}/alerts'.format(host, self.api_version) + url = host if self.ca_certs: verify = self.ca_certs diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index e34781054..5689af4fa 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -12,6 +12,7 @@ import time import timeit import traceback +import requests from email.mime.text import MIMEText from smtplib import SMTP from smtplib import SMTPException @@ -28,6 +29,7 @@ from elasticsearch.exceptions import ElasticsearchException from elasticsearch.exceptions import NotFoundError from elasticsearch.exceptions import TransportError +from ruletypes import ErrorRateRule from elastalert.alerters.debug import DebugAlerter from elastalert.config import load_conf @@ -37,7 +39,7 @@ from elastalert.prometheus_wrapper import PrometheusWrapper from elastalert.ruletypes import FlatlineRule from elastalert.util import (add_raw_postfix, 
cronite_datetime_to_timestamp, dt_to_ts, dt_to_unix, EAException, - elastalert_logger, elasticsearch_client, format_index, lookup_es_key, parse_deadline, + elastalert_logger, elasticsearch_client,kibana_adapter_client, format_index, lookup_es_key, parse_deadline, parse_duration, pretty_ts, replace_dots_in_field_names, seconds, set_es_key, should_scrolling_continue, total_seconds, ts_add, ts_now, ts_to_dt, unix_to_dt, ts_utc_to_tz) @@ -91,7 +93,7 @@ def parse_args(self, args): dest='es_debug_trace', help='Enable logging from Elasticsearch queries as curl command. Queries will be logged to file. Note that ' 'this will incorrectly display localhost:9200 as the host/port') - parser.add_argument('--prometheus_port', type=int, dest='prometheus_port', help='Enables Prometheus metrics on specified port.') + parser.add_argument('--prometheus_port', type=int, dest='prometheus_port', default=9090, help='Enables Prometheus metrics on specified port.') self.args = parser.parse_args(args) def __init__(self, args): @@ -170,6 +172,10 @@ def __init__(self, args): self.pretty_ts_format = self.conf.get('custom_pretty_ts_format') self.writeback_es = elasticsearch_client(self.conf) + self.kibana_adapter = kibana_adapter_client(self.conf) + self._es_version = None + + self.query_endpoint = self.conf['query_endpoint'] remove = [] for rule in self.rules: @@ -203,7 +209,23 @@ def get_index(rule, starttime=None, endtime=None): return index @staticmethod - def get_query(filters, starttime=None, endtime=None, sort=True, timestamp_field='@timestamp', to_ts_func=dt_to_ts, desc=False): + def get_msearch_query(query, rule): + search_arr = [] + search_arr.append({'index': [rule['index']]}) + if rule.get('use_count_query'): + query['size'] = 0 + if rule['include']: + query['_source'] = {} + query['_source']['includes'] = rule['include'] + search_arr.append(query) + request = '' + for each in search_arr: + request += '%s \n' %json.dumps(each) + return request + + @staticmethod + def get_query(filters, starttime=None, endtime=None, sort=True, timestamp_field='@timestamp', to_ts_func=dt_to_ts, desc=False, + five=False): """ Returns a query dict that will apply a list of filters, filter by start and end time, and sort results by timestamp. 
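`get_msearch_query` emits the newline-delimited header/body pair that the `_msearch` endpoint expects, which is why the query paths below call `msearch(body=request)` instead of `search()`. A sketch of the wire format it produces for a toy rule (the function here simply mirrors the static method above):

import json

def msearch_request(query, rule):
    # Header line names the index; the following line carries the query itself
    lines = [{'index': [rule['index']]}]
    if rule.get('use_count_query'):
        query['size'] = 0
    if rule.get('include'):
        query['_source'] = {'includes': rule['include']}
    lines.append(query)
    return ''.join('%s \n' % json.dumps(line) for line in lines)

rule = {'index': 'logstash-*', 'include': ['@timestamp', 'message']}
print(msearch_request({'query': {'match_all': {}}}, rule))
# {"index": ["logstash-*"]}
# {"query": {"match_all": {}}, "_source": {"includes": ["@timestamp", "message"]}}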
@@ -241,9 +263,8 @@ def get_terms_query(self, query, rule, size, field): if 'sort' in query_element: query_element.pop('sort') aggs_query = query - aggs_query['aggs'] = {'counts': {'terms': {'field': field, - 'size': size, - 'min_doc_count': rule.get('min_doc_count', 1)}}} + aggs_query['aggs'] = {'counts': {'terms': {'field': field, 'size': size}}} + return aggs_query def get_aggregation_query(self, query, rule, query_key, terms_size, timestamp_field='@timestamp'): @@ -357,28 +378,34 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): timestamp_field=rule['timestamp_field'], to_ts_func=rule['dt_to_ts'], ) - extra_args = {'_source_includes': rule['include']} - scroll_keepalive = rule.get('scroll_keepalive', self.scroll_keepalive) - if not rule.get('_source_enabled'): - query['stored_fields'] = rule['include'] - extra_args = {} - try: - if scroll: - res = self.thread_data.current_es.scroll(scroll_id=rule['scroll_id'], scroll=scroll_keepalive) - else: - res = self.thread_data.current_es.search( - scroll=scroll_keepalive, - index=index, - size=rule.get('max_query_size', self.max_query_size), - body=query, - ignore_unavailable=True, - **extra_args - ) - if '_scroll_id' in res: - rule['scroll_id'] = res['_scroll_id'] + request = self.get_msearch_query(query,rule) - self.thread_data.total_hits = int(res['hits']['total']['value']) + # extra_args = {'_source_includes': rule['include']} + # scroll_keepalive = rule.get('scroll_keepalive', self.scroll_keepalive) + # if not rule.get('_source_enabled'): + # query['stored_fields'] = rule['include'] + # extra_args = {} + + try: + res = self.thread_data.current_es.msearch(body=request) + res = res['responses'][0] + self.total_hits = int(res['hits']['total']) + # if scroll: + # res = self.thread_data.current_es.scroll(scroll_id=rule['scroll_id'], scroll=scroll_keepalive) + # else: + # res = self.thread_data.current_es.search( + # scroll=scroll_keepalive, + # index=index, + # size=rule.get('max_query_size', self.max_query_size), + # body=query, + # ignore_unavailable=True, + # **extra_args + # ) + # if '_scroll_id' in res: + # rule['scroll_id'] = res['_scroll_id'] + + # self.thread_data.total_hits = int(res['hits']['total']['value']) if len(res.get('_shards', {}).get('failures', [])) > 0: try: @@ -435,13 +462,11 @@ def get_hits_count(self, rule, starttime, endtime, index): to_ts_func=rule['dt_to_ts'], ) - es_client = self.thread_data.current_es + request = self.get_msearch_query(query,rule) + try: - res = es_client.count( - index=index, - body=query, - ignore_unavailable=True - ) + res = self.thread_data.current_es.msearch(body=request) + res = res['responses'][0] except ElasticsearchException as e: # Elasticsearch sometimes gives us GIGANTIC error messages # (so big that they will fill the entire terminal buffer) @@ -450,13 +475,13 @@ def get_hits_count(self, rule, starttime, endtime, index): self.handle_error('Error running count query: %s' % (e), {'rule': rule['name'], 'query': query}) return None - self.thread_data.num_hits += res['count'] + self.thread_data.num_hits += res['hits']['total'] lt = rule.get('use_local_time') elastalert_logger.info( "Queried rule %s from %s to %s: %s hits" % (rule['name'], pretty_ts(starttime, lt, self.pretty_ts_format), - pretty_ts(endtime, lt, self.pretty_ts_format), res['count']) + pretty_ts(endtime, lt, self.pretty_ts_format), res['hits']['total']) ) - return {endtime: res['count']} + return {endtime: res['hits']['total']} def get_hits_terms(self, rule, starttime, endtime, index, key, qk=None, 
size=None): rule_filter = copy.copy(rule['filter']) @@ -489,9 +514,12 @@ def get_hits_terms(self, rule, starttime, endtime, index, key, qk=None, size=Non if size is None: size = rule.get('terms_size', 50) query = self.get_terms_query(base_query, rule, size, key) + request = self.get_msearch_query(query,rule) try: - res = self.thread_data.current_es.search(index=index, body=query, size=0, ignore_unavailable=True) + res = self.thread_data.current_es.msearch(body=request) + res = res['responses'][0] + except ElasticsearchException as e: # Elasticsearch sometimes gives us GIGANTIC error messages # (so big that they will fill the entire terminal buffer) @@ -525,8 +553,10 @@ def get_hits_aggregation(self, rule, starttime, endtime, index, query_key, term_ if term_size is None: term_size = rule.get('terms_size', 50) query = self.get_aggregation_query(base_query, rule, query_key, term_size, rule['timestamp_field']) + request = self.get_msearch_query(query,rule) try: - res = self.thread_data.current_es.search(index=index, body=query, size=0, ignore_unavailable=True) + res = self.thread_data.current_es.msearch(body=request) + res = res['responses'][0] except ElasticsearchException as e: if len(str(e)) > 1024: e = str(e)[:1024] + '... (%d characters removed)' % (len(str(e)) - 1024) @@ -536,10 +566,70 @@ def get_hits_aggregation(self, rule, starttime, endtime, index, query_key, term_ return {} payload = res['aggregations'] - self.thread_data.num_hits += res['hits']['total']['value'] + self.thread_data.num_hits += res['hits']['total'] + return {endtime: payload} + + def get_error_rate(self, rule, starttime, endtime): + agg_key = '{}({})'.format(rule['total_agg_type'],rule['total_agg_key']) + query = self.get_query_string(rule) + aggregation = {"function": rule['total_agg_type'].upper(), "field": rule['total_agg_key']} + + total_data, total_count = self.get_ch_data(rule, starttime, endtime, agg_key, query, aggregation) + + if total_data is None: + return {} + + if(query): + query = '{} AND {}'.format(query,rule['error_condition']) + else: + query = rule['error_condition'] + + if rule['count_all_errors']: + agg_key = "count()" + aggregation = {"function": "COUNT", "field": "1"} + + error_data, error_count = self.get_ch_data(rule, starttime, endtime, agg_key, query, aggregation) + + if error_data is None: + return {} + + payload = {'error_count': error_data, 'total_count': total_data, 'start_time': starttime, 'end_time': endtime} + elastalert_logger.info("query start time and endtime %s at %s , error_count %d ,total_count %d" % (starttime, endtime, error_data, total_data)) + + self.num_hits += int(error_count) + return {endtime: payload} + def get_query_string(self, rule): + if rule['filter'] and ('query_string' in rule['filter'][0]) and ('query' in rule['filter'][0]['query_string']): + return rule['filter'][0]['query_string']['query'] + return "" + + def get_ch_data(self, rule, starttime, endtime, agg_key, freshquery,aggregation): + data = { + "selects":[], + "start_time":dt_to_ts_with_format(starttime,"%Y-%m-%dT%H:%M:%S.%f")[:-3]+'Z', + "end_time":dt_to_ts_with_format(endtime,"%Y-%m-%dT%H:%M:%S.%f")[:-3]+'Z', + "freshquery": freshquery, + "group_bys":[], + "sort_orders":[{"sort_by": agg_key,"sort_direction":"desc"}], + "aggregations":[aggregation] + } + try: + res = requests.post(self.query_endpoint, json=data) + res.raise_for_status() + except requests.exceptions.RequestException as e: + if len(str(e)) > 1024: + e = str(e)[:1024] + '... 
(%d characters removed)' % (len(str(e)) - 1024)
+            self.handle_error('Error running query: %s' % (e), {'rule': rule['name']})
+            return None, 0
+        elastalert_logger.info("request data is %s" % json.dumps(data))
+        res = json.loads(res.content)
+        return int(res['data'][0][agg_key]), res['rows']
+
     def remove_duplicate_events(self, data, rule):
         new_events = []
         for event in data:
@@ -593,6 +683,8 @@ def run_query(self, rule, start=None, end=None, scroll=False):
             data = self.get_hits_count(rule, start, end, index)
         elif rule.get('use_terms_query'):
             data = self.get_hits_terms(rule, start, end, index, rule['query_key'])
+        elif isinstance(rule_inst, ErrorRateRule):
+            data = self.get_error_rate(rule, start, end)
         elif rule.get('aggregation_query_element'):
             data = self.get_hits_aggregation(rule, start, end, index, rule.get('query_key', None))
         else:
@@ -610,25 +702,27 @@
             rule_inst.add_count_data(data)
         elif rule.get('use_terms_query'):
             rule_inst.add_terms_data(data)
+        elif isinstance(rule_inst, ErrorRateRule):
+            rule_inst.calculate_err_rate(data)
         elif rule.get('aggregation_query_element'):
             rule_inst.add_aggregation_data(data)
         else:
             rule_inst.add_data(data)
 
-        try:
-            if rule.get('scroll_id') and self.thread_data.num_hits < self.thread_data.total_hits and should_scrolling_continue(rule):
-                if not self.run_query(rule, start, end, scroll=True):
-                    return False
-        except RuntimeError:
-            # It's possible to scroll far enough to hit max recursive depth
-            pass
-
-        if 'scroll_id' in rule:
-            scroll_id = rule.pop('scroll_id')
-            try:
-                self.thread_data.current_es.clear_scroll(scroll_id=scroll_id)
-            except NotFoundError:
-                pass
+        # try:
+        #     if rule.get('scroll_id') and self.thread_data.num_hits < self.thread_data.total_hits and should_scrolling_continue(rule):
+        #         if not self.run_query(rule, start, end, scroll=True):
+        #             return False
+        # except RuntimeError:
+        #     # It's possible to scroll far enough to hit max recursive depth
+        #     pass
+
+        # if 'scroll_id' in rule:
+        #     scroll_id = rule.pop('scroll_id')
+        #     try:
+        #         self.thread_data.current_es.clear_scroll(scroll_id=scroll_id)
+        #     except NotFoundError:
+        #         pass
 
         return True
 
@@ -816,7 +910,9 @@ def run_rule(self, rule, endtime, starttime=None):
         :return: The number of matches that the rule produced.
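`calculate_err_rate` (defined on `ErrorRateRule` in ruletypes.py) turns the two counts in this payload into a percentage, scaling by the configured trace sampling factor before the `threshold` comparison. The arithmetic, worked with hypothetical numbers:

error_count = 12
total_count = 4000   # e.g. uniq(traceID) over the query window
sampling = 10        # rule option: 1 in 10 traces is stored

rate = float(error_count) / float(total_count)  # 0.003, errors per counted trace
rate = rate / float(sampling)                   # 0.0003, corrected for sampling
rate = rate * 100                               # 0.03, expressed as a percentage
print(rate)  # compared against rule['threshold']; greater means a match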
""" run_start = time.time() - self.thread_data.current_es = self.get_elasticsearch_client(rule) + + self.thread_data.current_es = kibana_adapter_client(rule) + self.current_es_addr = (rule['es_host'], rule['es_port']) # If there are pending aggregate matches, try processing them for x in range(len(rule['agg_matches'])): @@ -1716,8 +1812,7 @@ def is_silenced(self, rule_name): query.update(sort) try: - doc_type = 'silence' - index = self.writeback_es.resolve_writeback_index(self.writeback_index, doc_type) + index = self.get_six_index('silence') res = self.writeback_es.search(index=index, size=1, body=query, _source_includes=['until', 'exponent']) except ElasticsearchException as e: diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 2366a144b..4f219c56a 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -93,6 +93,7 @@ class RulesLoader(object): 'metric_aggregation': ruletypes.MetricAggregationRule, 'percentage_match': ruletypes.PercentageMatchRule, 'spike_aggregation': ruletypes.SpikeMetricAggregationRule, + 'error_rate': ruletypes.ErrorRateRule } # Used to map names of alerts to their classes @@ -252,6 +253,9 @@ def load_yaml(self, filename): while True: loaded = self.get_yaml(current_path) + if 'query_string' in loaded['filter'][0]: + loaded['filter'][0]['query_string']['default_operator'] = "AND" + # Special case for merging filters - if both files specify a filter merge (AND) them if 'filter' in rule and 'filter' in loaded: rule['filter'] = loaded['filter'] + rule['filter'] @@ -393,6 +397,10 @@ def _dt_to_ts_with_format(dt): if 'include' in rule and type(rule['include']) != list: raise EAException('include option must be a list') + if (rule['type'] == 'error_rate'): + rule.setdefault('error_condition','exception.type:*') + rule.setdefault('unique_column','traceID') + raw_query_key = rule.get('query_key') if isinstance(raw_query_key, list): if len(raw_query_key) > 1: diff --git a/elastalert/prometheus_wrapper.py b/elastalert/prometheus_wrapper.py index 5b84d9358..e20ac0b88 100644 --- a/elastalert/prometheus_wrapper.py +++ b/elastalert/prometheus_wrapper.py @@ -36,7 +36,7 @@ def metrics_run_rule(self, rule, endtime, starttime=None): def metrics_writeback(self, doc_type, body, rule=None, match_body=None): """ Update various prometheus metrics accoording to the doc_type """ - res = self.writeback(doc_type, body, rule, match_body) + res = self.writeback(doc_type, body) try: if doc_type == 'elastalert_status': self.prom_hits.labels(body['rule_name']).inc(int(body['hits'])) @@ -52,4 +52,4 @@ def metrics_writeback(self, doc_type, body, rule=None, match_body=None): elif doc_type == 'silence': self.prom_alerts_silenced.labels(body['rule_name']).inc() finally: - return res + return res \ No newline at end of file diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 35fe4c14a..9c8402152 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -2,6 +2,7 @@ import copy import datetime import sys +import time from sortedcontainers import SortedKeyList as sortedlist @@ -1046,6 +1047,32 @@ def check_matches(self, timestamp, query_key, aggregation_data): raise NotImplementedError() +class ErrorRateRule(BaseAggregationRule): + """ A rule that determines error rate with sampling rate""" + required_options = frozenset(['sampling', 'threshold','error_condition','unique_column']) + def __init__(self, *args): + super(ErrorRateRule, self).__init__(*args) + + self.ts_field = self.rules.get('timestamp_field', '@timestamp') + self.rules['total_agg_key'] = 
self.rules['unique_column'] + self.rules['count_all_errors'] = True + + if (self.rules.has_key('error_calculation_method') and self.rules['error_calculation_method']=='count_traces_with_errors' ): + self.rules['count_all_errors'] = False + + # hardcoding uniq aggregation for total count + self.rules['total_agg_type'] = "uniq" + + def calculate_err_rate(self,payload): + for timestamp, payload_data in payload.iteritems(): + if int(payload_data['total_count']) > 0: + rate = float(payload_data['error_count'])/float(payload_data['total_count']) + rate = float(rate)/float(self.rules['sampling']) + rate = rate*100 + if 'threshold' in self.rules and rate > self.rules['threshold']: + match = {self.rules['timestamp_field']: timestamp, 'error_rate': rate, 'from': payload_data['start_time'], 'to': payload_data['end_time']} + self.add_match(match) + class MetricAggregationRule(BaseAggregationRule): """ A rule that matches when there is a low number of events given a timeframe. """ required_options = frozenset(['metric_agg_key', 'metric_agg_type']) diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 0b1707022..46217cc79 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -72,7 +72,7 @@ definitions: filter: &filter {} -required: [type, index, alert] +required: [type, alert] type: object ### Rule Types section @@ -211,6 +211,14 @@ oneOf: properties: # custom rules include a period in the rule type type: {pattern: "[.]"} + + - title: Error Rate + required: [sampling, threshold] + properties: + sampling: {type: integer} + threshold: {type: number} + error_condition: {type: string} + unique_column: {type: string} properties: diff --git a/elastalert/util.py b/elastalert/util.py index 3ab2489ab..39e6e58de 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -6,6 +6,7 @@ import re import sys import time +import types import dateutil.parser import pytz @@ -277,7 +278,10 @@ def unixms_to_dt(ts): def unix_to_dt(ts): - dt = datetime.datetime.utcfromtimestamp(float(ts)) + if(type(ts) == types.UnicodeType): + dt = datetime.datetime.strptime(ts, '%Y-%m-%d %H:%M:%S.%f') + else: + dt = datetime.datetime.utcfromtimestamp(float(ts)) dt = dt.replace(tzinfo=dateutil.tz.tzutc()) return dt @@ -344,6 +348,93 @@ def elasticsearch_client(conf): return ElasticSearchClient(es_conn_conf) +def kibana_adapter_client(conf): + """ returns an Elasticsearch instance configured using an es_conn_config """ + es_conn_conf = build_adapter_conn_config(conf) + auth = Auth() + es_conn_conf['http_auth'] = auth(host=es_conn_conf['es_host'], + username=es_conn_conf['es_username'], + password=es_conn_conf['es_password'], + aws_region=es_conn_conf['aws_region'], + profile_name=es_conn_conf['profile']) + + return ElasticSearchClient(es_conn_conf) + +def build_adapter_conn_config(conf): + """ Given a conf dictionary w/ raw config properties 'use_ssl', 'es_host', 'es_port' + 'es_username' and 'es_password', this will return a new dictionary + with properly initialized values for 'es_host', 'es_port', 'use_ssl' and 'http_auth' which + will be a basicauth username:password formatted string """ + parsed_conf = {} + parsed_conf['use_ssl'] = os.environ.get('ES_USE_SSL', False) + parsed_conf['verify_certs'] = True + parsed_conf['ca_certs'] = None + parsed_conf['client_cert'] = None + parsed_conf['client_key'] = None + parsed_conf['http_auth'] = None + parsed_conf['es_username'] = None + parsed_conf['es_password'] = None + parsed_conf['es_api_key'] = None + parsed_conf['es_bearer'] = None + parsed_conf['aws_region'] = 
None + parsed_conf['profile'] = None + parsed_conf['headers'] = None + parsed_conf['es_host'] = conf['kibana_adapter'] + parsed_conf['es_port'] = conf['kibana_adapter_port'] + parsed_conf['es_url_prefix'] = '' + parsed_conf['es_conn_timeout'] = conf.get('es_conn_timeout', 20) + parsed_conf['send_get_body_as'] = conf.get('es_send_get_body_as', 'GET') + parsed_conf['ssl_show_warn'] = conf.get('ssl_show_warn', True) + + if os.environ.get('ES_USERNAME'): + parsed_conf['es_username'] = os.environ.get('ES_USERNAME') + parsed_conf['es_password'] = os.environ.get('ES_PASSWORD') + elif 'es_username' in conf: + parsed_conf['es_username'] = conf['es_username'] + parsed_conf['es_password'] = conf['es_password'] + + if os.environ.get('ES_API_KEY'): + parsed_conf['es_api_key'] = os.environ.get('ES_API_KEY') + elif 'es_api_key' in conf: + parsed_conf['es_api_key'] = conf['es_api_key'] + + if os.environ.get('ES_BEARER'): + parsed_conf['es_bearer'] = os.environ.get('ES_BEARER') + elif 'es_bearer' in conf: + parsed_conf['es_bearer'] = conf['es_bearer'] + + if 'aws_region' in conf: + parsed_conf['aws_region'] = conf['aws_region'] + + if 'profile' in conf: + parsed_conf['profile'] = conf['profile'] + + if 'use_ssl' in conf: + parsed_conf['use_ssl'] = conf['use_ssl'] + + if 'verify_certs' in conf: + parsed_conf['verify_certs'] = conf['verify_certs'] + + if 'ca_certs' in conf: + parsed_conf['ca_certs'] = conf['ca_certs'] + + if 'client_cert' in conf: + parsed_conf['client_cert'] = conf['client_cert'] + + if 'client_key' in conf: + parsed_conf['client_key'] = conf['client_key'] + + if 'es_url_prefix' in conf: + parsed_conf['es_url_prefix'] = conf['es_url_prefix'] + + if 'kibana_adapter_url_prefix' in conf: + parsed_conf['es_url_prefix'] = conf['kibana_adapter_url_prefix'] + + + return parsed_conf + + + def build_es_conn_config(conf): """ Given a conf dictionary w/ raw config properties 'use_ssl', 'es_host', 'es_port' 'es_username' and 'es_password', this will return a new dictionary From a35258397c30951c7f2b9c8129b5858c61153df7 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Wed, 15 Feb 2023 12:04:31 +0530 Subject: [PATCH 1017/1065] checkpoint commit --- elastalert/elastalert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 5689af4fa..7007d55c0 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -42,7 +42,7 @@ elastalert_logger, elasticsearch_client,kibana_adapter_client, format_index, lookup_es_key, parse_deadline, parse_duration, pretty_ts, replace_dots_in_field_names, seconds, set_es_key, should_scrolling_continue, total_seconds, ts_add, ts_now, ts_to_dt, unix_to_dt, - ts_utc_to_tz) + ts_utc_to_tz, dt_to_ts_with_format) class ElastAlerter(object): From b2720ac23d77ea9248c3a99dedb811ddc97585f4 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Tue, 28 Feb 2023 16:16:08 +0530 Subject: [PATCH 1018/1065] bugs fixed and all rules tested --- .gitignore | 5 + .python-version | 1 + Dockerfile | 79 +- comparisonFile | 1949 -------------------------------------- comparisonFile2 | 591 ++---------- elastalert/elastalert.py | 45 +- elastalert/ruletypes.py | 12 +- requirements.txt | 35 +- setup.py | 28 +- 9 files changed, 182 insertions(+), 2563 deletions(-) create mode 100644 .python-version diff --git a/.gitignore b/.gitignore index 8ef91bc3f..1daffea83 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,8 @@ my_rules *~ /rules/ mod/ +comparisonFile +comparisonFile2 +examples/ex/* +examples/ex1/* +comparisonFile* diff --git 
a/.python-version b/.python-version new file mode 100644 index 000000000..2c0733315 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.11 diff --git a/Dockerfile b/Dockerfile index 044a1f400..ac5ca40ab 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,44 +1,35 @@ -FROM python:3-slim-buster as builder - -LABEL description="ElastAlert 2 Official Image" -LABEL maintainer="Jason Ertel" - -COPY . /tmp/elastalert - -RUN mkdir -p /opt/elastalert && \ - cd /tmp/elastalert && \ - pip install setuptools wheel && \ - python setup.py sdist bdist_wheel - -FROM python:3-slim-buster - -ARG GID=1000 -ARG UID=1000 -ARG USERNAME=elastalert - -COPY --from=builder /tmp/elastalert/dist/*.tar.gz /tmp/ - -RUN apt update && apt -y upgrade && \ - apt -y install jq curl gcc libffi-dev && \ - rm -rf /var/lib/apt/lists/* && \ - pip install /tmp/*.tar.gz && \ - rm -rf /tmp/* && \ - apt -y remove gcc libffi-dev && \ - apt -y autoremove && \ - mkdir -p /opt/elastalert && \ - echo "#!/bin/sh" >> /opt/elastalert/run.sh && \ - echo "set -e" >> /opt/elastalert/run.sh && \ - echo "elastalert-create-index --config /opt/elastalert/config.yaml" \ - >> /opt/elastalert/run.sh && \ - echo "elastalert --config /opt/elastalert/config.yaml \"\$@\"" \ - >> /opt/elastalert/run.sh && \ - chmod +x /opt/elastalert/run.sh && \ - groupadd -g ${GID} ${USERNAME} && \ - useradd -u ${UID} -g ${GID} -M -b /opt -s /sbin/nologin \ - -c "ElastAlert 2 User" ${USERNAME} - -USER ${USERNAME} -ENV TZ "UTC" - -WORKDIR /opt/elastalert -ENTRYPOINT ["/opt/elastalert/run.sh"] +# UNDER DEVELOPMENT + +# FROM python:3-slim as build + +# ENV ELASTALERT_HOME /opt/elastalert +# ADD . /opt/elastalert/ + +# WORKDIR /opt + +# RUN apk add --update --no-cache jq curl gcc openssl-dev libffi-dev openssl ca-certificates musl-dev python-dev +# RUN pip install "setuptools==36.2.7" "elasticsearch==6.3.1" + +# WORKDIR "${ELASTALERT_HOME}" + +# RUN pip install -r requirements.txt +# RUN python setup.py install + +# FROM gcr.io/distroless/python3:debug as runtime + +# COPY --from=build /opt/elastalert /opt/elastalert +# COPY --from=build /usr/local/lib/python3 /usr/local/lib/python3 +# COPY --from=build /usr/local/bin/elastalert* /usr/local/bin/ +# COPY --from=build /usr/local/lib/libpython2.7.so.1.0 /usr/local/lib/ +# COPY --from=build /usr/lib/libpython2.7.so.1.0 /usr/lib/ +# COPY --from=build /lib/libc.musl-x86_64.so.1 /lib/ + +# #COPY --from=build /data/elastalert /data/elastalert + +# ENV PYTHONPATH=/usr/local/lib/python2.7/site-packages +# ENV PATH=/usr/local/lib:/usr/lib:$PATH + +# WORKDIR /opt/elastalert + +# CMD ["/usr/local/bin/elastalert-create-index","--config","/data/elastalert/config.yaml", "--verbose"] +# CMD ["/usr/local/bin/elastalert","--config","/data/elastalert/config.yaml", "--verbose"] diff --git a/comparisonFile b/comparisonFile index 18e2bd65f..04e9d6e1c 100644 --- a/comparisonFile +++ b/comparisonFile @@ -1,1982 +1,33 @@ -import argparse -import copy -import datetime -import json -import logging -import os -import signal -import sys -import time -import timeit -import traceback -import requests -from email.mime.text import MIMEText -from smtplib import SMTP -from smtplib import SMTPException -from socket import error -import dateutil.tz -import kibana -import yaml -from alerts import DebugAlerter -from config import get_rule_hashes -from config import load_configuration -from config import load_rules -from croniter import croniter -from elasticsearch.exceptions import ConnectionError -from elasticsearch.exceptions import ElasticsearchException -from 
elasticsearch.exceptions import TransportError -from enhancements import DropMatchException -from ruletypes import FlatlineRule -from ruletypes import ErrorRateRule -from util import add_raw_postfix -from util import cronite_datetime_to_timestamp -from util import dt_to_ts -from util import dt_to_ts_with_format -from util import dt_to_unix -from util import EAException -from util import elastalert_logger -from util import elasticsearch_client -from util import kibana_adapter_client -from util import format_index -from util import lookup_es_key -from util import parse_deadline -from util import parse_duration -from util import pretty_ts -from util import replace_dots_in_field_names -from util import seconds -from util import set_es_key -from util import total_seconds -from util import ts_add -from util import ts_now -from util import ts_to_dt -from util import unix_to_dt -from prometheus_wrapper import PrometheusWrapper -class ElastAlerter(): - """ The main ElastAlert runner. This class holds all state about active rules, - controls when queries are run, and passes information between rules and alerts. - :param args: An argparse arguments instance. Should contain debug and start - :param conf: The configuration dictionary. At the top level, this - contains global options, and under 'rules', contains all state relating - to rules and alerts. In each rule in conf['rules'], the RuleType and Alerter - instances live under 'type' and 'alerts', respectively. The conf dictionary - should not be passed directly from a configuration file, but must be populated - by config.py:load_rules instead. """ - def parse_args(self, args): - parser = argparse.ArgumentParser() - parser.add_argument( - '--config', - action='store', - dest='config', - default="config.yaml", - help='Global config file (default: config.yaml)') - parser.add_argument('--debug', action='store_true', dest='debug', help='Suppresses alerts and prints information instead. ' - 'Not compatible with `--verbose`') - parser.add_argument('--rule', dest='rule', help='Run only a specific rule (by filename, must still be in rules folder)') - parser.add_argument('--silence', dest='silence', help='Silence rule for a time period. Must be used with --rule. Usage: ' - '--silence =, eg. --silence hours=2') - parser.add_argument('--start', dest='start', help='YYYY-MM-DDTHH:MM:SS Start querying from this timestamp. ' - 'Use "NOW" to start from current time. (Default: present)') - parser.add_argument('--end', dest='end', help='YYYY-MM-DDTHH:MM:SS Query to this timestamp. (Default: present)') - parser.add_argument('--verbose', action='store_true', dest='verbose', help='Increase verbosity without suppressing alerts. ' - 'Not compatible with `--debug`') - parser.add_argument('--patience', action='store', dest='timeout', - type=parse_duration, - default=datetime.timedelta(), - help='Maximum time to wait for ElasticSearch to become responsive. Usage: ' - '--patience =. e.g. --patience minutes=5') - parser.add_argument( - '--pin_rules', - action='store_true', - dest='pin_rules', - help='Stop ElastAlert from monitoring config file changes') - parser.add_argument('--es_debug', action='store_true', dest='es_debug', help='Enable verbose logging from Elasticsearch queries') - parser.add_argument( - '--es_debug_trace', - action='store', - dest='es_debug_trace', - help='Enable logging from Elasticsearch queries as curl command. Queries will be logged to file. 
Note that ' - 'this will incorrectly display localhost:9200 as the host/port') - parser.add_argument('--prometheus_port', type=int, dest='prometheus_port', default=9090, help='Enables Prometheus metrics on specified port.') - self.args = parser.parse_args(args) - def __init__(self, args): - self.parse_args(args) - self.debug = self.args.debug - self.verbose = self.args.verbose - if self.verbose and self.debug: - elastalert_logger.info( - "Note: --debug and --verbose flags are set. --debug takes precedent." - ) - if self.verbose or self.debug: - elastalert_logger.setLevel(logging.INFO) - if self.debug: - elastalert_logger.info( - """Note: In debug mode, alerts will be logged to console but NOT actually sent. - To send them but remain verbose, use --verbose instead.""" - ) - if not self.args.es_debug: - logging.getLogger('elasticsearch').setLevel(logging.WARNING) - if self.args.es_debug_trace: - tracer = logging.getLogger('elasticsearch.trace') - tracer.setLevel(logging.INFO) - tracer.addHandler(logging.FileHandler(self.args.es_debug_trace)) - self.conf = load_rules(self.args) - self.max_query_size = self.conf['max_query_size'] - self.scroll_keepalive = self.conf['scroll_keepalive'] - self.rules = self.conf['rules'] - self.writeback_index = self.conf['writeback_index'] - self.run_every = self.conf['run_every'] - self.alert_time_limit = self.conf['alert_time_limit'] - self.old_query_limit = self.conf['old_query_limit'] - self.disable_rules_on_error = self.conf['disable_rules_on_error'] - self.notify_email = self.conf.get('notify_email', []) - self.from_addr = self.conf.get('from_addr', 'ElastAlert') - self.smtp_host = self.conf.get('smtp_host', 'localhost') - self.max_aggregation = self.conf.get('max_aggregation', 10000) - self.alerts_sent = 0 - self.num_hits = 0 - self.num_dupes = 0 - self.current_es = None - self.current_es_addr = None - self.buffer_time = self.conf['buffer_time'] - self.silence_cache = {} - self.rule_hashes = get_rule_hashes(self.conf, self.args.rule) - self.starttime = self.args.start - self.disabled_rules = [] - self.replace_dots_in_field_names = self.conf.get('replace_dots_in_field_names', False) - self.string_multi_field_name = self.conf.get('string_multi_field_name', False) - self.prometheus_port = self.args.prometheus_port - self.writeback_es = elasticsearch_client(self.conf) - self.kibana_adapter = kibana_adapter_client(self.conf) - self._es_version = None - self.query_endpoint = self.conf['query_endpoint'] - remove = [] - for rule in self.rules: - if not self.init_rule(rule): - remove.append(rule) - map(self.rules.remove, remove) - if self.args.silence: - self.silence() - def get_version(self): - info = self.writeback_es.info() - return info['version']['number'] - @property - def es_version(self): - return "6.8.23" - def is_atleastfive(self): - return int(self.es_version.split(".")[0]) >= 5 - def is_atleastsix(self): - return int(self.es_version.split(".")[0]) >= 6 - @staticmethod - def get_index(rule, starttime=None, endtime=None): - """ Gets the index for a rule. If strftime is set and starttime and endtime - are provided, it will return a comma seperated list of indices. If strftime - is set but starttime and endtime are not provided, it will replace all format - tokens with a wildcard. 
""" - index = rule['index'] - add_extra = rule.get('search_extra_index', False) - if rule.get('use_strftime_index'): - if starttime and endtime: - return format_index(index, starttime, endtime, add_extra) - else: - # Replace the substring containing format characters with a * - format_start = index.find('%') - format_end = index.rfind('%') + 2 - return index[:format_start] + '*' + index[format_end:] - else: - return index - def get_six_index(self, doc_type): - """ In ES6, you cannot have multiple _types per index, - therefore we use self.writeback_index as the prefix for the actual - index name, based on doc_type. """ - writeback_index = self.writeback_index - if doc_type == 'silence': - writeback_index += '_silence' - elif doc_type == 'past_elastalert': - writeback_index += '_past' - elif doc_type == 'elastalert_status': - writeback_index += '_status' - elif doc_type == 'elastalert_error': - writeback_index += '_error' - return writeback_index - @staticmethod - def get_msearch_query(query, rule): - search_arr = [] - search_arr.append({'index': [rule['index']]}) - if rule.get('use_count_query'): - query['size'] = 0 - if rule['include']: - query['_source'] = {} - query['_source']['includes'] = rule['include'] - search_arr.append(query) - request = '' - for each in search_arr: - request += '%s \n' %json.dumps(each) - return request - @staticmethod - def get_query(filters, starttime=None, endtime=None, sort=True, timestamp_field='@timestamp', to_ts_func=dt_to_ts, desc=False, - five=False): - """ Returns a query dict that will apply a list of filters, filter by - start and end time, and sort results by timestamp. - :param filters: A list of Elasticsearch filters to use. - :param starttime: A timestamp to use as the start time of the query. - :param endtime: A timestamp to use as the end time of the query. - :param sort: If true, sort results by timestamp. (Default True) - :return: A query dictionary to pass to Elasticsearch. 
- """ - starttime = to_ts_func(starttime) - endtime = to_ts_func(endtime) - filters = copy.copy(filters) - es_filters = {'filter': {'bool': {'must': filters}}} - if starttime and endtime: - es_filters['filter']['bool']['must'].insert(0, {'range': {timestamp_field: {'gt': starttime, - 'lte': endtime}}}) - query = {'query': {'bool': es_filters}} - if sort: - query['sort'] = [{timestamp_field: {'order': 'desc' if desc else 'asc'}}] - return query - def get_terms_query(self, query, size, field, five=False): - """ Takes a query generated by get_query and outputs a aggregation query """ - query_element = query['query'] - if 'sort' in query_element: - query_element.pop('sort') - - aggs_query = query - aggs_query['aggs'] = {'counts': {'terms': {'field': field, 'size': size}}} - - return aggs_query - def get_aggregation_query(self, query, rule, query_key, terms_size, timestamp_field='@timestamp'): - """ Takes a query generated by get_query and outputs a aggregation query """ - query_element = query['query'] - if 'sort' in query_element: - query_element.pop('sort') - metric_agg_element = rule['aggregation_query_element'] - bucket_interval_period = rule.get('bucket_interval_period') - if bucket_interval_period is not None: - aggs_element = { - 'interval_aggs': { - 'date_histogram': { - 'field': timestamp_field, - 'interval': bucket_interval_period}, - 'aggs': metric_agg_element - } - } - if rule.get('bucket_offset_delta'): - aggs_element['interval_aggs']['date_histogram']['offset'] = '+%ss' % (rule['bucket_offset_delta']) - else: - aggs_element = metric_agg_element - if query_key is not None: - for idx, key in reversed(list(enumerate(query_key.split(',')))): - aggs_element = {'bucket_aggs': {'terms': {'field': key, 'size': terms_size}, 'aggs': aggs_element}} - aggs_query = query - aggs_query['aggs'] = aggs_element - return aggs_query - def get_index_start(self, index, timestamp_field='@timestamp'): - """ Query for one result sorted by timestamp to find the beginning of the index. - - :param index: The index of which to find the earliest event. - :return: Timestamp of the earliest event. - """ - query = {'sort': {timestamp_field: {'order': 'asc'}}} - try: - res = self.current_es.search(index=index, size=1, body=query, _source_include=[timestamp_field], ignore_unavailable=True) - except ElasticsearchException as e: - self.handle_error("Elasticsearch query error: %s" % (e), {'index': index, 'query': query}) - return '1969-12-30T00:00:00Z' - if len(res['hits']['hits']) == 0: - # Index is completely empty, return a date before the epoch - return '1969-12-30T00:00:00Z' - return res['hits']['hits'][0][timestamp_field] - - @staticmethod - def process_hits(rule, hits): - """ Update the _source field for each hit received from ES based on the rule configuration. - - This replaces timestamps with datetime objects, - folds important fields into _source and creates compound query_keys. - - :return: A list of processed _source dictionaries. - """ - - processed_hits = [] - for hit in hits: - # Merge fields and _source - hit.setdefault('_source', {}) - for key, value in hit.get('fields', {}).items(): - # Fields are returned as lists, assume any with length 1 are not arrays in _source - # Except sometimes they aren't lists. 
This is dependent on ES version - hit['_source'].setdefault(key, value[0] if type(value) is list and len(value) == 1 else value) - - # Convert the timestamp to a datetime - ts = lookup_es_key(hit['_source'], rule['timestamp_field']) - if not ts and not rule["_source_enabled"]: - raise EAException( - "Error: No timestamp was found for hit. '_source_enabled' is set to false, check your mappings for stored fields" - ) - - set_es_key(hit['_source'], rule['timestamp_field'], rule['ts_to_dt'](ts)) - set_es_key(hit, rule['timestamp_field'], lookup_es_key(hit['_source'], rule['timestamp_field'])) - - # Tack metadata fields into _source - for field in ['_id', '_index', '_type']: - if field in hit: - hit['_source'][field] = hit[field] - - if rule.get('compound_query_key'): - values = [lookup_es_key(hit['_source'], key) for key in rule['compound_query_key']] - hit['_source'][rule['query_key']] = ', '.join([unicode(value) for value in values]) - - if rule.get('compound_aggregation_key'): - values = [lookup_es_key(hit['_source'], key) for key in rule['compound_aggregation_key']] - hit['_source'][rule['aggregation_key']] = ', '.join([unicode(value) for value in values]) - - processed_hits.append(hit['_source']) - - return processed_hits - - def get_hits(self, rule, starttime, endtime, index, scroll=False): - """ Query Elasticsearch for the given rule and return the results. - :param rule: The rule configuration. - :param starttime: The earliest time to query. - :param endtime: The latest time to query. - :return: A list of hits, bounded by rule['max_query_size'] (or self.max_query_size). - """ - - query = self.get_query( - rule['filter'], - starttime, - endtime, - timestamp_field=rule['timestamp_field'], - to_ts_func=rule['dt_to_ts'], - five=rule['five'], - ) - - request = self.get_msearch_query(query,rule) - - # extra_args = {'_source_include': rule['include']} - # scroll_keepalive = rule.get('scroll_keepalive', self.scroll_keepalive) - # if not rule.get('_source_enabled'): - # if rule['five']: - # query['stored_fields'] = rule['include'] - # else: - # query['fields'] = rule['include'] - # extra_args = {} - - try: - res = self.current_es.msearch(body=request) - res = res['responses'][0] - self.total_hits = int(res['hits']['total']) - - if len(res.get('_shards', {}).get('failures', [])) > 0: - errs = [e['reason']['reason'] for e in res['_shards']['failures'] if 'Failed to parse' in e['reason']['reason']] - if len(errs): - raise ElasticsearchException(errs) - - logging.debug(str(res)) - except ElasticsearchException as e: - # Elasticsearch sometimes gives us GIGANTIC error messages - # (so big that they will fill the entire terminal buffer) - if len(str(e)) > 1024: - e = str(e)[:1024] + '... 
(%d characters removed)' % (len(str(e)) - 1024) - self.handle_error('Error running query: %s' % (e), {'rule': rule['name'], 'query': query}) - return None - hits = res['hits']['hits'] - self.num_hits += len(hits) - lt = rule.get('use_local_time') - status_log = "Queried rule %s from %s to %s: %s / %s hits" % ( - rule['name'], - pretty_ts(starttime, lt), - pretty_ts(endtime, lt), - self.num_hits, - len(hits) - ) - if self.total_hits > rule.get('max_query_size', self.max_query_size): - elastalert_logger.info("%s (scrolling..)" % status_log) - # rule['scroll_id'] = res['_scroll_id'] - else: - elastalert_logger.info(status_log) - - hits = self.process_hits(rule, hits) - - # Record doc_type for use in get_top_counts - if 'doc_type' not in rule and len(hits): - rule['doc_type'] = hits[0]['_type'] - return hits - - def get_hits_count(self, rule, starttime, endtime, index): - """ Query Elasticsearch for the count of results and returns a list of timestamps - equal to the endtime. This allows the results to be passed to rules which expect - an object for each hit. - - :param rule: The rule configuration dictionary. - :param starttime: The earliest time to query. - :param endtime: The latest time to query. - :return: A dictionary mapping timestamps to number of hits for that time period. - """ - query = self.get_query( - rule['filter'], - starttime, - endtime, - timestamp_field=rule['timestamp_field'], - sort=False, - to_ts_func=rule['dt_to_ts'], - five=rule['five'] - ) - - request = self.get_msearch_query(query,rule) - - try: - res = self.current_es.msearch(body=request) - res = res['responses'][0] - except ElasticsearchException as e: - # Elasticsearch sometimes gives us GIGANTIC error messages - # (so big that they will fill the entire terminal buffer) - if len(str(e)) > 1024: - e = str(e)[:1024] + '... 
(%d characters removed)' % (len(str(e)) - 1024) - self.handle_error('Error running count query: %s' % (e), {'rule': rule['name'], 'query': query}) - return None - - self.num_hits += res['hits']['total'] - lt = rule.get('use_local_time') - elastalert_logger.info( - "Queried rule %s from %s to %s: %s hits" % (rule['name'], pretty_ts(starttime, lt), pretty_ts(endtime, lt), res['hits']['total']) - ) - return {endtime: res['hits']['total']} - - def get_hits_terms(self, rule, starttime, endtime, index, key, qk=None, size=None): - rule_filter = copy.copy(rule['filter']) - if qk: - qk_list = qk.split(", ") - end = None - if rule['five']: - end = '.keyword' - else: - end = '.raw' - - if len(qk_list) == 1: - qk = qk_list[0] - filter_key = rule['query_key'] - if rule.get('raw_count_keys', True) and not rule['query_key'].endswith(end): - filter_key = add_raw_postfix(filter_key, rule['five']) - rule_filter.extend([{'term': {filter_key: qk}}]) - else: - filter_keys = rule['compound_query_key'] - for i in range(len(filter_keys)): - key_with_postfix = filter_keys[i] - if rule.get('raw_count_keys', True) and not key.endswith(end): - key_with_postfix = add_raw_postfix(key_with_postfix, rule['five']) - rule_filter.extend([{'term': {key_with_postfix: qk_list[i]}}]) - - base_query = self.get_query( - rule_filter, - dt_to_ts_with_format(starttime,"%Y-%m-%dT%H:%M:%S.%f")[:-3]+'Z', - dt_to_ts_with_format(endtime,"%Y-%m-%dT%H:%M:%S.%f")[:-3]+'Z', - timestamp_field=rule['timestamp_field'], - sort=False, - to_ts_func=rule['dt_to_ts'], - five=rule['five'] - ) - if size is None: - size = rule.get('terms_size', 50) - query = self.get_terms_query(base_query, size, key, rule['five']) - request = self.get_msearch_query(query,rule) - try: - res = self.current_es.msearch(body=request) - res = res['responses'][0] - except ElasticsearchException as e: - # Elasticsearch sometimes gives us GIGANTIC error messages - # (so big that they will fill the entire terminal buffer) - if len(str(e)) > 1024: - e = str(e)[:1024] + '... (%d characters removed)' % (len(str(e)) - 1024) - self.handle_error('Error running terms query: %s' % (e), {'rule': rule['name'], 'query': query}) - return None - - if 'aggregations' not in res: - return {} - if not rule['five']: - buckets = res['aggregations']['filtered']['counts']['buckets'] - else: - buckets = res['aggregations']['counts']['buckets'] - self.num_hits += len(buckets) - lt = rule.get('use_local_time') - elastalert_logger.info( - 'Queried rule %s from %s to %s: %s buckets' % (rule['name'], pretty_ts(starttime, lt), pretty_ts(endtime, lt), len(buckets)) - ) - return {endtime: buckets} - - def get_hits_aggregation(self, rule, starttime, endtime, index, query_key, term_size=None): - rule_filter = copy.copy(rule['filter']) - base_query = self.get_query( - rule_filter, - starttime, - endtime, - timestamp_field=rule['timestamp_field'], - sort=False, - to_ts_func=rule['dt_to_ts'], - five=rule['five'] - ) - if term_size is None: - term_size = rule.get('terms_size', 50) - query = self.get_aggregation_query(base_query, rule, query_key, term_size, rule['timestamp_field']) - request = self.get_msearch_query(query,rule) - try: - res = self.current_es.msearch(body=request) - res = res['responses'][0] - except ElasticsearchException as e: - if len(str(e)) > 1024: - e = str(e)[:1024] + '... 
(%d characters removed)' % (len(str(e)) - 1024) - self.handle_error('Error running query: %s' % (e), {'rule': rule['name']}) - return None - if 'aggregations' not in res: - return {} - if not rule['five']: - payload = res['aggregations']['filtered'] - else: - payload = res['aggregations'] - self.num_hits += res['hits']['total'] - return {endtime: payload} - - def get_error_rate(self, rule, starttime, endtime): - agg_key = '{}({})'.format(rule['total_agg_type'],rule['total_agg_key']) - query = self.get_query_string(rule) - aggregation = {"function": rule['total_agg_type'].upper(), "field": rule['total_agg_key']} - - total_data, total_count = self.get_ch_data(rule, starttime, endtime, agg_key, query, aggregation) - - if total_data is None: - return {} - - if(query): - query = '{} AND {}'.format(query,rule['error_condition']) - else: - query = rule['error_condition'] - - if rule['count_all_errors']: - agg_key = "count()" - aggregation = {"function": "COUNT", "field": "1"} - - - error_data, error_count = self.get_ch_data(rule, starttime, endtime, agg_key, query, aggregation) - - if error_data is None: - return {} - - payload = {'error_count': error_data, 'total_count': total_data, 'start_time': starttime, 'end_time': endtime} - elastalert_logger.info("query start time and endtime %s at %s , error_count %d ,total_count %d" % (starttime, endtime, error_data, total_data)) - - self.num_hits += int(error_count) - - return {endtime: payload} - - def get_query_string(self, rule): - if rule['filter'] and ('query_string' in rule['filter'][0]) and ('query' in rule['filter'][0]['query_string']): - return rule['filter'][0]['query_string']['query'] - return "" - - def get_ch_data(self, rule, starttime, endtime, agg_key, freshquery,aggregation): - data = { - "selects":[], - "start_time":dt_to_ts_with_format(starttime,"%Y-%m-%dT%H:%M:%S.%f")[:-3]+'Z', - "end_time":dt_to_ts_with_format(endtime,"%Y-%m-%dT%H:%M:%S.%f")[:-3]+'Z', - "freshquery": freshquery, - "group_bys":[], - "sort_orders":[{"sort_by": agg_key,"sort_direction":"desc"}], - "aggregations":[aggregation] - } - try: - res = requests.post(self.query_endpoint, json=data) - res.raise_for_status() - except requests.exceptions.RequestException as e: - if len(str(e)) > 1024: - e = str(e)[:1024] + '... (%d characters removed)' % (len(str(e)) - 1024) - self.handle_error('Error running query: %s' % (e), {'rule': rule['name']}) - return None,0 - res = json.loads(res.content) - return int(res['data'][0][agg_key]), res['rows'] - elastalert_logger.info("request data is %s" % json.dumps(data)) - # res = requests.post(self.query_endpoint, json=data) - # return None, None - - def remove_duplicate_events(self, data, rule): - new_events = [] - for event in data: - if event['_id'] in rule['processed_hits']: - continue - - # Remember the new data's IDs - rule['processed_hits'][event['_id']] = lookup_es_key(event, rule['timestamp_field']) - new_events.append(event) - - return new_events - - def remove_old_events(self, rule): - # Anything older than the buffer time we can forget - now = ts_now() - remove = [] - buffer_time = rule.get('buffer_time', self.buffer_time) - if rule.get('query_delay'): - buffer_time += rule['query_delay'] - for _id, timestamp in rule['processed_hits'].iteritems(): - if now - timestamp > buffer_time: - remove.append(_id) - map(rule['processed_hits'].pop, remove) - - def run_query(self, rule, start=None, end=None, scroll=False): - """ Query for the rule and pass all of the results to the RuleType instance. - - :param rule: The rule configuration. 
- :param start: The earliest time to query. - :param end: The latest time to query. - Returns True on success and False on failure. - """ - if start is None: - start = self.get_index_start(rule['index']) - if end is None: - end = ts_now() - - # Reset hit counter and query - rule_inst = rule['type'] - index = self.get_index(rule, start, end) - if rule.get('use_count_query'): - data = self.get_hits_count(rule, start, end, index) - elif rule.get('use_terms_query'): - data = self.get_hits_terms(rule, start, end, index, rule['query_key']) - elif isinstance(rule_inst, ErrorRateRule): - data = self.get_error_rate(rule, start, end) - elif rule.get('aggregation_query_element'): - data = self.get_hits_aggregation(rule, start, end, index, rule.get('query_key', None)) - else: - data = self.get_hits(rule, start, end, index, scroll) - if data: - old_len = len(data) - data = self.remove_duplicate_events(data, rule) - self.num_dupes += old_len - len(data) - - # There was an exception while querying - if data is None: - return False - elif data: - if rule.get('use_count_query'): - rule_inst.add_count_data(data) - elif rule.get('use_terms_query'): - rule_inst.add_terms_data(data) - elif isinstance(rule_inst, ErrorRateRule): - rule_inst.calculate_err_rate(data) - elif rule.get('aggregation_query_element'): - rule_inst.add_aggregation_data(data) - else: - rule_inst.add_data(data) - - # try: - # if rule.get('scroll_id') and self.num_hits < self.total_hits: - # self.run_query(rule, start, end, scroll=True) - # except RuntimeError: - # # It's possible to scroll far enough to hit max recursive depth - # pass - - # if 'scroll_id' in rule: - # rule.pop('scroll_id') - - return True - - def get_starttime(self, rule): - """ Query ES for the last time we ran this rule. - - :param rule: The rule configuration. - :return: A timestamp or None. - """ - sort = {'sort': {'@timestamp': {'order': 'desc'}}} - query = {'filter': {'term': {'rule_name': '%s' % (rule['name'])}}} - if self.is_atleastfive(): - query = {'query': {'bool': query}} - query.update(sort) - - try: - index = self.get_six_index('elastalert_status') - res = self.writeback_es.search(index=index, doc_type='elastalert_status', - size=1, body=query, _source_include=['endtime', 'rule_name']) - if res['hits']['hits']: - endtime = ts_to_dt(res['hits']['hits'][0]['_source']['endtime']) - - if ts_now() - endtime < self.old_query_limit: - return endtime - else: - elastalert_logger.info("Found expired previous run for %s at %s" % (rule['name'], endtime)) - return None - except (ElasticsearchException, KeyError) as e: - self.handle_error('Error querying for last run: %s' % (e), {'rule': rule['name']}) - - def set_starttime(self, rule, endtime): - """ Given a rule and an endtime, sets the appropriate starttime for it. 
""" - - # This means we are starting fresh - if 'starttime' not in rule: - if not rule.get('scan_entire_timeframe'): - # Try to get the last run from Elasticsearch - last_run_end = self.get_starttime(rule) - if last_run_end: - rule['starttime'] = last_run_end - self.adjust_start_time_for_overlapping_agg_query(rule) - self.adjust_start_time_for_interval_sync(rule, endtime) - rule['minimum_starttime'] = rule['starttime'] - return None - - # Use buffer for normal queries, or run_every increments otherwise - # or, if scan_entire_timeframe, use timeframe - - if not rule.get('use_count_query') and not rule.get('use_terms_query'): - if not rule.get('scan_entire_timeframe'): - buffer_time = rule.get('buffer_time', self.buffer_time) - buffer_delta = endtime - buffer_time - else: - buffer_delta = endtime - rule['timeframe'] - # If we started using a previous run, don't go past that - if 'minimum_starttime' in rule and rule['minimum_starttime'] > buffer_delta: - rule['starttime'] = rule['minimum_starttime'] - # If buffer_time doesn't bring us past the previous endtime, use that instead - elif 'previous_endtime' in rule: - if rule['previous_endtime'] < buffer_delta: - rule['starttime'] = rule['previous_endtime'] - self.adjust_start_time_for_overlapping_agg_query(rule) - else: - rule['starttime'] = buffer_delta - - self.adjust_start_time_for_interval_sync(rule, endtime) - - else: - if not rule.get('scan_entire_timeframe'): - # Query from the end of the last run, if it exists, otherwise a run_every sized window - rule['starttime'] = rule.get('previous_endtime', endtime - self.run_every) - else: - rule['starttime'] = rule.get('previous_endtime', endtime - rule['timeframe']) - - def adjust_start_time_for_overlapping_agg_query(self, rule): - if rule.get('aggregation_query_element'): - if rule.get('allow_buffer_time_overlap') and not rule.get('use_run_every_query_size') and ( - rule['buffer_time'] > rule['run_every']): - rule['starttime'] = rule['starttime'] - (rule['buffer_time'] - rule['run_every']) - rule['original_starttime'] = rule['starttime'] - - def adjust_start_time_for_interval_sync(self, rule, endtime): - # If aggregation query adjust bucket offset - if rule.get('aggregation_query_element'): - - if rule.get('bucket_interval'): - es_interval_delta = rule.get('bucket_interval_timedelta') - unix_starttime = dt_to_unix(rule['starttime']) - es_interval_delta_in_sec = total_seconds(es_interval_delta) - offset = int(unix_starttime % es_interval_delta_in_sec) - - if rule.get('sync_bucket_interval'): - rule['starttime'] = unix_to_dt(unix_starttime - offset) - endtime = unix_to_dt(dt_to_unix(endtime) - offset) - else: - rule['bucket_offset_delta'] = offset - - def get_segment_size(self, rule): - """ The segment size is either buffer_size for queries which can overlap or run_every for queries - which must be strictly separate. This mimicks the query size for when ElastAlert is running continuously. """ - if not rule.get('use_count_query') and not rule.get('use_terms_query') and not rule.get('aggregation_query_element'): - return rule.get('buffer_time', self.buffer_time) - elif rule.get('aggregation_query_element'): - if rule.get('use_run_every_query_size'): - return self.run_every - else: - return rule.get('buffer_time', self.buffer_time) - else: - return self.run_every - - def get_query_key_value(self, rule, match): - # get the value for the match's query_key (or none) to form the key used for the silence_cache. 
- # Flatline ruletype sets "key" instead of the actual query_key - if isinstance(rule['type'], FlatlineRule) and 'key' in match: - return unicode(match['key']) - return self.get_named_key_value(rule, match, 'query_key') - - def get_aggregation_key_value(self, rule, match): - # get the value for the match's aggregation_key (or none) to form the key used for grouped aggregates. - return self.get_named_key_value(rule, match, 'aggregation_key') - - def get_named_key_value(self, rule, match, key_name): - # search the match for the key specified in the rule to get the value - if key_name in rule: - try: - key_value = lookup_es_key(match, rule[key_name]) - if key_value is not None: - # Only do the unicode conversion if we actually found something) - # otherwise we might transform None --> 'None' - key_value = unicode(key_value) - except KeyError: - # Some matches may not have the specified key - # use a special token for these - key_value = '_missing' - else: - key_value = None - - return key_value - - def enhance_filter(self, rule): - """ If there is a blacklist or whitelist in rule then we add it to the filter. - It adds it as a query_string. If there is already an query string its is appended - with blacklist or whitelist. - - :param rule: - :return: - """ - if not rule.get('filter_by_list', True): - return - if 'blacklist' in rule: - listname = 'blacklist' - elif 'whitelist' in rule: - listname = 'whitelist' - else: - return - - filters = rule['filter'] - additional_terms = [(rule['compare_key'] + ':"' + term + '"') for term in rule[listname]] - if listname == 'whitelist': - query = "NOT " + " AND NOT ".join(additional_terms) - else: - query = " OR ".join(additional_terms) - query_str_filter = {'query_string': {'query': query}} - if self.is_atleastfive(): - filters.append(query_str_filter) - else: - filters.append({'query': query_str_filter}) - logging.debug("Enhanced filter with {} terms: {}".format(listname, str(query_str_filter))) - - def run_rule(self, rule, endtime, starttime=None): - """ Run a rule for a given time period, including querying and alerting on results. - - :param rule: The rule configuration. - :param starttime: The earliest timestamp to query. - :param endtime: The latest timestamp to query. - :return: The number of matches that the rule produced. - """ - run_start = time.time() - - self.current_es = kibana_adapter_client(rule) - self.current_es_addr = (rule['es_host'], rule['es_port']) - - # If there are pending aggregate matches, try processing them - for x in range(len(rule['agg_matches'])): - match = rule['agg_matches'].pop() - self.add_aggregated_alert(match, rule) - - # Start from provided time if it's given - if starttime: - rule['starttime'] = starttime - else: - self.set_starttime(rule, endtime) - - rule['original_starttime'] = rule['starttime'] - - # Don't run if starttime was set to the future - if ts_now() <= rule['starttime']: - logging.warning("Attempted to use query start time in the future (%s), sleeping instead" % (starttime)) - return 0 - - # Run the rule. 
If querying over a large time period, split it up into segments - self.num_hits = 0 - self.num_dupes = 0 - self.cumulative_hits = 0 - segment_size = self.get_segment_size(rule) - - tmp_endtime = rule['starttime'] - - while endtime - rule['starttime'] > segment_size: - tmp_endtime = tmp_endtime + segment_size - if not self.run_query(rule, rule['starttime'], tmp_endtime): - return 0 - self.cumulative_hits += self.num_hits - self.num_hits = 0 - rule['starttime'] = tmp_endtime - rule['type'].garbage_collect(tmp_endtime) - - if rule.get('aggregation_query_element'): - if endtime - tmp_endtime == segment_size: - self.run_query(rule, tmp_endtime, endtime) - self.cumulative_hits += self.num_hits - elif total_seconds(rule['original_starttime'] - tmp_endtime) == 0: - rule['starttime'] = rule['original_starttime'] - return 0 - else: - endtime = tmp_endtime - else: - if not self.run_query(rule, rule['starttime'], endtime): - return 0 - self.cumulative_hits += self.num_hits - rule['type'].garbage_collect(endtime) - - # Process any new matches - num_matches = len(rule['type'].matches) - while rule['type'].matches: - match = rule['type'].matches.pop(0) - match['num_hits'] = self.cumulative_hits - match['num_matches'] = num_matches - - # If realert is set, silence the rule for that duration - # Silence is cached by query_key, if it exists - # Default realert time is 0 seconds - silence_cache_key = rule['name'] - query_key_value = self.get_query_key_value(rule, match) - if query_key_value is not None: - silence_cache_key += '.' + query_key_value - - if self.is_silenced(rule['name'] + "._silence") or self.is_silenced(silence_cache_key): - elastalert_logger.info('Ignoring match for silenced rule %s' % (silence_cache_key,)) - continue - - if rule['realert']: - next_alert, exponent = self.next_alert_time(rule, silence_cache_key, ts_now()) - self.set_realert(silence_cache_key, next_alert, exponent) - - if rule.get('run_enhancements_first'): - try: - for enhancement in rule['match_enhancements']: - try: - enhancement.process(match) - except EAException as e: - self.handle_error("Error running match enhancement: %s" % (e), {'rule': rule['name']}) - except DropMatchException: - continue - - # If no aggregation, alert immediately - if not rule['aggregation']: - self.alert([match], rule) - continue - - # Add it as an aggregated match - self.add_aggregated_alert(match, rule) - - # Mark this endtime for next run's start - rule['previous_endtime'] = endtime - - time_taken = time.time() - run_start - # Write to ES that we've run this rule against this time period - body = {'rule_name': rule['name'], - 'endtime': endtime, - 'starttime': rule['original_starttime'], - 'matches': num_matches, - 'hits': max(self.num_hits, self.cumulative_hits), - '@timestamp': ts_now(), - 'time_taken': time_taken} - self.writeback('elastalert_status', body) - - return num_matches - - def init_rule(self, new_rule, new=True): - ''' Copies some necessary non-config state from an exiting rule to a new rule. ''' - try: - self.modify_rule_for_ES5(new_rule) - except TransportError as e: - elastalert_logger.warning('Error connecting to Elasticsearch for rule {}. 
' - 'The rule has been disabled.'.format(new_rule['name'])) - self.send_notification_email(exception=e, rule=new_rule) - return False - - self.enhance_filter(new_rule) - - # Change top_count_keys to .raw - if 'top_count_keys' in new_rule and new_rule.get('raw_count_keys', True): - if self.string_multi_field_name: - string_multi_field_name = self.string_multi_field_name - elif self.is_atleastfive(): - string_multi_field_name = '.keyword' - else: - string_multi_field_name = '.raw' - - for i, key in enumerate(new_rule['top_count_keys']): - if not key.endswith(string_multi_field_name): - new_rule['top_count_keys'][i] += string_multi_field_name - - if 'download_dashboard' in new_rule['filter']: - # Download filters from Kibana and set the rules filters to them - db_filters = self.filters_from_kibana(new_rule, new_rule['filter']['download_dashboard']) - if db_filters is not None: - new_rule['filter'] = db_filters - else: - raise EAException("Could not download filters from %s" % (new_rule['filter']['download_dashboard'])) - - blank_rule = {'agg_matches': [], - 'aggregate_alert_time': {}, - 'current_aggregate_id': {}, - 'processed_hits': {}} - rule = blank_rule - - # Set rule to either a blank template or existing rule with same name - if not new: - for rule in self.rules: - if rule['name'] == new_rule['name']: - break - else: - rule = blank_rule - - copy_properties = ['agg_matches', - 'current_aggregate_id', - 'aggregate_alert_time', - 'processed_hits', - 'starttime', - 'minimum_starttime'] - for prop in copy_properties: - if prop not in rule: - continue - new_rule[prop] = rule[prop] - - return new_rule - - @staticmethod - def modify_rule_for_ES5(new_rule): - new_rule['five'] = True - # return - # Get ES version per rule - # rule_es = elasticsearch_client(new_rule) - # if int(rule_es.info()['version']['number'].split(".")[0]) >= 5: - # new_rule['five'] = True - # else: - # new_rule['five'] = False - # return - - # In ES5, filters starting with 'query' should have the top wrapper removed - new_filters = [] - for es_filter in new_rule.get('filter', []): - if es_filter.get('query'): - new_filters.append(es_filter['query']) - else: - new_filters.append(es_filter) - new_rule['filter'] = new_filters - - def load_rule_changes(self): - ''' Using the modification times of rule config files, syncs the running rules - to match the files in rules_folder by removing, adding or reloading rules. ''' - new_rule_hashes = get_rule_hashes(self.conf, self.args.rule) - - # Check each current rule for changes - for rule_file, hash_value in self.rule_hashes.iteritems(): - if rule_file not in new_rule_hashes: - # Rule file was deleted - elastalert_logger.info('Rule file %s not found, stopping rule execution' % (rule_file)) - self.rules = [rule for rule in self.rules if rule['rule_file'] != rule_file] - continue - if hash_value != new_rule_hashes[rule_file]: - # Rule file was changed, reload rule - try: - new_rule = load_configuration(rule_file, self.conf) - if 'is_enabled' in new_rule and not new_rule['is_enabled']: - elastalert_logger.info('Rule file %s is now disabled.' % (rule_file)) - # Remove this rule if it's been disabled - self.rules = [rule for rule in self.rules if rule['rule_file'] != rule_file] - continue - except EAException as e: - message = 'Could not load rule %s: %s' % (rule_file, e) - self.handle_error(message) - # Want to send email to address specified in the rule. Try and load the YAML to find it. 
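# --- Editor's note: illustrative sketch, not part of the patch ---------------
# The recovery path below re-reads the rule file only to find its notify_email
# address. A standalone version of that step; yaml.safe_load is used here as a
# stand-in for the bare yaml.load call in the original:
import yaml

def notify_email_from_rule_file(rule_file):
    try:
        with open(rule_file) as f:
            rule_yaml = yaml.safe_load(f)
    except (IOError, yaml.scanner.ScannerError):
        return None  # fall back to the global notify_email setting
    if isinstance(rule_yaml, dict):
        return rule_yaml.get('notify_email')
    return None
# ------------------------------------------------------------------------------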
- with open(rule_file) as f: - try: - rule_yaml = yaml.load(f) - except yaml.scanner.ScannerError: - self.send_notification_email(exception=e) - continue - - self.send_notification_email(exception=e, rule=rule_yaml) - continue - elastalert_logger.info("Reloading configuration for rule %s" % (rule_file)) - - # Re-enable if rule had been disabled - for disabled_rule in self.disabled_rules: - if disabled_rule['name'] == new_rule['name']: - self.rules.append(disabled_rule) - self.disabled_rules.remove(disabled_rule) - break - - # Initialize the rule that matches rule_file - new_rule = self.init_rule(new_rule, False) - self.rules = [rule for rule in self.rules if rule['rule_file'] != rule_file] - if new_rule: - self.rules.append(new_rule) - - # Load new rules - if not self.args.rule: - for rule_file in set(new_rule_hashes.keys()) - set(self.rule_hashes.keys()): - try: - new_rule = load_configuration(rule_file, self.conf) - if 'is_enabled' in new_rule and not new_rule['is_enabled']: - continue - if new_rule['name'] in [rule['name'] for rule in self.rules]: - raise EAException("A rule with the name %s already exists" % (new_rule['name'])) - except EAException as e: - self.handle_error('Could not load rule %s: %s' % (rule_file, e)) - self.send_notification_email(exception=e, rule_file=rule_file) - continue - if self.init_rule(new_rule): - elastalert_logger.info('Loaded new rule %s' % (rule_file)) - self.rules.append(new_rule) - - self.rule_hashes = new_rule_hashes - - def start(self): - """ Periodically go through each rule and run it """ - if self.starttime: - if self.starttime == 'NOW': - self.starttime = ts_now() - else: - try: - self.starttime = ts_to_dt(self.starttime) - except (TypeError, ValueError): - self.handle_error("%s is not a valid ISO8601 timestamp (YYYY-MM-DDTHH:MM:SS+XX:00)" % (self.starttime)) - exit(1) - self.wait_until_responsive(timeout=self.args.timeout) - self.running = True - elastalert_logger.info("Starting up") - while self.running: - next_run = datetime.datetime.utcnow() + self.run_every - - self.run_all_rules() - - # Quit after end_time has been reached - if self.args.end: - endtime = ts_to_dt(self.args.end) - - if next_run.replace(tzinfo=dateutil.tz.tzutc()) > endtime: - exit(0) - - if next_run < datetime.datetime.utcnow(): - continue - - # Wait before querying again - sleep_duration = total_seconds(next_run - datetime.datetime.utcnow()) - self.sleep_for(sleep_duration) - - def wait_until_responsive(self, timeout, clock=timeit.default_timer): - """Wait until ElasticSearch becomes responsive (or too much time passes).""" - - # Elapsed time is a floating point number of seconds. - timeout = timeout.total_seconds() - - # Don't poll unless we're asked to. - if timeout <= 0.0: - return - - # Periodically poll ElasticSearch. Keep going until ElasticSearch is - # responsive *and* the writeback index exists. 
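# --- Editor's note: illustrative sketch, not part of the patch ---------------
# The loop below is a poll-until-deadline pattern: retry a readiness check,
# swallowing connection errors, until it passes or the timeout elapses. A
# generic restatement (the check callable and 1s interval are assumptions):
import time
import timeit

def wait_until(check, timeout_seconds, clock=timeit.default_timer, interval=1.0):
    deadline = clock() + timeout_seconds
    while clock() < deadline:
        try:
            if check():
                return True
        except Exception:  # the original narrows this to ConnectionError
            pass
        time.sleep(interval)
    return False
# ------------------------------------------------------------------------------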
- ref = clock() - while (clock() - ref) < timeout: - try: - if self.writeback_es.indices.exists(self.writeback_index): - return - except ConnectionError: - pass - time.sleep(1.0) - - if self.writeback_es.ping(): - logging.error( - 'Writeback index "%s" does not exist, did you run `elastalert-create-index`?', - self.writeback_index, - ) - else: - logging.error( - 'Could not reach ElasticSearch at "%s:%d".', - self.conf['es_host'], - self.conf['es_port'], - ) - exit(1) - - def run_all_rules(self): - """ Run each rule one time """ - self.send_pending_alerts() - - next_run = datetime.datetime.utcnow() + self.run_every - - for rule in self.rules: - # Set endtime based on the rule's delay - delay = rule.get('query_delay') - if hasattr(self.args, 'end') and self.args.end: - endtime = ts_to_dt(self.args.end) - elif delay: - endtime = ts_now() - delay - else: - endtime = ts_now() - - try: - num_matches = self.run_rule(rule, endtime, self.starttime) - except EAException as e: - self.handle_error("Error running rule %s: %s" % (rule['name'], e), {'rule': rule['name']}) - except Exception as e: - self.handle_uncaught_exception(e, rule) - else: - old_starttime = pretty_ts(rule.get('original_starttime'), rule.get('use_local_time')) - total_hits = max(self.num_hits, self.cumulative_hits) - elastalert_logger.info("Ran %s from %s to %s: %s query hits (%s already seen), %s matches," - " %s alerts sent" % (rule['name'], old_starttime, pretty_ts(endtime, rule.get('use_local_time')), - total_hits, self.num_dupes, num_matches, self.alerts_sent)) - self.alerts_sent = 0 - - if next_run < datetime.datetime.utcnow(): - # We were processing for longer than our refresh interval - # This can happen if --start was specified with a large time period - # or if we are running too slow to process events in real time. - logging.warning( - "Querying from %s to %s took longer than %s!" % ( - old_starttime, - pretty_ts(endtime, rule.get('use_local_time')), - self.run_every - ) - ) - - self.remove_old_events(rule) - - # Only force starttime once - self.starttime = None - - if not self.args.pin_rules: - self.load_rule_changes() - - def stop(self): - """ Stop an ElastAlert runner that's been started """ - self.running = False - - def sleep_for(self, duration): - """ Sleep for a set duration """ - elastalert_logger.info("Sleeping for %s seconds" % (duration)) - time.sleep(duration) - - def generate_kibana4_db(self, rule, match): - ''' Creates a link for a kibana4 dashboard which has time set to the match. ''' - db_name = rule.get('use_kibana4_dashboard') - start = ts_add( - lookup_es_key(match, rule['timestamp_field']), - -rule.get('kibana4_start_timedelta', rule.get('timeframe', datetime.timedelta(minutes=10))) - ) - end = ts_add( - lookup_es_key(match, rule['timestamp_field']), - rule.get('kibana4_end_timedelta', rule.get('timeframe', datetime.timedelta(minutes=10))) - ) - return kibana.kibana4_dashboard_link(db_name, start, end) - - def generate_kibana_db(self, rule, match): - ''' Uses a template dashboard to upload a temp dashboard showing the match. - Returns the url to the dashboard. 
''' - db = copy.deepcopy(kibana.dashboard_temp) - - # Set timestamp fields to match our rule especially if - # we have configured something other than @timestamp - kibana.set_timestamp_field(db, rule['timestamp_field']) - - # Set filters - for filter in rule['filter']: - if filter: - kibana.add_filter(db, filter) - kibana.set_included_fields(db, rule['include']) - - # Set index - index = self.get_index(rule) - kibana.set_index_name(db, index) - - return self.upload_dashboard(db, rule, match) - - def upload_dashboard(self, db, rule, match): - ''' Uploads a dashboard schema to the kibana-int Elasticsearch index associated with rule. - Returns the url to the dashboard. ''' - # Set time range - start = ts_add(lookup_es_key(match, rule['timestamp_field']), -rule.get('timeframe', datetime.timedelta(minutes=10))) - end = ts_add(lookup_es_key(match, rule['timestamp_field']), datetime.timedelta(minutes=10)) - kibana.set_time(db, start, end) - - # Set dashboard name - db_name = 'ElastAlert - %s - %s' % (rule['name'], end) - kibana.set_name(db, db_name) - - # Add filter for query_key value - if 'query_key' in rule: - for qk in rule.get('compound_query_key', [rule['query_key']]): - if qk in match: - term = {'term': {qk: match[qk]}} - kibana.add_filter(db, term) - - # Add filter for aggregation_key value - if 'aggregation_key' in rule: - for qk in rule.get('compound_aggregation_key', [rule['aggregation_key']]): - if qk in match: - term = {'term': {qk: match[qk]}} - kibana.add_filter(db, term) - - # Convert to json - db_js = json.dumps(db) - db_body = {'user': 'guest', - 'group': 'guest', - 'title': db_name, - 'dashboard': db_js} - - # Upload - es = elasticsearch_client(rule) - - res = es.index(index='kibana-int', - doc_type='temp', - body=db_body) - - # Return dashboard URL - kibana_url = rule.get('kibana_url') - if not kibana_url: - kibana_url = 'http://%s:%s/_plugin/kibana/' % (rule['es_host'], - rule['es_port']) - return kibana_url + '#/dashboard/temp/%s' % (res['_id']) - - def get_dashboard(self, rule, db_name): - """ Download dashboard which matches use_kibana_dashboard from Elasticsearch. """ - es = elasticsearch_client(rule) - if not db_name: - raise EAException("use_kibana_dashboard undefined") - query = {'query': {'term': {'_id': db_name}}} - try: - res = es.search(index='kibana-int', doc_type='dashboard', body=query, _source_include=['dashboard']) - except ElasticsearchException as e: - raise EAException("Error querying for dashboard: %s" % (e)), None, sys.exc_info()[2] - - if res['hits']['hits']: - return json.loads(res['hits']['hits'][0]['_source']['dashboard']) - else: - raise EAException("Could not find dashboard named %s" % (db_name)) - - def use_kibana_link(self, rule, match): - """ Uploads an existing dashboard as a temp dashboard modified for match time. - Returns the url to the dashboard. """ - # Download or get cached dashboard - dashboard = rule.get('dashboard_schema') - if not dashboard: - db_name = rule.get('use_kibana_dashboard') - dashboard = self.get_dashboard(rule, db_name) - if dashboard: - rule['dashboard_schema'] = dashboard - else: - return None - dashboard = copy.deepcopy(dashboard) - return self.upload_dashboard(dashboard, rule, match) - - def filters_from_kibana(self, rule, db_name): - """ Downloads a dashboard from Kibana and returns corresponding filters, None on error. 
""" - try: - db = rule.get('dashboard_schema') - if not db: - db = self.get_dashboard(rule, db_name) - filters = kibana.filters_from_dashboard(db) - except EAException: - return None - return filters - - def alert(self, matches, rule, alert_time=None, retried=False): - """ Wraps alerting, Kibana linking and enhancements in an exception handler """ - try: - return self.send_alert(matches, rule, alert_time=alert_time, retried=retried) - except Exception as e: - self.handle_uncaught_exception(e, rule) - - def send_alert(self, matches, rule, alert_time=None, retried=False): - """ Send out an alert. - - :param matches: A list of matches. - :param rule: A rule configuration. - """ - if not matches: - return - - if alert_time is None: - alert_time = ts_now() - - # Compute top count keys - if rule.get('top_count_keys'): - for match in matches: - if 'query_key' in rule and rule['query_key'] in match: - qk = match[rule['query_key']] - else: - qk = None - - if isinstance(rule['type'], FlatlineRule): - # flatline rule triggers when there have been no events from now()-timeframe to now(), - # so using now()-timeframe will return no results. for now we can just mutliple the timeframe - # by 2, but this could probably be timeframe+run_every to prevent too large of a lookup? - timeframe = datetime.timedelta(seconds=2 * rule.get('timeframe').total_seconds()) - else: - timeframe = rule.get('timeframe', datetime.timedelta(minutes=10)) - - start = ts_to_dt(lookup_es_key(match, rule['timestamp_field'])) - timeframe - end = ts_to_dt(lookup_es_key(match, rule['timestamp_field'])) + datetime.timedelta(minutes=10) - keys = rule.get('top_count_keys') - counts = self.get_top_counts(rule, start, end, keys, qk=qk) - match.update(counts) - - # Generate a kibana3 dashboard for the first match - if rule.get('generate_kibana_link') or rule.get('use_kibana_dashboard'): - try: - if rule.get('generate_kibana_link'): - kb_link = self.generate_kibana_db(rule, matches[0]) - else: - kb_link = self.use_kibana_link(rule, matches[0]) - except EAException as e: - self.handle_error("Could not generate Kibana dash for %s match: %s" % (rule['name'], e)) - else: - if kb_link: - matches[0]['kibana_link'] = kb_link - - if rule.get('use_kibana4_dashboard'): - kb_link = self.generate_kibana4_db(rule, matches[0]) - if kb_link: - matches[0]['kibana_link'] = kb_link - - # Enhancements were already run at match time if - # run_enhancements_first is set or - # retried==True, which means this is a retry of a failed alert - if not rule.get('run_enhancements_first') and not retried: - for enhancement in rule['match_enhancements']: - valid_matches = [] - for match in matches: - try: - enhancement.process(match) - valid_matches.append(match) - except DropMatchException as e: - pass - except EAException as e: - self.handle_error("Error running match enhancement: %s" % (e), {'rule': rule['name']}) - matches = valid_matches - if not matches: - return None - - # Don't send real alerts in debug mode - if self.debug: - alerter = DebugAlerter(rule) - alerter.alert(matches) - return None - - # Run the alerts - alert_sent = False - alert_exception = None - # Alert.pipeline is a single object shared between every alerter - # This allows alerters to pass objects and data between themselves - alert_pipeline = {"alert_time": alert_time} - for alert in rule['alert']: - alert.pipeline = alert_pipeline - try: - alert.alert(matches) - except EAException as e: - self.handle_error('Error while running alert %s: %s' % (alert.get_info()['type'], e), {'rule': 
rule['name']}) - alert_exception = str(e) - else: - self.alerts_sent += 1 - alert_sent = True - - # Write the alert(s) to ES - agg_id = None - for match in matches: - alert_body = self.get_alert_body(match, rule, alert_sent, alert_time, alert_exception) - # Set all matches to aggregate together - if agg_id: - alert_body['aggregate_id'] = agg_id - res = self.writeback('elastalert', alert_body) - if res and not agg_id: - agg_id = res['_id'] - - def get_alert_body(self, match, rule, alert_sent, alert_time, alert_exception=None): - body = { - 'match_body': match, - 'rule_name': rule['name'], - 'alert_info': rule['alert'][0].get_info() if not self.debug else {}, - 'alert_sent': alert_sent, - 'alert_time': alert_time - } - - match_time = lookup_es_key(match, rule['timestamp_field']) - if match_time is not None: - body['match_time'] = match_time - - # TODO record info about multiple alerts - - # If the alert failed to send, record the exception - if not alert_sent: - body['alert_exception'] = alert_exception - return body - - def writeback(self, doc_type, body): - writeback_index = self.writeback_index - if(self.is_atleastsix()): - writeback_index = self.get_six_index(doc_type) - - # ES 2.0 - 2.3 does not support dots in field names. - if self.replace_dots_in_field_names: - writeback_body = replace_dots_in_field_names(body) - else: - writeback_body = body - - for key in writeback_body.keys(): - # Convert any datetime objects to timestamps - if isinstance(writeback_body[key], datetime.datetime): - writeback_body[key] = dt_to_ts(writeback_body[key]) - - if self.debug: - elastalert_logger.info("Skipping writing to ES: %s" % (writeback_body)) - return None - - if '@timestamp' not in writeback_body: - writeback_body['@timestamp'] = dt_to_ts(ts_now()) - - try: - res = self.writeback_es.index(index=writeback_index, - doc_type=doc_type, body=body) - return res - except ElasticsearchException as e: - logging.exception("Error writing alert info to Elasticsearch: %s" % (e)) - - def find_recent_pending_alerts(self, time_limit): - """ Queries writeback_es to find alerts that did not send - and are newer than time_limit """ - - # XXX only fetches 1000 results. If limit is reached, next loop will catch them - # unless there is constantly more than 1000 alerts to send. - - # Fetch recent, unsent alerts that aren't part of an aggregate, earlier alerts first. 
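# --- Editor's note: illustrative, not part of the patch ----------------------
# For reference, the ES5 branch below assembles a search body shaped like this
# (the alert_time bounds are placeholders):
PENDING_ALERTS_QUERY = {
    'query': {
        'bool': {
            'must': {'query_string': {'query': '!_exists_:aggregate_id AND alert_sent:false'}},
            'filter': {'range': {'alert_time': {'from': '2019-01-14T10:00:00Z',
                                                'to': '2019-01-14T11:00:00Z'}}},
        }
    },
    'sort': {'alert_time': {'order': 'asc'}},
}
# ------------------------------------------------------------------------------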
- inner_query = {'query_string': {'query': '!_exists_:aggregate_id AND alert_sent:false'}} - time_filter = {'range': {'alert_time': {'from': dt_to_ts(ts_now() - time_limit), - 'to': dt_to_ts(ts_now())}}} - sort = {'sort': {'alert_time': {'order': 'asc'}}} - if self.is_atleastfive(): - query = {'query': {'bool': {'must': inner_query, 'filter': time_filter}}} - else: - query = {'query': inner_query, 'filter': time_filter} - query.update(sort) - try: - res = self.writeback_es.search(index=self.writeback_index, - doc_type='elastalert', - body=query, - size=1000) - if res['hits']['hits']: - return res['hits']['hits'] - except ElasticsearchException as e: - logging.exception("Error finding recent pending alerts: %s %s" % (e, query)) - return [] - - def send_pending_alerts(self): - pending_alerts = self.find_recent_pending_alerts(self.alert_time_limit) - for alert in pending_alerts: - _id = alert['_id'] - alert = alert['_source'] - try: - rule_name = alert.pop('rule_name') - alert_time = alert.pop('alert_time') - match_body = alert.pop('match_body') - except KeyError: - # Malformed alert, drop it - continue - - # Find original rule - for rule in self.rules: - if rule['name'] == rule_name: - break - else: - # Original rule is missing, keep alert for later if rule reappears - continue - - # Set current_es for top_count_keys query - self.current_es = elasticsearch_client(rule) - self.current_es_addr = (rule['es_host'], rule['es_port']) - - # Send the alert unless it's a future alert - if ts_now() > ts_to_dt(alert_time): - aggregated_matches = self.get_aggregated_matches(_id) - if aggregated_matches: - matches = [match_body] + [agg_match['match_body'] for agg_match in aggregated_matches] - self.alert(matches, rule, alert_time=alert_time) - else: - # If this rule isn't using aggregation, this must be a retry of a failed alert - retried = False - if not rule.get('aggregation'): - retried = True - self.alert([match_body], rule, alert_time=alert_time, retried=retried) - - if rule['current_aggregate_id']: - for qk, agg_id in rule['current_aggregate_id'].iteritems(): - if agg_id == _id: - rule['current_aggregate_id'].pop(qk) - break - - # Delete it from the index - try: - self.writeback_es.delete(index=self.writeback_index, - doc_type='elastalert', - id=_id) - except ElasticsearchException: # TODO: Give this a more relevant exception, try:except: is evil. - self.handle_error("Failed to delete alert %s at %s" % (_id, alert_time)) - - # Send in memory aggregated alerts - for rule in self.rules: - if rule['agg_matches']: - for aggregation_key_value, aggregate_alert_time in rule['aggregate_alert_time'].iteritems(): - if ts_now() > aggregate_alert_time: - alertable_matches = [ - agg_match - for agg_match - in rule['agg_matches'] - if self.get_aggregation_key_value(rule, agg_match) == aggregation_key_value - ] - self.alert(alertable_matches, rule) - rule['agg_matches'] = [ - agg_match - for agg_match - in rule['agg_matches'] - if self.get_aggregation_key_value(rule, agg_match) != aggregation_key_value - ] - - def get_aggregated_matches(self, _id): - """ Removes and returns all matches from writeback_es that have aggregate_id == _id """ - - # XXX if there are more than self.max_aggregation matches, you have big alerts and we will leave entries in ES. 
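# --- Editor's note: illustrative, not part of the patch ----------------------
# The lookup below is a plain query_string search keyed on aggregate_id,
# capped at self.max_aggregation results (hence the XXX above). Request body
# shape, with a placeholder id:
_id = 'example_aggregate_id'  # hypothetical writeback document _id
AGGREGATED_MATCHES_QUERY = {
    'query': {'query_string': {'query': 'aggregate_id:%s' % _id}},
    'sort': {'@timestamp': 'asc'},
}
# ------------------------------------------------------------------------------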
- query = {'query': {'query_string': {'query': 'aggregate_id:%s' % (_id)}}, 'sort': {'@timestamp': 'asc'}} - matches = [] - try: - res = self.writeback_es.search(index=self.writeback_index, - doc_type='elastalert', - body=query, - size=self.max_aggregation) - for match in res['hits']['hits']: - matches.append(match['_source']) - self.writeback_es.delete(index=self.writeback_index, - doc_type='elastalert', - id=match['_id']) - except (KeyError, ElasticsearchException) as e: - self.handle_error("Error fetching aggregated matches: %s" % (e), {'id': _id}) - return matches - - def find_pending_aggregate_alert(self, rule, aggregation_key_value=None): - query = {'filter': {'bool': {'must': [{'term': {'rule_name': rule['name']}}, - {'range': {'alert_time': {'gt': ts_now()}}}, - {'term': {'alert_sent': 'false'}}], - 'must_not': [{'exists': {'field': 'aggregate_id'}}]}}} - if aggregation_key_value: - query['filter']['bool']['must'].append({'term': {'aggregation_key': aggregation_key_value}}) - if self.is_atleastfive(): - query = {'query': {'bool': query}} - query['sort'] = {'alert_time': {'order': 'desc'}} - try: - res = self.writeback_es.search(index=self.writeback_index, - doc_type='elastalert', - body=query, - size=1) - if len(res['hits']['hits']) == 0: - return None - except (KeyError, ElasticsearchException) as e: - self.handle_error("Error searching for pending aggregated matches: %s" % (e), {'rule_name': rule['name']}) - return None - - return res['hits']['hits'][0] - - def add_aggregated_alert(self, match, rule): - """ Save a match as a pending aggregate alert to Elasticsearch. """ - - # Optionally include the 'aggregation_key' as a dimension for aggregations - aggregation_key_value = self.get_aggregation_key_value(rule, match) - - if (not rule['current_aggregate_id'].get(aggregation_key_value) or - ('aggregate_alert_time' in rule and aggregation_key_value in rule['aggregate_alert_time'] and rule[ - 'aggregate_alert_time'].get(aggregation_key_value) < ts_to_dt(lookup_es_key(match, rule['timestamp_field'])))): - - # ElastAlert may have restarted while pending alerts exist - pending_alert = self.find_pending_aggregate_alert(rule, aggregation_key_value) - if pending_alert: - alert_time = ts_to_dt(pending_alert['_source']['alert_time']) - rule['aggregate_alert_time'][aggregation_key_value] = alert_time - agg_id = pending_alert['_id'] - rule['current_aggregate_id'] = {aggregation_key_value: agg_id} - elastalert_logger.info( - 'Adding alert for %s to aggregation(id: %s, aggregation_key: %s), next alert at %s' % ( - rule['name'], - agg_id, - aggregation_key_value, - alert_time - ) - ) - else: - # First match, set alert_time - alert_time = '' - if isinstance(rule['aggregation'], dict) and rule['aggregation'].get('schedule'): - croniter._datetime_to_timestamp = cronite_datetime_to_timestamp # For Python 2.6 compatibility - try: - iter = croniter(rule['aggregation']['schedule'], ts_now()) - alert_time = unix_to_dt(iter.get_next()) - except Exception as e: - self.handle_error("Error parsing aggregate send time Cron format %s" % (e), rule['aggregation']['schedule']) - else: - if rule.get('aggregate_by_match_time', False): - match_time = ts_to_dt(lookup_es_key(match, rule['timestamp_field'])) - alert_time = match_time + rule['aggregation'] - else: - alert_time = ts_now() + rule['aggregation'] - - rule['aggregate_alert_time'][aggregation_key_value] = alert_time - agg_id = None - elastalert_logger.info( - 'New aggregation for %s, aggregation_key: %s. next alert at %s.' 
% (rule['name'], aggregation_key_value, alert_time) - ) - else: - # Already pending aggregation, use existing alert_time - alert_time = rule['aggregate_alert_time'].get(aggregation_key_value) - agg_id = rule['current_aggregate_id'].get(aggregation_key_value) - elastalert_logger.info( - 'Adding alert for %s to aggregation(id: %s, aggregation_key: %s), next alert at %s' % ( - rule['name'], - agg_id, - aggregation_key_value, - alert_time - ) - ) - - alert_body = self.get_alert_body(match, rule, False, alert_time) - if agg_id: - alert_body['aggregate_id'] = agg_id - if aggregation_key_value: - alert_body['aggregation_key'] = aggregation_key_value - res = self.writeback('elastalert', alert_body) - - # If new aggregation, save _id - if res and not agg_id: - rule['current_aggregate_id'][aggregation_key_value] = res['_id'] - - # Couldn't write the match to ES, save it in memory for now - if not res: - rule['agg_matches'].append(match) - - return res - - def silence(self, silence_cache_key=None): - """ Silence an alert for a period of time. --silence and --rule must be passed as args. """ - if self.debug: - logging.error('--silence not compatible with --debug') - exit(1) - - if not self.args.rule: - logging.error('--silence must be used with --rule') - exit(1) - - # With --rule, self.rules will only contain that specific rule - if not silence_cache_key: - silence_cache_key = self.rules[0]['name'] + "._silence" - - try: - silence_ts = parse_deadline(self.args.silence) - except (ValueError, TypeError): - logging.error('%s is not a valid time period' % (self.args.silence)) - exit(1) - - if not self.set_realert(silence_cache_key, silence_ts, 0): - logging.error('Failed to save silence command to Elasticsearch') - exit(1) - - elastalert_logger.info('Success. %s will be silenced until %s' % (silence_cache_key, silence_ts)) - - def set_realert(self, silence_cache_key, timestamp, exponent): - """ Write a silence to Elasticsearch for silence_cache_key until timestamp. """ - body = {'exponent': exponent, - 'rule_name': silence_cache_key, - '@timestamp': ts_now(), - 'until': timestamp} - - self.silence_cache[silence_cache_key] = (timestamp, exponent) - return self.writeback('silence', body) - - def is_silenced(self, rule_name): - """ Checks if rule_name is currently silenced. Returns false on exception. 
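
add_aggregated_alert above computes the next send time either from a cron-style schedule (via croniter, pinned in requirements.txt) or from a plain timedelta. A minimal sketch of that branching, with a hypothetical helper name:

    import datetime
    from croniter import croniter

    def next_aggregation_send(aggregation, now):
        # Cron-style schedule: take the next cron tick after `now`;
        # otherwise treat the aggregation setting as a timedelta offset.
        if isinstance(aggregation, dict) and aggregation.get('schedule'):
            it = croniter(aggregation['schedule'], now)
            # mirrors unix_to_dt(iter.get_next()) in the code above
            return datetime.datetime.utcfromtimestamp(it.get_next())
        return now + aggregation

    now = datetime.datetime(2023, 3, 21, 9, 50)
    print(next_aggregation_send({'schedule': '*/15 * * * *'}, now))
    print(next_aggregation_send(datetime.timedelta(minutes=10), now))
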
""" - if rule_name in self.silence_cache: - if ts_now() < self.silence_cache[rule_name][0]: - return True - - if self.debug: - return False - query = {'term': {'rule_name': rule_name}} - sort = {'sort': {'until': {'order': 'desc'}}} - if self.is_atleastfive(): - query = {'query': query} - else: - query = {'filter': query} - query.update(sort) - - try: - index = self.get_six_index('silence') - res = self.writeback_es.search(index=index, doc_type='silence', - size=1, body=query, _source_include=['until', 'exponent']) - except ElasticsearchException as e: - self.handle_error("Error while querying for alert silence status: %s" % (e), {'rule': rule_name}) - - return False - if res['hits']['hits']: - until_ts = res['hits']['hits'][0]['_source']['until'] - exponent = res['hits']['hits'][0]['_source'].get('exponent', 0) - if rule_name not in self.silence_cache.keys(): - self.silence_cache[rule_name] = (ts_to_dt(until_ts), exponent) - else: - self.silence_cache[rule_name] = (ts_to_dt(until_ts), self.silence_cache[rule_name][1]) - if ts_now() < ts_to_dt(until_ts): - return True - return False - - def handle_error(self, message, data=None): - ''' Logs message at error level and writes message, data and traceback to Elasticsearch. ''' - logging.error(message) - body = {'message': message} - tb = traceback.format_exc() - body['traceback'] = tb.strip().split('\n') - if data: - body['data'] = data - self.writeback('elastalert_error', body) - - def handle_uncaught_exception(self, exception, rule): - """ Disables a rule and sends a notification. """ - logging.error(traceback.format_exc()) - self.handle_error('Uncaught exception running rule %s: %s' % (rule['name'], exception), {'rule': rule['name']}) - if self.disable_rules_on_error: - self.rules = [running_rule for running_rule in self.rules if running_rule['name'] != rule['name']] - self.disabled_rules.append(rule) - elastalert_logger.info('Rule %s disabled', rule['name']) - if self.notify_email: - self.send_notification_email(exception=exception, rule=rule) - - def send_notification_email(self, text='', exception=None, rule=None, subject=None, rule_file=None): - email_body = text - rule_name = None - if rule: - rule_name = rule['name'] - elif rule_file: - rule_name = rule_file - if exception and rule_name: - if not subject: - subject = 'Uncaught exception in ElastAlert - %s' % (rule_name) - email_body += '\n\n' - email_body += 'The rule %s has raised an uncaught exception.\n\n' % (rule_name) - if self.disable_rules_on_error: - modified = ' or if the rule config file has been modified' if not self.args.pin_rules else '' - email_body += 'It has been disabled and will be re-enabled when ElastAlert restarts%s.\n\n' % (modified) - tb = traceback.format_exc() - email_body += tb - - if isinstance(self.notify_email, basestring): - self.notify_email = [self.notify_email] - email = MIMEText(email_body) - email['Subject'] = subject if subject else 'ElastAlert notification' - recipients = self.notify_email - if rule and rule.get('notify_email'): - if isinstance(rule['notify_email'], basestring): - rule['notify_email'] = [rule['notify_email']] - recipients = recipients + rule['notify_email'] - recipients = list(set(recipients)) - email['To'] = ', '.join(recipients) - email['From'] = self.from_addr - email['Reply-To'] = self.conf.get('email_reply_to', email['To']) - - try: - smtp = SMTP(self.smtp_host) - smtp.sendmail(self.from_addr, recipients, email.as_string()) - except (SMTPException, error) as e: - self.handle_error('Error connecting to SMTP host: %s' % (e), 
{'email_body': email_body}) - - def get_top_counts(self, rule, starttime, endtime, keys, number=None, qk=None): - """ Counts the number of events for each unique value for each key field. - Returns a dictionary with top_events_ mapped to the top 5 counts for each key. """ - all_counts = {} - if not number: - number = rule.get('top_count_number', 5) - for key in keys: - index = self.get_index(rule, starttime, endtime) - - hits_terms = self.get_hits_terms(rule, starttime, endtime, index, key, qk, number) - if hits_terms is None: - top_events_count = {} - else: - buckets = hits_terms.values()[0] - - # get_hits_terms adds to num_hits, but we don't want to count these - self.num_hits -= len(buckets) - terms = {} - for bucket in buckets: - terms[bucket['key']] = bucket['doc_count'] - counts = terms.items() - counts.sort(key=lambda x: x[1], reverse=True) - top_events_count = dict(counts[:number]) - - # Save a dict with the top 5 events by key - all_counts['top_events_%s' % (key)] = top_events_count - - return all_counts - - def next_alert_time(self, rule, name, timestamp): - """ Calculate an 'until' time and exponent based on how much past the last 'until' we are. """ - if name in self.silence_cache: - last_until, exponent = self.silence_cache[name] - else: - # If this isn't cached, this is the first alert or writeback_es is down, normal realert - return timestamp + rule['realert'], 0 - - if not rule.get('exponential_realert'): - return timestamp + rule['realert'], 0 - diff = seconds(timestamp - last_until) - # Increase exponent if we've alerted recently - if diff < seconds(rule['realert']) * 2 ** exponent: - exponent += 1 - else: - # Continue decreasing exponent the longer it's been since the last alert - while diff > seconds(rule['realert']) * 2 ** exponent and exponent > 0: - diff -= seconds(rule['realert']) * 2 ** exponent - exponent -= 1 - - wait = datetime.timedelta(seconds=seconds(rule['realert']) * 2 ** exponent) - if wait >= rule['exponential_realert']: - return timestamp + rule['exponential_realert'], exponent - 1 - return timestamp + wait, exponent - - -def handle_signal(signal, frame): - elastalert_logger.info('SIGINT received, stopping ElastAlert...') - # use os._exit to exit immediately and avoid someone catching SystemExit - os._exit(0) - - -def main(args=None): - signal.signal(signal.SIGINT, handle_signal) - if not args: - args = sys.argv[1:] - client = ElastAlerter(args) - if not client.debug: - p = PrometheusWrapper(client) - p.start() - if not client.args.silence: - client.start() - - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) \ No newline at end of file diff --git a/comparisonFile2 b/comparisonFile2 index 1f19780fc..7965a9601 100644 --- a/comparisonFile2 +++ b/comparisonFile2 @@ -1,517 +1,74 @@ -import collections -import datetime -import logging -import os -import types - -import dateutil.parser -import dateutil.tz -from auth import Auth -from elasticsearch import RequestsHttpConnection -from elasticsearch.client import Elasticsearch -from six import string_types - -logging.basicConfig() -elastalert_logger = logging.getLogger('elastalert') - - -def new_get_event_ts(ts_field): - """ Constructs a lambda that may be called to extract the timestamp field - from a given event. - - :returns: A callable function that takes an event and outputs that event's - timestamp field. 
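
next_alert_time above implements the exponential_realert backoff. The same arithmetic, restated as a self-contained function; names are illustrative, and the real method also consults the silence cache and a seconds() helper that ignores microseconds:

    import datetime

    def next_realert(realert, exponential_cap, last_until, exponent, timestamp):
        diff = (timestamp - last_until).total_seconds()
        # Alerting again before the scaled silence expired: back off harder.
        if diff < realert.total_seconds() * 2 ** exponent:
            exponent += 1
        else:
            # Quiet for a while: walk the exponent back down.
            while diff > realert.total_seconds() * 2 ** exponent and exponent > 0:
                diff -= realert.total_seconds() * 2 ** exponent
                exponent -= 1
        wait = datetime.timedelta(seconds=realert.total_seconds() * 2 ** exponent)
        if wait >= exponential_cap:
            return timestamp + exponential_cap, exponent - 1
        return timestamp + wait, exponent

    now = datetime.datetime.utcnow()
    print(next_realert(datetime.timedelta(minutes=5), datetime.timedelta(hours=1),
                       now - datetime.timedelta(minutes=2), 1, now))
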
- """ - return lambda event: lookup_es_key(event[0], ts_field) - - -def _find_es_dict_by_key(lookup_dict, term): - """ Performs iterative dictionary search based upon the following conditions: - - 1. Subkeys may either appear behind a full stop (.) or at one lookup_dict level lower in the tree. - 2. No wildcards exist within the provided ES search terms (these are treated as string literals) - - This is necessary to get around inconsistencies in ES data. - - For example: - {'ad.account_name': 'bob'} - Or: - {'csp_report': {'blocked_uri': 'bob.com'}} - And even: - {'juniper_duo.geoip': {'country_name': 'Democratic People's Republic of Korea'}} - - We want a search term of form "key.subkey.subsubkey" to match in all cases. - :returns: A tuple with the first element being the dict that contains the key and the second - element which is the last subkey used to access the target specified by the term. None is - returned for both if the key can not be found. - """ - if term in lookup_dict: - return lookup_dict, term - # If the term does not match immediately, perform iterative lookup: - # 1. Split the search term into tokens - # 2. Recurrently concatenate these together to traverse deeper into the dictionary, - # clearing the subkey at every successful lookup. - # - # This greedy approach is correct because subkeys must always appear in order, - # preferring full stops and traversal interchangeably. - # - # Subkeys will NEVER be duplicated between an alias and a traversal. - # - # For example: - # {'foo.bar': {'bar': 'ray'}} to look up foo.bar will return {'bar': 'ray'}, not 'ray' - dict_cursor = lookup_dict - subkeys = term.split('.') - subkey = '' - - while len(subkeys) > 0: - if not dict_cursor: - return {}, None - - subkey += subkeys.pop(0) - - if subkey in dict_cursor: - if len(subkeys) == 0: - break - - dict_cursor = dict_cursor[subkey] - subkey = '' - elif len(subkeys) == 0: - # If there are no keys left to match, return None values - dict_cursor = None - subkey = None - else: - subkey += '.' - - return dict_cursor, subkey - - -def set_es_key(lookup_dict, term, value): - """ Looks up the location that the term maps to and sets it to the given value. - :returns: True if the value was set successfully, False otherwise. - """ - value_dict, value_key = _find_es_dict_by_key(lookup_dict, term) - - if value_dict is not None: - value_dict[value_key] = value - return True - - return False - - -def lookup_es_key(lookup_dict, term): - """ Performs iterative dictionary search for the given term. - :returns: The value identified by term or None if it cannot be found. 
- """ - value_dict, value_key = _find_es_dict_by_key(lookup_dict, term) - return None if value_key is None else value_dict[value_key] - - -def ts_to_dt(timestamp): - if isinstance(timestamp, datetime.datetime): - return timestamp - dt = dateutil.parser.parse(timestamp) - # Implicitly convert local timestamps to UTC - if dt.tzinfo is None: - dt = dt.replace(tzinfo=dateutil.tz.tzutc()) - return dt - - -def dt_to_ts(dt): - if not isinstance(dt, datetime.datetime): - logging.warning('Expected datetime, got %s' % (type(dt))) - return dt - ts = dt.isoformat() - # Round microseconds to milliseconds - if dt.tzinfo is None: - # Implicitly convert local times to UTC - return ts + 'Z' - # isoformat() uses microsecond accuracy and timezone offsets - # but we should try to use millisecond accuracy and Z to indicate UTC - return ts.replace('000+00:00', 'Z').replace('+00:00', 'Z') - - -def ts_to_dt_with_format(timestamp, ts_format): - if isinstance(timestamp, datetime.datetime): - return timestamp - dt = datetime.datetime.strptime(timestamp, ts_format) - # Implicitly convert local timestamps to UTC - if dt.tzinfo is None: - dt = dt.replace(tzinfo=dateutil.tz.tzutc()) - return dt - - -def dt_to_ts_with_format(dt, ts_format): - if not isinstance(dt, datetime.datetime): - logging.warning('Expected datetime, got %s' % (type(dt))) - return dt - ts = dt.strftime(ts_format) - return ts - - -def ts_now(): - return datetime.datetime.utcnow().replace(tzinfo=dateutil.tz.tzutc()) - - -def inc_ts(timestamp, milliseconds=1): - """Increment a timestamp by milliseconds.""" - dt = ts_to_dt(timestamp) - dt += datetime.timedelta(milliseconds=milliseconds) - return dt_to_ts(dt) - - -def pretty_ts(timestamp, tz=True): - """Pretty-format the given timestamp (to be printed or logged hereafter). - If tz, the timestamp will be converted to local time. - Format: YYYY-MM-DD HH:MM TZ""" - dt = timestamp - if not isinstance(timestamp, datetime.datetime): - dt = ts_to_dt(timestamp) - if tz: - dt = dt.astimezone(dateutil.tz.tzlocal()) - return dt.strftime('%Y-%m-%d %H:%M %Z') - - -def ts_add(ts, td): - """ Allows a timedelta (td) add operation on a string timestamp (ts) """ - return dt_to_ts(ts_to_dt(ts) + td) - - -def hashable(obj): - """ Convert obj to a hashable obj. - We use the value of some fields from Elasticsearch as keys for dictionaries. This means - that whatever Elasticsearch returns must be hashable, and it sometimes returns a list or dict.""" - if not obj.__hash__: - return str(obj) - return obj - - -def format_index(index, start, end, add_extra=False): - """ Takes an index, specified using strftime format, start and end time timestamps, - and outputs a wildcard based index string to match all possible timestamps. 
""" - # Convert to UTC - start -= start.utcoffset() - end -= end.utcoffset() - original_start = start - indices = set() - while start.date() <= end.date(): - indices.add(start.strftime(index)) - start += datetime.timedelta(days=1) - num = len(indices) - if add_extra: - while len(indices) == num: - original_start -= datetime.timedelta(days=1) - new_index = original_start.strftime(index) - assert new_index != index, "You cannot use a static index with search_extra_index" - indices.add(new_index) - - return ','.join(indices) - - -class EAException(Exception): - pass - - -def seconds(td): - return td.seconds + td.days * 24 * 3600 - - -def total_seconds(dt): - # For python 2.6 compatability - if dt is None: - return 0 - elif hasattr(dt, 'total_seconds'): - return dt.total_seconds() - else: - return (dt.microseconds + (dt.seconds + dt.days * 24 * 3600) * 10**6) / 10**6 - - -def dt_to_int(dt): - dt = dt.replace(tzinfo=None) - return int(total_seconds((dt - datetime.datetime.utcfromtimestamp(0))) * 1000) - - -def unixms_to_dt(ts): - return unix_to_dt(float(ts) / 1000) - -def unix_to_dt(ts): - if(type(ts) == types.UnicodeType): - dt = datetime.datetime.strptime(ts, '%Y-%m-%d %H:%M:%S.%f') - else: - dt = datetime.datetime.utcfromtimestamp(float(ts)) - dt = dt.replace(tzinfo=dateutil.tz.tzutc()) - return dt - - -def dt_to_unix(dt): - return int(total_seconds(dt - datetime.datetime(1970, 1, 1, tzinfo=dateutil.tz.tzutc()))) - - -def dt_to_unixms(dt): - return int(dt_to_unix(dt) * 1000) - - -def cronite_datetime_to_timestamp(self, d): - """ - Converts a `datetime` object `d` into a UNIX timestamp. - """ - if d.tzinfo is not None: - d = d.replace(tzinfo=None) - d.utcoffset() - - return total_seconds((d - datetime.datetime(1970, 1, 1))) - - -def add_raw_postfix(field, is_five_or_above): - if is_five_or_above: - end = '.keyword' - else: - end = '.raw' - if not field.endswith(end): - field += end - return field - - -def replace_dots_in_field_names(document): - """ This method destructively modifies document by replacing any dots in - field names with an underscore. 
""" - for key, value in list(document.items()): - if isinstance(value, dict): - value = replace_dots_in_field_names(value) - if isinstance(key, string_types) and key.find('.') != -1: - del document[key] - document[key.replace('.', '_')] = value - return document - - -def elasticsearch_client(conf): - """ returns an Elasticsearch instance configured using an es_conn_config """ - es_conn_conf = build_es_conn_config(conf) - auth = Auth() - es_conn_conf['http_auth'] = auth(host=es_conn_conf['es_host'], - username=es_conn_conf['es_username'], - password=es_conn_conf['es_password'], - aws_region=es_conn_conf['aws_region'], - profile_name=es_conn_conf['profile']) - - return Elasticsearch(host=es_conn_conf['es_host'], - port=es_conn_conf['es_port'], - url_prefix=es_conn_conf['es_url_prefix'], - use_ssl=es_conn_conf['use_ssl'], - verify_certs=es_conn_conf['verify_certs'], - ca_certs=es_conn_conf['ca_certs'], - connection_class=RequestsHttpConnection, - http_auth=es_conn_conf['http_auth'], - timeout=es_conn_conf['es_conn_timeout'], - send_get_body_as=es_conn_conf['send_get_body_as'], - client_cert=es_conn_conf['client_cert'], - client_key=es_conn_conf['client_key']) - -def kibana_adapter_client(conf): - """ returns an Elasticsearch instance configured using an es_conn_config """ - es_conn_conf = build_adapter_conn_config(conf) - auth = Auth() - es_conn_conf['http_auth'] = auth(host=es_conn_conf['es_host'], - username=es_conn_conf['es_username'], - password=es_conn_conf['es_password'], - aws_region=es_conn_conf['aws_region'], - profile_name=es_conn_conf['profile']) - - return Elasticsearch(host=es_conn_conf['es_host'], - port=es_conn_conf['es_port'], - url_prefix=es_conn_conf['es_url_prefix'], - use_ssl=es_conn_conf['use_ssl'], - verify_certs=es_conn_conf['verify_certs'], - ca_certs=es_conn_conf['ca_certs'], - connection_class=RequestsHttpConnection, - http_auth=es_conn_conf['http_auth'], - timeout=es_conn_conf['es_conn_timeout'], - send_get_body_as=es_conn_conf['send_get_body_as'], - client_cert=es_conn_conf['client_cert'], - client_key=es_conn_conf['client_key']) - -def build_adapter_conn_config(conf): - """ Given a conf dictionary w/ raw config properties 'use_ssl', 'es_host', 'es_port' - 'es_username' and 'es_password', this will return a new dictionary - with properly initialized values for 'es_host', 'es_port', 'use_ssl' and 'http_auth' which - will be a basicauth username:password formatted string """ - parsed_conf = {} - parsed_conf['use_ssl'] = os.environ.get('ES_USE_SSL', False) - parsed_conf['verify_certs'] = True - parsed_conf['ca_certs'] = None - parsed_conf['client_cert'] = None - parsed_conf['client_key'] = None - parsed_conf['http_auth'] = None - parsed_conf['es_username'] = None - parsed_conf['es_password'] = None - parsed_conf['es_api_key'] = None - parsed_conf['es_bearer'] = None - parsed_conf['aws_region'] = None - parsed_conf['profile'] = None - parsed_conf['headers'] = None - parsed_conf['es_host'] = os.environ.get('ES_HOST', conf['es_host']) - parsed_conf['es_port'] = int(os.environ.get('ES_PORT', conf['es_port'])) - - es_hosts = os.environ.get('ES_HOSTS') - es_hosts = parse_hosts(es_hosts, parsed_conf.get('es_port')) if es_hosts else conf.get('es_hosts') - parsed_conf['es_hosts'] = es_hosts - - parsed_conf['es_url_prefix'] = '' - parsed_conf['es_conn_timeout'] = conf.get('es_conn_timeout', 20) - parsed_conf['send_get_body_as'] = conf.get('es_send_get_body_as', 'GET') - parsed_conf['ssl_show_warn'] = conf.get('ssl_show_warn', True) - - if os.environ.get('ES_USERNAME'): - 
parsed_conf['es_username'] = os.environ.get('ES_USERNAME') - parsed_conf['es_password'] = os.environ.get('ES_PASSWORD') - elif 'es_username' in conf: - parsed_conf['es_username'] = conf['es_username'] - parsed_conf['es_password'] = conf['es_password'] - - if os.environ.get('ES_API_KEY'): - parsed_conf['es_api_key'] = os.environ.get('ES_API_KEY') - elif 'es_api_key' in conf: - parsed_conf['es_api_key'] = conf['es_api_key'] - - if os.environ.get('ES_BEARER'): - parsed_conf['es_bearer'] = os.environ.get('ES_BEARER') - elif 'es_bearer' in conf: - parsed_conf['es_bearer'] = conf['es_bearer'] - - if 'aws_region' in conf: - parsed_conf['aws_region'] = conf['aws_region'] - - if 'profile' in conf: - parsed_conf['profile'] = conf['profile'] - - if 'use_ssl' in conf: - parsed_conf['use_ssl'] = conf['use_ssl'] - - if 'verify_certs' in conf: - parsed_conf['verify_certs'] = conf['verify_certs'] - - if 'ca_certs' in conf: - parsed_conf['ca_certs'] = conf['ca_certs'] - - if 'client_cert' in conf: - parsed_conf['client_cert'] = conf['client_cert'] - - if 'client_key' in conf: - parsed_conf['client_key'] = conf['client_key'] - - if 'es_url_prefix' in conf: - parsed_conf['es_url_prefix'] = conf['es_url_prefix'] - - return parsed_conf - - - -def build_es_conn_config(conf): - """ Given a conf dictionary w/ raw config properties 'use_ssl', 'es_host', 'es_port' - 'es_username' and 'es_password', this will return a new dictionary - with properly initialized values for 'es_host', 'es_port', 'use_ssl' and 'http_auth' which - will be a basicauth username:password formatted string """ - parsed_conf = {} - parsed_conf['use_ssl'] = os.environ.get('ES_USE_SSL', False) - parsed_conf['verify_certs'] = True - parsed_conf['ca_certs'] = None - parsed_conf['client_cert'] = None - parsed_conf['client_key'] = None - parsed_conf['http_auth'] = None - parsed_conf['es_username'] = None - parsed_conf['es_password'] = None - parsed_conf['aws_region'] = None - parsed_conf['profile'] = None - parsed_conf['es_host'] = os.environ.get('ES_HOST', conf['es_host']) - parsed_conf['es_port'] = int(os.environ.get('ES_PORT', conf['es_port'])) - parsed_conf['es_url_prefix'] = '' - parsed_conf['es_conn_timeout'] = conf.get('es_conn_timeout', 20) - parsed_conf['send_get_body_as'] = conf.get('es_send_get_body_as', 'GET') - - if os.environ.get('ES_USERNAME'): - parsed_conf['es_username'] = os.environ.get('ES_USERNAME') - parsed_conf['es_password'] = os.environ.get('ES_PASSWORD') - elif 'es_username' in conf: - parsed_conf['es_username'] = conf['es_username'] - parsed_conf['es_password'] = conf['es_password'] - - if 'aws_region' in conf: - parsed_conf['aws_region'] = conf['aws_region'] - - # Deprecated - if 'boto_profile' in conf: - logging.warning('Found deprecated "boto_profile", use "profile" instead!') - parsed_conf['profile'] = conf['boto_profile'] - - if 'profile' in conf: - parsed_conf['profile'] = conf['profile'] - - if 'use_ssl' in conf: - parsed_conf['use_ssl'] = conf['use_ssl'] - - if 'verify_certs' in conf: - parsed_conf['verify_certs'] = conf['verify_certs'] - - if 'ca_certs' in conf: - parsed_conf['ca_certs'] = conf['ca_certs'] - - if 'client_cert' in conf: - parsed_conf['client_cert'] = conf['client_cert'] - - if 'client_key' in conf: - parsed_conf['client_key'] = conf['client_key'] - - if 'es_url_prefix' in conf: - parsed_conf['es_url_prefix'] = conf['es_url_prefix'] - - return parsed_conf - - -def parse_duration(value): - """Convert ``unit=num`` spec into a ``timedelta`` object.""" - unit, num = value.split('=') - return 
datetime.timedelta(**{unit: int(num)}) - - -def parse_deadline(value): - """Convert ``unit=num`` spec into a ``datetime`` object.""" - duration = parse_duration(value) - return ts_now() + duration - - -def flatten_dict(dct, delim='.', prefix=''): - ret = {} - for key, val in dct.items(): - if type(val) == dict: - ret.update(flatten_dict(val, prefix=prefix + key + delim)) - else: - ret[prefix + key] = val - return ret - - -def resolve_string(string, match, missing_text=''): - """ - Given a python string that may contain references to fields on the match dictionary, - the strings are replaced using the corresponding values. - However, if the referenced field is not found on the dictionary, - it is replaced by a default string. - Strings can be formatted using the old-style format ('%(field)s') or - the new-style format ('{match[field]}'). - - :param string: A string that may contain references to values of the 'match' dictionary. - :param match: A dictionary with the values to replace where referenced by keys in the string. - :param missing_text: The default text to replace a formatter with if the field doesnt exist. - """ - flat_match = flatten_dict(match) - flat_match.update(match) - dd_match = collections.defaultdict(lambda: missing_text, flat_match) - dd_match['_missing_value'] = missing_text - while True: - try: - string = string % dd_match - string = string.format(**dd_match) - break - except KeyError as e: - if '{%s}' % e.message not in string: - break - string = string.replace('{%s}' % e.message, '{_missing_value}') - - return string \ No newline at end of file +# This is the folder that contains the rule yaml files +# Any .yaml file will be loaded as a rule +rules_folder: example_rules/extras +index: traces* + +# How often ElastAlert will query Elasticsearch +# The unit can be anything from weeks to seconds +run_every: + hours: 12 + +realert: + minutes: 0 + +# ElastAlert will buffer results from the most recent +# period of time, in case some log sources are not in real time +buffer_time: + minutes: 15 + +# The Elasticsearch hostname for metadata writeback +# Note that every rule can have its own Elasticsearch host +es_host: localhost + +# The Elasticsearch port +es_port: 9999 + +kibana_adapter: localhost +kibana_adapter_port: 9999 +#kibana_adapter_url_prefix: /alerts + +# The AWS region to use. Set this when using AWS-managed elasticsearch +#aws_region: us-east-1 + +# The AWS profile to use. Use this if you are using an aws-cli profile. +# See http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html +# for details +#profile: test + +# Optional URL prefix for Elasticsearch +#es_url_prefix: elasticsearch + +# Connect with TLS to Elasticsearch +#use_ssl: True + +# Verify TLS certificates +#verify_certs: True + +# GET request with body is the default option for Elasticsearch. +# If it fails for some reason, you can pass 'GET', 'POST' or 'source'. 
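
resolve_string above formats alert text from a match via a defaultdict, so a missing field degrades to a placeholder instead of raising. The core trick in isolation, with a simplified flattener; this is illustrative only, not the removed implementation:

    import collections

    def flatten(dct, prefix=''):
        # {'user': {'name': 'bob'}} -> {'user.name': 'bob'}
        out = {}
        for key, val in dct.items():
            if isinstance(val, dict):
                out.update(flatten(val, prefix + key + '.'))
            else:
                out[prefix + key] = val
        return out

    match = {'user': {'name': 'bob'}, 'count': 3}
    flat = collections.defaultdict(lambda: '<MISSING VALUE>', flatten(match))
    print('%(user.name)s triggered %(count)s times' % flat)         # old-style formatting
    print('{0[user.name]} fired; host was {0[host]}'.format(flat))  # missing key degrades
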
+# See http://elasticsearch-py.readthedocs.io/en/master/connection.html?highlight=send_get_body_as#transport +# for details +#es_send_get_body_as: GET + +# Option basic-auth username and password for Elasticsearch +#es_username: someusername +#es_password: somepassword + +# Use SSL authentication with client certificates client_cert must be +# a pem file containing both cert and key for client +#verify_certs: True +#ca_certs: /path/to/cacert.pem +#client_cert: /path/to/client_cert.pem +#client_key: /path/to/client_key.key + +# The index on es_host which is used for metadata storage +# This can be a unmapped index, but it is recommended that you run +# elastalert-create-index to set a mapping +writeback_index: elastalert_status + +# If an alert fails for some reason, ElastAlert will retry +# sending the alert until this time period has elapsed +alert_time_limit: + days: 2 + +query_endpoint: https://localhost:9999/v2/sherlock-alerts/traces/visualize diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 7007d55c0..10203b902 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -29,7 +29,7 @@ from elasticsearch.exceptions import ElasticsearchException from elasticsearch.exceptions import NotFoundError from elasticsearch.exceptions import TransportError -from ruletypes import ErrorRateRule +from elastalert.ruletypes import ErrorRateRule from elastalert.alerters.debug import DebugAlerter from elastalert.config import load_conf @@ -390,7 +390,7 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): try: res = self.thread_data.current_es.msearch(body=request) res = res['responses'][0] - self.total_hits = int(res['hits']['total']) + self.thread_data.total_hits = int(res['hits']['total']) # if scroll: # res = self.thread_data.current_es.scroll(scroll_id=rule['scroll_id'], scroll=scroll_keepalive) # else: @@ -597,13 +597,13 @@ def get_error_rate(self, rule, starttime, endtime): payload = {'error_count': error_data, 'total_count': total_data, 'start_time': starttime, 'end_time': endtime} elastalert_logger.info("query start time and endtime %s at %s , error_count %d ,total_count %d" % (starttime, endtime, error_data, total_data)) - self.num_hits += int(error_count) + self.thread_data.num_hits += int(error_count) return {endtime: payload} def get_query_string(self, rule): - if rule['filter'] and ('query_string' in rule['filter'][0]) and ('query' in rule['filter'][0]['query_string']): - return rule['filter'][0]['query_string']['query'] + if rule['filter'] and ('query_string' in rule['filter'][0]['query']) and ('query' in rule['filter'][0]['query']['query_string']): + return rule['filter'][0]['query']['query_string']['query'] return "" def get_ch_data(self, rule, starttime, endtime, agg_key, freshquery,aggregation): @@ -739,8 +739,8 @@ def get_starttime(self, rule): try: doc_type = 'elastalert_status' index = self.writeback_es.resolve_writeback_index(self.writeback_index, doc_type) - res = self.writeback_es.search(index=index, size=1, body=query, - _source_includes=['endtime', 'rule_name']) + res = self.writeback_es.search(index=index, doc_type='elastalert_status', + size=1, body=query, _source_include=['endtime', 'rule_name']) if res['hits']['hits']: endtime = ts_to_dt(res['hits']['hits'][0]['_source']['endtime']) @@ -1549,7 +1549,7 @@ def writeback(self, doc_type, body, rule=None, match_body=None): try: index = self.writeback_es.resolve_writeback_index(self.writeback_index, doc_type) - res = self.writeback_es.index(index=index, body=body) + res = 
self.writeback_es.index(index=index,doc_type=doc_type, body=body) return res except ElasticsearchException as e: elastalert_logger.exception("Error writing alert info to Elasticsearch: %s" % (e)) @@ -1569,7 +1569,10 @@ def find_recent_pending_alerts(self, time_limit): query = {'query': {'bool': {'must': inner_query, 'filter': time_filter}}} query.update(sort) try: - res = self.writeback_es.search(index=self.writeback_index, body=query, size=1000) + res = self.writeback_es.search(index=self.writeback_index, + doc_type='elastalert', + body=query, + size=1000) if res['hits']['hits']: return res['hits']['hits'] except ElasticsearchException as e: @@ -1621,7 +1624,9 @@ def send_pending_alerts(self): # Delete it from the index try: - self.writeback_es.delete(index=self.writeback_index, id=_id) + self.writeback_es.delete(index=self.writeback_index, + doc_type='elastalert', + id=_id) except ElasticsearchException: # TODO: Give this a more relevant exception, try:except: is evil. self.handle_error("Failed to delete alert %s at %s" % (_id, alert_time)) @@ -1651,11 +1656,15 @@ def get_aggregated_matches(self, _id): query = {'query': {'query_string': {'query': 'aggregate_id:"%s"' % (_id)}}, 'sort': {'@timestamp': 'asc'}} matches = [] try: - res = self.writeback_es.search(index=self.writeback_index, body=query, + res = self.writeback_es.search(index=self.writeback_index, + doc_type='elastalert', + body=query, size=self.max_aggregation) for match in res['hits']['hits']: matches.append(match['_source']) - self.writeback_es.delete(index=self.writeback_index, id=match['_id']) + self.writeback_es.delete(index=self.writeback_index, + doc_type='elastalert', + id=match['_id']) except (KeyError, ElasticsearchException) as e: self.handle_error("Error fetching aggregated matches: %s" % (e), {'id': _id}) return matches @@ -1670,7 +1679,10 @@ def find_pending_aggregate_alert(self, rule, aggregation_key_value=None): query = {'query': {'bool': query}} query['sort'] = {'alert_time': {'order': 'desc'}} try: - res = self.writeback_es.search(index=self.writeback_index, body=query, size=1) + res = self.writeback_es.search(index=self.writeback_index, + doc_type='elastalert', + body=query, + size=1) if len(res['hits']['hits']) == 0: return None except (KeyError, ElasticsearchException) as e: @@ -1812,9 +1824,10 @@ def is_silenced(self, rule_name): query.update(sort) try: - index = self.get_six_index('silence') - res = self.writeback_es.search(index=index, size=1, body=query, - _source_includes=['until', 'exponent']) + doc_type = 'silence' + index = self.writeback_es.resolve_writeback_index(self.writeback_index, doc_type) + res = self.writeback_es.search(index=index, doc_type='silence', + size=1, body=query, _source_include=['until', 'exponent']) except ElasticsearchException as e: self.handle_error("Error while querying for alert silence status: %s" % (e), {'rule': rule_name}) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 9c8402152..e37c50650 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -736,7 +736,7 @@ def get_all_terms(self, args): index = format_index(self.rules['index'], tmp_start, tmp_end) else: index = self.rules['index'] - res = self.es.search(body=query, index=index, ignore_unavailable=True, timeout='50s') + res = self.es.search(body=query, index=index, doc_type='elastalert_status', ignore_unavailable=True, timeout='50s') if 'aggregations' in res: buckets = res['aggregations']['filtered']['values']['buckets'] if type(field) == list: @@ -1057,14 +1057,14 @@ def 
__init__(self, *args): self.rules['total_agg_key'] = self.rules['unique_column'] self.rules['count_all_errors'] = True - if (self.rules.has_key('error_calculation_method') and self.rules['error_calculation_method']=='count_traces_with_errors' ): + if ( 'error_calculation_method' in self.rules and self.rules['error_calculation_method']=='count_traces_with_errors' ): self.rules['count_all_errors'] = False # hardcoding uniq aggregation for total count self.rules['total_agg_type'] = "uniq" def calculate_err_rate(self,payload): - for timestamp, payload_data in payload.iteritems(): + for timestamp, payload_data in payload.items(): if int(payload_data['total_count']) > 0: rate = float(payload_data['error_count'])/float(payload_data['total_count']) rate = float(rate)/float(self.rules['sampling']) @@ -1119,7 +1119,7 @@ def check_matches(self, timestamp, query_key, aggregation_data): else: if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: - metric_val = list(aggregation_data[self.metric_key]['values'].values())[0] + metric_val = list(aggregation_data[self.metric_key]['values'][0].values())[0] else: metric_val = aggregation_data[self.metric_key]['value'] if self.crossed_thresholds(metric_val): @@ -1166,9 +1166,9 @@ def check_matches_recursive(self, timestamp, query_key, aggregation_data, compou def crossed_thresholds(self, metric_value): if metric_value is None: return False - if 'max_threshold' in self.rules and metric_value > self.rules['max_threshold']: + if 'max_threshold' in self.rules and int(metric_value) > self.rules['max_threshold']: return True - if 'min_threshold' in self.rules and metric_value < self.rules['min_threshold']: + if 'min_threshold' in self.rules and int(metric_value) < self.rules['min_threshold']: return True return False diff --git a/requirements.txt b/requirements.txt index 26a894e1b..ff8a2accd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,25 +1,26 @@ -apscheduler>=3.9.1.post1,<4.0 +apscheduler>=3.8.1,<4.0 aws-requests-auth>=0.4.3 -boto3>=1.26.30 -cffi>=1.15.1 -croniter>=1.3.8 -elasticsearch==7.10.1 +sortedcontainers>=2.4.0 +boto3>=1.20.53 +cffi>=1.15.0 +croniter>=1.2.0 +elasticsearch==6.3.1 envparse>=0.2.0 exotel==0.1.5 -Jinja2>=3.1.2 -jira>=3.4.1 -jsonpointer>=2.3 -jsonschema>=4.17.3 +Jinja2==3.1.2 +jira>=3.1.1 +jsonschema>=4.4.0 +mock>=2.0.0 prison>=0.2.1 -prometheus_client>=0.15.0 +prometheus_client>=0.13.1 +py-zabbix>=1.1.7 python-dateutil>=2.8.2 PyYAML>=6.0 -py-zabbix>=1.1.7 -requests>=2.28.1 -sortedcontainers>=2.4.0 +requests>=2.27.1 +stomp.py>=8.0.1 +texttable>=1.6.4 statsd-tags==3.2.1.post1 -stomp.py>=8.1.0 -tencentcloud-sdk-python>=3.0.795 -texttable>=1.6.7 -twilio>=7.16.0 +twilio==6.57.0 +tencentcloud-sdk-python>=3.0.577 +jsonpointer>=2.2 tzlocal==2.1 diff --git a/setup.py b/setup.py index 5f0292ec3..b3db43383 100644 --- a/setup.py +++ b/setup.py @@ -32,30 +32,30 @@ packages=find_packages(exclude=["tests"]), package_data={'elastalert': ['schema.yaml', 'es_mappings/**/*.json']}, install_requires=[ - 'apscheduler>=3.9.1.post1,<4.0', + 'apscheduler>=3.8.1.post1,<4.0', 'aws-requests-auth>=0.4.3', - 'boto3>=1.26.30', - 'cffi>=1.15.1', - 'croniter>=1.3.8', - 'elasticsearch==7.10.1', + 'boto3>=1.20.53', + 'cffi>=1.15.0', + 'croniter>=1.2.0', + 'elasticsearch==6.3.1', 'envparse>=0.2.0', 'exotel==0.1.5', 'Jinja2>=3.1.2', - 'jira>=3.4.1', - 'jsonpointer>=2.3', - 'jsonschema>=4.17.3', + 'jira>=3.1.1', + 'jsonpointer>=2.2', + 'jsonschema>=4.4.0', 'prison>=0.2.1', - 'prometheus_client>=0.15.0', + 'prometheus_client>=0.13.1', 
'python-dateutil>=2.8.2', 'PyYAML>=6.0', 'py-zabbix>=1.1.7', - 'requests>=2.8.2', + 'requests>=2.27.1', 'sortedcontainers>=2.4.0', 'statsd-tags==3.2.1.post1', - 'stomp.py>=8.1.0', - 'tencentcloud-sdk-python>=3.0.795', - 'texttable>=1.6.7', - 'twilio>=7.16.0', + 'stomp.py>=8.0.1', + 'tencentcloud-sdk-python>=3.0.577', + 'texttable>=1.6.4', + 'twilio==6.57.0', 'tzlocal==2.1' ] ) From 00d7f82e92b13447421b991113594f5f589574b7 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Fri, 3 Mar 2023 14:55:50 +0530 Subject: [PATCH 1019/1065] Error rate testcases Added --- elastalert/elastalert.py | 2 +- tests/conftest.py | 3 + tests/rules_test.py | 137 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 141 insertions(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 10203b902..00c2ce8d3 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -597,7 +597,7 @@ def get_error_rate(self, rule, starttime, endtime): payload = {'error_count': error_data, 'total_count': total_data, 'start_time': starttime, 'end_time': endtime} elastalert_logger.info("query start time and endtime %s at %s , error_count %d ,total_count %d" % (starttime, endtime, error_data, total_data)) - self.thread_data.num_hits += int(error_count) + self.thread_data.num_hits += int(error_data) return {endtime: payload} diff --git a/tests/conftest.py b/tests/conftest.py index 64e25546a..389a48511 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -127,12 +127,15 @@ def ea(): 'alert_time_limit': datetime.timedelta(hours=24), 'es_host': 'es', 'es_port': 14900, + 'kibana_adapter': '', + 'kibana_adapter_port': 14900, 'writeback_index': 'wb', 'rules': rules, 'max_query_size': 10000, 'old_query_limit': datetime.timedelta(weeks=1), 'disable_rules_on_error': False, 'scroll_keepalive': '30s', + 'query_endpoint': 'http://localhost:9999/v2/sherlock-alerts/traces/visualize', 'custom_pretty_ts_format': '%Y-%m-%d %H:%M'} elastalert.util.elasticsearch_client = mock_es_client conf['rules_loader'] = mock_rule_loader(conf) diff --git a/tests/rules_test.py b/tests/rules_test.py index 137bab137..17abe459c 100644 --- a/tests/rules_test.py +++ b/tests/rules_test.py @@ -4,6 +4,8 @@ from unittest import mock import pytest +from datetime import datetime as dt +from tests.conftest import ea from elastalert.ruletypes import AnyRule from elastalert.ruletypes import BaseAggregationRule @@ -15,6 +17,7 @@ from elastalert.ruletypes import FlatlineRule from elastalert.ruletypes import FrequencyRule from elastalert.ruletypes import MetricAggregationRule +from elastalert.ruletypes import ErrorRateRule from elastalert.ruletypes import NewTermsRule from elastalert.ruletypes import PercentageMatchRule from elastalert.ruletypes import RuleType @@ -1280,6 +1283,140 @@ def test_metric_aggregation_scripted(): assert rule.matches[0]['metric_cpu_pct_avg'] == -0.5 +def _mock_response( + status=200, + content='{"test": "test"}', + json_data=None, + raise_for_status= None): + + mock_resp = mock.Mock() + # mock raise_for_status call w/optional error + mock_resp.raise_for_status = mock.Mock() + if raise_for_status: + mock_resp.raise_for_status.side_effect = raise_for_status + # set status code and content + mock_resp.status_code = status + mock_resp.content = content + # add json data if provided + if json_data: + mock_resp.json = mock.Mock(return_value=json_data) + return mock_resp + +def get_error_rate_tester(ea,total_count= 5,error_count= 10, count_all_errors=True): + #testing elastalert function that hits query_endpoint and 
gets aggregation data
+    rules = [{'es_host': '',
+              'es_port': 14900,
+              'name': 'error rate',
+              'index': 'idx',
+              'filter': [],
+              'include': ['@timestamp'],
+              'aggregation': datetime.timedelta(0),
+              'realert': datetime.timedelta(0),
+              'processed_hits': {},
+              'timestamp_field': '@timestamp',
+              'match_enhancements': [],
+              'rule_file': 'blah.yaml',
+              'max_query_size': 10000,
+              'ts_to_dt': ts_to_dt,
+              'dt_to_ts': dt_to_ts,
+              '_source_enabled': True,
+              'buffer_time': datetime.timedelta(minutes=5),
+              'sampling' : 100,
+              'threshold': 0.5,
+              'error_condition': 'exception.message: *',
+              'timestamp_field':'timestamp',
+              'type':'error_rate',
+              'total_agg_type': 'uniq',
+              'total_agg_key': 'traceID',
+              'count_all_errors': count_all_errors
+              }]
+
+    ts = dt.now()
+    mock_responses = [
+        _mock_response(content = '{"data":[{"uniq(traceID)":'+ str(total_count)+'}],"rows":[] }'),
+        _mock_response(content = '{"data":[{"count()":'+ str(error_count)+'}],"rows":[] }')
+    ]
+
+    if(not count_all_errors):
+        mock_responses[1] = _mock_response(content = '{"data":[{"uniq(traceID)":'+ str(error_count)+'}],"rows":[] }')
+
+    with mock.patch('requests.post') as mock_post:
+        mock_post.side_effect = mock_responses
+        ea.get_error_rate(rules[0],ts,ts)
+        calls = mock_post.call_args_list
+        assert calls[0][0][0] == "http://localhost:9999/v2/sherlock-alerts/traces/visualize"
+        assert calls[0][1]['json']['aggregations'] == [{'function': 'UNIQ', 'field': 'traceID'}]
+        assert calls[1][0][0] == "http://localhost:9999/v2/sherlock-alerts/traces/visualize"
+        if count_all_errors:
+            assert calls[1][1]['json']['aggregations'] == [{'function': 'COUNT', 'field': '1'}]
+        else:
+            assert calls[1][1]['json']['aggregations'] == [{'function': 'UNIQ', 'field': 'traceID'}]
+        assert calls[1][1]['json']['freshquery'] == rules[0]['error_condition']
+
+
+@pytest.mark.usefixtures("ea")
+def test_error_rate_rule(ea):
+    rules = {
+        'buffer_time': datetime.timedelta(minutes=5),
+        'sampling' : 100,
+        'threshold': 0.5,
+        'error_condition': "exception.message: *",
+        'unique_column': 'traceID',
+        'timestamp_field':'timestamp'
+    }
+
+
+    # testing default initialization based on error_calculation_method
+
+    rule = ErrorRateRule(rules)
+    assert rule.rules['count_all_errors'] == True
+
+    rules["error_calculation_method"] = 'count_all_errors'
+    rule = ErrorRateRule(rules)
+    assert rule.rules['count_all_errors'] == True
+
+    rules["error_calculation_method"] = 'count_all_errors'
+    rule = ErrorRateRule(rules)
+    assert rule.rules['count_all_errors'] == True
+
+    rules["error_calculation_method"] = 'count_traces_with_errors'
+    rule = ErrorRateRule(rules)
+    assert rule.rules['count_all_errors'] == False
+
+    timestamp = ts_now()
+
+    payload = {
+        timestamp :
+        {
+            'total_count': 0,
+            'start_time': timestamp,
+            'error_count': 0,
+            'end_time': timestamp
+        }
+    }
+
+    rule.calculate_err_rate(payload)
+    assert len(rule.matches) == 0
+
+    payload[timestamp]['total_count'] = 10
+    payload[timestamp]['error_count'] = 6
+    rule.calculate_err_rate(payload)
+    assert len(rule.matches) == 1
+
+    payload[timestamp]['total_count'] = 10
+    payload[timestamp]['error_count'] = 4
+    rule.calculate_err_rate(payload)
+    assert len(rule.matches) == 1
+
+    payload[timestamp]['total_count'] = 10
+    payload[timestamp]['error_count'] = 8
+    rule.calculate_err_rate(payload)
+    assert len(rule.matches) == 2
+
+    get_error_rate_tester(ea=ea,count_all_errors= True)
+    get_error_rate_tester(ea=ea,count_all_errors= False)
+
+
 def test_percentage_match():
     rules = {'match_bucket_filter': {'term': 'term_val'},
'buffer_time': datetime.timedelta(minutes=5), From f97dcdee5e7e4629a56fddff95fcb0fad2ee816b Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 6 Mar 2023 09:04:29 +0530 Subject: [PATCH 1020/1065] Dockerfile added --- Dockerfile | 57 ++++++++++++++++++++++++++++++++---------------------- 1 file changed, 34 insertions(+), 23 deletions(-) diff --git a/Dockerfile b/Dockerfile index ac5ca40ab..a0c207e30 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,35 +1,46 @@ -# UNDER DEVELOPMENT +FROM python:3.9-slim-buster as build -# FROM python:3-slim as build +ENV ELASTALERT_HOME /opt/elastalert +ADD . /opt/elastalert/ -# ENV ELASTALERT_HOME /opt/elastalert -# ADD . /opt/elastalert/ +WORKDIR /opt -# WORKDIR /opt +RUN apt-get update +RUN apt-get -y install jq curl gcc openssl libssl-dev libffi-dev ca-certificates musl-dev python-dev +RUN pip install "setuptools==65.5.0" "elasticsearch==6.3.1" -# RUN apk add --update --no-cache jq curl gcc openssl-dev libffi-dev openssl ca-certificates musl-dev python-dev -# RUN pip install "setuptools==36.2.7" "elasticsearch==6.3.1" +WORKDIR "${ELASTALERT_HOME}" -# WORKDIR "${ELASTALERT_HOME}" +RUN pip install -r requirements.txt +RUN python setup.py install -# RUN pip install -r requirements.txt -# RUN python setup.py install +RUN pip show elastalert2 -# FROM gcr.io/distroless/python3:debug as runtime +RUN echo "coming here..." +RUN ls /usr/local/lib/ +RUN ls /usr/lib/ +RUN ls /lib/ -# COPY --from=build /opt/elastalert /opt/elastalert -# COPY --from=build /usr/local/lib/python3 /usr/local/lib/python3 -# COPY --from=build /usr/local/bin/elastalert* /usr/local/bin/ -# COPY --from=build /usr/local/lib/libpython2.7.so.1.0 /usr/local/lib/ -# COPY --from=build /usr/lib/libpython2.7.so.1.0 /usr/lib/ -# COPY --from=build /lib/libc.musl-x86_64.so.1 /lib/ +FROM gcr.io/distroless/python3:debug as runtime -# #COPY --from=build /data/elastalert /data/elastalert +COPY --from=build /opt/elastalert /opt/elastalert +COPY --from=build /usr/local/bin/elastalert* /usr/local/bin/ -# ENV PYTHONPATH=/usr/local/lib/python2.7/site-packages -# ENV PATH=/usr/local/lib:/usr/lib:$PATH +COPY --from=build /opt/elastalert /opt/elastalert +COPY --from=build /usr/local/lib/python3.9 /usr/local/lib/python3.9 +COPY --from=build /usr/local/bin/elastalert* /usr/local/bin/ +COPY --from=build /usr/local/lib/libpython3.9.so.1.0 /usr/local/lib/ +COPY --from=build /lib/ld-musl-aarch64.so.1 /lib/ -# WORKDIR /opt/elastalert +#COPY --from=build /data/elastalert /data/elastalert -# CMD ["/usr/local/bin/elastalert-create-index","--config","/data/elastalert/config.yaml", "--verbose"] -# CMD ["/usr/local/bin/elastalert","--config","/data/elastalert/config.yaml", "--verbose"] +ENV PYTHONPATH=/usr/local/lib/python3.9/site-packages +ENV PATH=/usr/local/lib:/usr/lib:$PATH + +RUN ls /usr/local/bin/ +RUN python --version + +WORKDIR /opt/elastalert + +ENTRYPOINT ["python","-m","elastalert.create_index","--config","/data/elastalert/config.yaml", "--verbose"] +ENTRYPOINT ["python","-m","elastalert.elastalert","--config","/data/elastalert/config.yaml", "--verbose"] From d550d91599c0718cca32dbbfb3f569dba5219a59 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 6 Mar 2023 10:05:56 +0530 Subject: [PATCH 1021/1065] changed libc image to arm libc.musl-x86_64.so.1 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index a0c207e30..66d997285 100644 --- a/Dockerfile +++ b/Dockerfile @@ -30,7 +30,7 @@ COPY --from=build /opt/elastalert /opt/elastalert COPY --from=build 
/usr/local/lib/python3.9 /usr/local/lib/python3.9 COPY --from=build /usr/local/bin/elastalert* /usr/local/bin/ COPY --from=build /usr/local/lib/libpython3.9.so.1.0 /usr/local/lib/ -COPY --from=build /lib/ld-musl-aarch64.so.1 /lib/ +COPY --from=build /lib/libc.musl-x86_64.so.1 /lib/ #COPY --from=build /data/elastalert /data/elastalert From 68ea0f9ec42ca8f807651d6e64cfa1b48f09589f Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 6 Mar 2023 10:19:21 +0530 Subject: [PATCH 1022/1065] removed arm libc.musl-x86_64.so.1 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 66d997285..182fe3667 100644 --- a/Dockerfile +++ b/Dockerfile @@ -30,7 +30,7 @@ COPY --from=build /opt/elastalert /opt/elastalert COPY --from=build /usr/local/lib/python3.9 /usr/local/lib/python3.9 COPY --from=build /usr/local/bin/elastalert* /usr/local/bin/ COPY --from=build /usr/local/lib/libpython3.9.so.1.0 /usr/local/lib/ -COPY --from=build /lib/libc.musl-x86_64.so.1 /lib/ +# COPY --from=build /lib/libc.musl-x86_64.so.1 /lib/ #COPY --from=build /data/elastalert /data/elastalert From 53cf1a56760078d2c002fd099dce1e50d28deb4f Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 6 Mar 2023 11:57:20 +0530 Subject: [PATCH 1023/1065] Dockerfile - making base image 3.9-alpine --- Dockerfile | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index 182fe3667..dd94d386c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,11 @@ -FROM python:3.9-slim-buster as build +FROM python:3.9-alpine as build ENV ELASTALERT_HOME /opt/elastalert ADD . /opt/elastalert/ WORKDIR /opt -RUN apt-get update -RUN apt-get -y install jq curl gcc openssl libssl-dev libffi-dev ca-certificates musl-dev python-dev +RUN apk add --update --no-cache jq curl gcc openssl-dev libffi-dev ca-certificates musl-dev RUN pip install "setuptools==65.5.0" "elasticsearch==6.3.1" WORKDIR "${ELASTALERT_HOME}" @@ -26,11 +25,11 @@ FROM gcr.io/distroless/python3:debug as runtime COPY --from=build /opt/elastalert /opt/elastalert COPY --from=build /usr/local/bin/elastalert* /usr/local/bin/ -COPY --from=build /opt/elastalert /opt/elastalert +COPY --from=build /opt/elastalert /opt/elastalert COPY --from=build /usr/local/lib/python3.9 /usr/local/lib/python3.9 COPY --from=build /usr/local/bin/elastalert* /usr/local/bin/ COPY --from=build /usr/local/lib/libpython3.9.so.1.0 /usr/local/lib/ -# COPY --from=build /lib/libc.musl-x86_64.so.1 /lib/ +COPY --from=build /lib/libc.musl-x86_64.so.1 /lib/ #COPY --from=build /data/elastalert /data/elastalert @@ -42,5 +41,5 @@ RUN python --version WORKDIR /opt/elastalert -ENTRYPOINT ["python","-m","elastalert.create_index","--config","/data/elastalert/config.yaml", "--verbose"] -ENTRYPOINT ["python","-m","elastalert.elastalert","--config","/data/elastalert/config.yaml", "--verbose"] +ENTRYPOINT ["python","-m","elastalert.create_index","--config","config.yaml", "--verbose"] +ENTRYPOINT ["python","-m","elastalert.elastalert","--config","config.yaml", "--verbose"] From 495d5ec44e6f99d0cdbcc5694d1d5ebdfbda12d1 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 6 Mar 2023 11:58:50 +0530 Subject: [PATCH 1024/1065] Dockerfile - config.yaml path update --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index dd94d386c..4653c49de 100644 --- a/Dockerfile +++ b/Dockerfile @@ -41,5 +41,5 @@ RUN python --version WORKDIR /opt/elastalert -ENTRYPOINT 
["python","-m","elastalert.create_index","--config","config.yaml", "--verbose"] -ENTRYPOINT ["python","-m","elastalert.elastalert","--config","config.yaml", "--verbose"] +ENTRYPOINT ["python","-m","elastalert.create_index","--config","/data/elastalert/config.yaml", "--verbose"] +ENTRYPOINT ["python","-m","elastalert.elastalert","--config","/data/elastalert/config.yaml", "--verbose"] From eb2b982076b7a4c67e78ba1f58561b9b212c64b5 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Tue, 21 Mar 2023 09:33:40 +0530 Subject: [PATCH 1025/1065] removed unnecessary changes --- comparisonFile | 33 ---------------------- comparisonFile2 | 74 ------------------------------------------------- 2 files changed, 107 deletions(-) delete mode 100644 comparisonFile delete mode 100644 comparisonFile2 diff --git a/comparisonFile b/comparisonFile deleted file mode 100644 index 04e9d6e1c..000000000 --- a/comparisonFile +++ /dev/null @@ -1,33 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/comparisonFile2 b/comparisonFile2 deleted file mode 100644 index 7965a9601..000000000 --- a/comparisonFile2 +++ /dev/null @@ -1,74 +0,0 @@ -# This is the folder that contains the rule yaml files -# Any .yaml file will be loaded as a rule -rules_folder: example_rules/extras -index: traces* - -# How often ElastAlert will query Elasticsearch -# The unit can be anything from weeks to seconds -run_every: - hours: 12 - -realert: - minutes: 0 - -# ElastAlert will buffer results from the most recent -# period of time, in case some log sources are not in real time -buffer_time: - minutes: 15 - -# The Elasticsearch hostname for metadata writeback -# Note that every rule can have its own Elasticsearch host -es_host: localhost - -# The Elasticsearch port -es_port: 9999 - -kibana_adapter: localhost -kibana_adapter_port: 9999 -#kibana_adapter_url_prefix: /alerts - -# The AWS region to use. Set this when using AWS-managed elasticsearch -#aws_region: us-east-1 - -# The AWS profile to use. Use this if you are using an aws-cli profile. -# See http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html -# for details -#profile: test - -# Optional URL prefix for Elasticsearch -#es_url_prefix: elasticsearch - -# Connect with TLS to Elasticsearch -#use_ssl: True - -# Verify TLS certificates -#verify_certs: True - -# GET request with body is the default option for Elasticsearch. -# If it fails for some reason, you can pass 'GET', 'POST' or 'source'. 
-# See http://elasticsearch-py.readthedocs.io/en/master/connection.html?highlight=send_get_body_as#transport -# for details -#es_send_get_body_as: GET - -# Option basic-auth username and password for Elasticsearch -#es_username: someusername -#es_password: somepassword - -# Use SSL authentication with client certificates client_cert must be -# a pem file containing both cert and key for client -#verify_certs: True -#ca_certs: /path/to/cacert.pem -#client_cert: /path/to/client_cert.pem -#client_key: /path/to/client_key.key - -# The index on es_host which is used for metadata storage -# This can be a unmapped index, but it is recommended that you run -# elastalert-create-index to set a mapping -writeback_index: elastalert_status - -# If an alert fails for some reason, ElastAlert will retry -# sending the alert until this time period has elapsed -alert_time_limit: - days: 2 - -query_endpoint: https://localhost:9999/v2/sherlock-alerts/traces/visualize From c6d3e03fdba8e29e2572eac67709d6fc2c68944e Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Tue, 21 Mar 2023 09:50:48 +0530 Subject: [PATCH 1026/1065] changed the files accordingly --- .scratch | 1212 ++++++++++++++++++++++++++++++++++++++ elastalert/elastalert.py | 31 +- elastalert/ruletypes.py | 31 +- 3 files changed, 1265 insertions(+), 9 deletions(-) create mode 100644 .scratch diff --git a/.scratch b/.scratch new file mode 100644 index 000000000..f0635f3d7 --- /dev/null +++ b/.scratch @@ -0,0 +1,1212 @@ +# -*- coding: utf-8 -*- +import copy +import datetime +import sys +import time +import json + +from blist import sortedlist +from util import add_raw_postfix +from util import dt_to_ts +from util import EAException +from util import elastalert_logger +from util import elasticsearch_client +from util import format_index +from util import hashable +from util import lookup_es_key +from util import new_get_event_ts +from util import pretty_ts +from util import total_seconds +from util import ts_now +from util import ts_to_dt + + +class RuleType(object): + """ The base class for a rule type. + The class must implement add_data and add any matches to self.matches. + + :param rules: A rule configuration. + """ + required_options = frozenset() + + def __init__(self, rules, args=None): + self.matches = [] + self.rules = rules + self.occurrences = {} + self.rules['owner'] = self.rules.get('owner', '') + self.rules['priority'] = self.rules.get('priority', '2') + + def add_data(self, data): + """ The function that the ElastAlert client calls with results from ES. + Data is a list of dictionaries, from Elasticsearch. + + :param data: A list of events, each of which is a dictionary of terms. + """ + raise NotImplementedError() + + def add_match(self, event): + """ This function is called on all matching events. Rules use it to add + extra information about the context of a match. Event is a dictionary + containing terms directly from Elasticsearch and alerts will report + all of the information. + + :param event: The matching event, a dictionary of terms. + """ + # Convert datetime's back to timestamps + ts = self.rules.get('timestamp_field') + if ts in event: + event[ts] = dt_to_ts(event[ts]) + + self.matches.append(copy.deepcopy(event)) + + def get_match_str(self, match): + """ Returns a string that gives more context about a match. + + :param match: The matching event, a dictionary of terms. + :return: A user facing string describing the match. 
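
The RuleType base class above fixes the contract a rule must satisfy: add_data receives batches of Elasticsearch hits and records matches through add_match, which appends to self.matches. A toy subclass, runnable only in the context of this scratch module since it relies on the RuleType class and dt_to_ts helper defined above:

    class EveryEventRule(RuleType):
        """ Illustrative rule: treats every event as a match. """

        def add_data(self, data):
            for event in data:
                self.add_match(event)

    rule = EveryEventRule({'timestamp_field': '@timestamp'})
    rule.add_data([{'@timestamp': '2023-03-21T09:50:48Z', 'level': 'ERROR'}])
    print(len(rule.matches))  # 1
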
+ """ + return '' + + def garbage_collect(self, timestamp): + """ Gets called periodically to remove old data that is useless beyond given timestamp. + May also be used to compute things in the absence of new data. + + :param timestamp: A timestamp indicating the rule has been run up to that point. + """ + pass + + def add_count_data(self, counts): + """ Gets called when a rule has use_count_query set to True. Called to add data from querying to the rule. + + :param counts: A dictionary mapping timestamps to hit counts. + """ + raise NotImplementedError() + + def add_terms_data(self, terms): + """ Gets called when a rule has use_terms_query set to True. + + :param terms: A list of buckets with a key, corresponding to query_key, and the count """ + raise NotImplementedError() + + def add_aggregation_data(self, payload): + """ Gets called when a rule has use_terms_query set to True. + :param terms: A list of buckets with a key, corresponding to query_key, and the count """ + raise NotImplementedError() + + +class CompareRule(RuleType): + """ A base class for matching a specific term by passing it to a compare function """ + required_options = frozenset(['compound_compare_key']) + + def expand_entries(self, list_type): + """ Expand entries specified in files using the '!file' directive, if there are + any, then add everything to a set. + """ + entries_set = set() + for entry in self.rules[list_type]: + if entry.startswith("!file"): # - "!file /path/to/list" + filename = entry.split()[1] + with open(filename, 'r') as f: + for line in f: + entries_set.add(line.rstrip()) + else: + entries_set.add(entry) + self.rules[list_type] = entries_set + + def compare(self, event): + """ An event is a match if this returns true """ + raise NotImplementedError() + + def add_data(self, data): + # If compare returns true, add it as a match + for event in data: + if self.compare(event): + self.add_match(event) + + +class BlacklistRule(CompareRule): + """ A CompareRule where the compare function checks a given key against a blacklist """ + required_options = frozenset(['compare_key', 'blacklist']) + + def __init__(self, rules, args=None): + super(BlacklistRule, self).__init__(rules, args=None) + self.expand_entries('blacklist') + + def compare(self, event): + term = lookup_es_key(event, self.rules['compare_key']) + if term in self.rules['blacklist']: + return True + return False + + +class WhitelistRule(CompareRule): + """ A CompareRule where the compare function checks a given term against a whitelist """ + required_options = frozenset(['compare_key', 'whitelist', 'ignore_null']) + + def __init__(self, rules, args=None): + super(WhitelistRule, self).__init__(rules, args=None) + self.expand_entries('whitelist') + + def compare(self, event): + term = lookup_es_key(event, self.rules['compare_key']) + if term is None: + return not self.rules['ignore_null'] + if term not in self.rules['whitelist']: + return True + return False + + +class ChangeRule(CompareRule): + """ A rule that will store values for a certain term and match if those values change """ + required_options = frozenset(['query_key', 'compound_compare_key', 'ignore_null']) + change_map = {} + occurrence_time = {} + + def compare(self, event): + key = hashable(lookup_es_key(event, self.rules['query_key'])) + values = [] + elastalert_logger.debug(" Previous Values of compare keys " + str(self.occurrences)) + for val in self.rules['compound_compare_key']: + lookup_value = lookup_es_key(event, val) + values.append(lookup_value) + elastalert_logger.debug(" 
Current Values of compare keys " + str(values))
+
+        changed = False
+        for val in values:
+            if not isinstance(val, bool) and not val and self.rules['ignore_null']:
+                return False
+        # If we have seen this key before, compare it to the new value
+        if key in self.occurrences:
+            for idx, previous_values in enumerate(self.occurrences[key]):
+                elastalert_logger.debug(" " + str(previous_values) + " " + str(values[idx]))
+                changed = previous_values != values[idx]
+                if changed:
+                    break
+            if changed:
+                self.change_map[key] = (self.occurrences[key], values)
+                # If using timeframe, only return true if the time delta is < timeframe
+                if key in self.occurrence_time:
+                    changed = event[self.rules['timestamp_field']] - self.occurrence_time[key] <= self.rules['timeframe']
+
+        # Update the current value and time
+        elastalert_logger.debug(" Setting current value of compare keys values " + str(values))
+        self.occurrences[key] = values
+        if 'timeframe' in self.rules:
+            self.occurrence_time[key] = event[self.rules['timestamp_field']]
+        elastalert_logger.debug("Final result of comparison between previous and current values " + str(changed))
+        return changed
+
+    def add_match(self, match):
+        # TODO this is not technically correct
+        # if the term changes multiple times before an alert is sent
+        # this data will be overwritten with the most recent change
+        change = self.change_map.get(hashable(lookup_es_key(match, self.rules['query_key'])))
+        extra = {}
+        if change:
+            extra = {'old_value': change[0],
+                     'new_value': change[1]}
+        elastalert_logger.debug("Description of the changed records " + str(dict(match.items() + extra.items())))
+        super(ChangeRule, self).add_match(dict(match.items() + extra.items()))
+
+
+class FrequencyRule(RuleType):
+    """ A rule that matches if num_events number of events occur within a timeframe """
+    required_options = frozenset(['num_events', 'timeframe'])
+
+    def __init__(self, *args):
+        super(FrequencyRule, self).__init__(*args)
+        self.ts_field = self.rules.get('timestamp_field', '@timestamp')
+        self.get_ts = new_get_event_ts(self.ts_field)
+        self.attach_related = self.rules.get('attach_related', False)
+
+    def add_count_data(self, data):
+        """ Add count data to the rule. Data should be of the form {ts: count}.
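+            A minimal sketch of the expected shape, with a hypothetical timestamp: {'2019-01-14T11:37:00Z': 42} records 42 hits at that time.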
""" + if len(data) > 1: + raise EAException('add_count_data can only accept one count at a time') + + (ts, count), = data.items() + + event = ({self.ts_field: ts}, count) + self.occurrences.setdefault('all', EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event) + self.check_for_match('all') + + def add_terms_data(self, terms): + if 'nested_query_key' in self.rules and self.rules['nested_query_key'] == True: + #letting this log message stay inorder to debug issues in future + elastalert_logger.info(terms) + for timestamp, buckets in terms.iteritems(): + self.flatten_nested_aggregations(timestamp,buckets) + else: + for timestamp, buckets in terms.iteritems(): + for bucket in buckets: + event = ({self.ts_field: timestamp, + self.rules['query_key']: bucket['key']}, bucket['doc_count']) + self.occurrences.setdefault(bucket['key'], EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event) + self.check_for_match(bucket['key']) + + def flatten_nested_aggregations(self,timestamp,buckets,key=None): + for bucket in buckets: + if key == None: + nestedkey = str(bucket['key']) + else: + nestedkey = key + ',' + str(bucket['key']) + if 'counts' in bucket: + self.flatten_nested_aggregations(timestamp,bucket['counts']['buckets'],nestedkey) + else: + event = ({self.ts_field: timestamp, + self.rules['query_key']: nestedkey}, bucket['doc_count']) + self.occurrences.setdefault(nestedkey, EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event) + self.check_for_match(nestedkey) + + + def add_data(self, data): + if 'query_key' in self.rules: + qk = self.rules['query_key'] + else: + qk = None + + for event in data: + if qk: + key = hashable(lookup_es_key(event, qk)) + else: + # If no query_key, we use the key 'all' for all events + key = 'all' + + # Store the timestamps of recent occurrences, per key + self.occurrences.setdefault(key, EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append((event, 1)) + self.check_for_match(key, end=False) + + # We call this multiple times with the 'end' parameter because subclasses + # may or may not want to check while only partial data has been added + if key in self.occurrences: # could have been emptied by previous check + self.check_for_match(key, end=True) + + def check_for_match(self, key, end=False): + # Match if, after removing old events, we hit num_events. 
+        # the 'end' parameter depends on whether this was called from the
+        # middle or end of an add_data call and is used in subclasses
+        if self.occurrences[key].count() >= self.rules['num_events']:
+            event = self.occurrences[key].data[-1][0]
+            if self.attach_related:
+                event['related_events'] = [data[0] for data in self.occurrences[key].data[:-1]]
+            self.add_match(event)
+            self.occurrences.pop(key)
+
+    def garbage_collect(self, timestamp):
+        """ Remove all occurrence data that is beyond the timeframe away """
+        stale_keys = []
+        for key, window in self.occurrences.iteritems():
+            if timestamp - lookup_es_key(window.data[-1][0], self.ts_field) > self.rules['timeframe']:
+                stale_keys.append(key)
+        map(self.occurrences.pop, stale_keys)
+
+    def get_match_str(self, match):
+        lt = self.rules.get('use_local_time')
+        match_ts = lookup_es_key(match, self.ts_field)
+        starttime = pretty_ts(dt_to_ts(ts_to_dt(match_ts) - self.rules['timeframe']), lt)
+        endtime = pretty_ts(match_ts, lt)
+        message = 'At least %d events occurred between %s and %s\n\n' % (self.rules['num_events'],
+                                                                         starttime,
+                                                                         endtime)
+        return message
+
+class AnyRule(RuleType):
+    """ A rule that will match on any input data """
+
+    def add_data(self, data):
+        for datum in data:
+            self.add_match(datum)
+
+
+class EventWindow(object):
+    """ A container that holds event counts for rules which need a chronologically ordered event window. """
+
+    def __init__(self, timeframe, onRemoved=None, getTimestamp=new_get_event_ts('@timestamp')):
+        self.timeframe = timeframe
+        self.onRemoved = onRemoved
+        self.get_ts = getTimestamp
+        self.data = sortedlist(key=self.get_ts)
+        self.running_count = 0
+
+    def clear(self):
+        self.data = sortedlist(key=self.get_ts)
+        self.running_count = 0
+
+    def append(self, event):
+        """ Add an event to the window. Event should be of the form (dict, count).
+        This will also pop the oldest events and call onRemoved on them until the
+        window size is less than timeframe. """
+        self.data.add(event)
+        self.running_count += event[1]
+
+        while self.duration() >= self.timeframe:
+            oldest = self.data[0]
+            self.data.remove(oldest)
+            self.running_count -= oldest[1]
+            self.onRemoved and self.onRemoved(oldest)
+
+    def duration(self):
+        """ Get the size in timedelta of the window. """
+        if not self.data:
+            return datetime.timedelta(0)
+        return self.get_ts(self.data[-1]) - self.get_ts(self.data[0])
+
+    def count(self):
+        """ Count the number of events in the window. """
+        return self.running_count
+
+    def mean(self):
+        """ Compute the mean of the value_field in the window. """
+        if len(self.data) > 0:
+            datasum = 0
+            datalen = 0
+            for dat in self.data:
+                if "placeholder" not in dat[0]:
+                    datasum += dat[1]
+                    datalen += 1
+            if datalen > 0:
+                return datasum / float(datalen)
+            return None
+        else:
+            return None
+
+    def __iter__(self):
+        return iter(self.data)
+
+    def append_middle(self, event):
+        """ Attempt to place the event in the correct location in our deque.
+        Returns True if successful, otherwise False.
""" + rotation = 0 + ts = self.get_ts(event) + + # Append left if ts is earlier than first event + if self.get_ts(self.data[0]) > ts: + self.data.appendleft(event) + self.running_count += event[1] + return + + # Rotate window until we can insert event + while self.get_ts(self.data[-1]) > ts: + self.data.rotate(1) + rotation += 1 + if rotation == len(self.data): + # This should never happen + return + self.data.append(event) + self.running_count += event[1] + self.data.rotate(-rotation) + + +class SpikeRule(RuleType): + """ A rule that uses two sliding windows to compare relative event frequency. """ + required_options = frozenset(['timeframe', 'spike_height', 'spike_type']) + + def __init__(self, *args): + super(SpikeRule, self).__init__(*args) + self.timeframe = self.rules['timeframe'] + + self.ref_windows = {} + self.cur_windows = {} + + self.ts_field = self.rules.get('timestamp_field', '@timestamp') + self.get_ts = new_get_event_ts(self.ts_field) + self.first_event = {} + self.skip_checks = {} + + self.field_value = self.rules.get('field_value') + + self.ref_window_filled_once = False + + def add_count_data(self, data): + """ Add count data to the rule. Data should be of the form {ts: count}. """ + if len(data) > 1: + raise EAException('add_count_data can only accept one count at a time') + for ts, count in data.iteritems(): + self.handle_event({self.ts_field: ts}, count, 'all') + + def add_terms_data(self, terms): + for timestamp, buckets in terms.iteritems(): + for bucket in buckets: + count = bucket['doc_count'] + event = {self.ts_field: timestamp, + self.rules['query_key']: bucket['key']} + key = bucket['key'] + self.handle_event(event, count, key) + + def add_data(self, data): + for event in data: + qk = self.rules.get('query_key', 'all') + if qk != 'all': + qk = hashable(lookup_es_key(event, qk)) + if qk is None: + qk = 'other' + if self.field_value is not None: + if self.field_value in event: + count = lookup_es_key(event, self.field_value) + if count is not None: + try: + count = int(count) + except ValueError: + elastalert_logger.warn('{} is not a number: {}'.format(self.field_value, count)) + else: + self.handle_event(event, count, qk) + else: + self.handle_event(event, 1, qk) + + def clear_windows(self, qk, event): + # Reset the state and prevent alerts until windows filled again + self.ref_windows[qk].clear() + self.first_event.pop(qk) + self.skip_checks[qk] = lookup_es_key(event, self.ts_field) + self.rules['timeframe'] * 2 + + def handle_event(self, event, count, qk='all'): + self.first_event.setdefault(qk, event) + + self.ref_windows.setdefault(qk, EventWindow(self.timeframe, getTimestamp=self.get_ts)) + self.cur_windows.setdefault(qk, EventWindow(self.timeframe, self.ref_windows[qk].append, self.get_ts)) + + self.cur_windows[qk].append((event, count)) + + # Don't alert if ref window has not yet been filled for this key AND + if lookup_es_key(event, self.ts_field) - self.first_event[qk][self.ts_field] < self.rules['timeframe'] * 2: + # ElastAlert has not been running long enough for any alerts OR + if not self.ref_window_filled_once: + return + # This rule is not using alert_on_new_data (with query_key) OR + if not (self.rules.get('query_key') and self.rules.get('alert_on_new_data')): + return + # An alert for this qk has recently fired + if qk in self.skip_checks and lookup_es_key(event, self.ts_field) < self.skip_checks[qk]: + return + else: + self.ref_window_filled_once = True + + if self.field_value is not None: + if self.find_matches(self.ref_windows[qk].mean(), 
self.cur_windows[qk].mean()):
+                # skip over placeholder events
+                for match, count in self.cur_windows[qk].data:
+                    if "placeholder" not in match:
+                        break
+                self.add_match(match, qk)
+                self.clear_windows(qk, match)
+        else:
+            if self.find_matches(self.ref_windows[qk].count(), self.cur_windows[qk].count()):
+                # skip over placeholder events which have count=0
+                for match, count in self.cur_windows[qk].data:
+                    if count:
+                        break
+
+                self.add_match(match, qk)
+                self.clear_windows(qk, match)
+
+    def add_match(self, match, qk):
+        extra_info = {}
+        if self.field_value is None:
+            spike_count = self.cur_windows[qk].count()
+            reference_count = self.ref_windows[qk].count()
+        else:
+            spike_count = self.cur_windows[qk].mean()
+            reference_count = self.ref_windows[qk].mean()
+        extra_info = {'spike_count': spike_count,
+                      'reference_count': reference_count}
+
+        match = dict(match.items() + extra_info.items())
+
+        super(SpikeRule, self).add_match(match)
+
+    def find_matches(self, ref, cur):
+        """ Determines whether an event spike or dip is happening. """
+        # Apply threshold limits
+        if self.field_value is None:
+            if (cur < self.rules.get('threshold_cur', 0) or
+                    ref < self.rules.get('threshold_ref', 0)):
+                return False
+        elif ref is None or ref == 0 or cur is None or cur == 0:
+            return False
+
+        spike_up, spike_down = False, False
+        if cur <= ref / self.rules['spike_height']:
+            spike_down = True
+        if cur >= ref * self.rules['spike_height']:
+            spike_up = True
+
+        if (self.rules['spike_type'] in ['both', 'up'] and spike_up) or \
+           (self.rules['spike_type'] in ['both', 'down'] and spike_down):
+            return True
+        return False
+
+    def get_match_str(self, match):
+        if self.field_value is None:
+            message = 'An abnormal number (%d) of events occurred around %s.\n' % (
+                match['spike_count'],
+                pretty_ts(match[self.rules['timestamp_field']], self.rules.get('use_local_time'))
+            )
+            message += 'Preceding that time, there were only %d events within %s\n\n' % (match['reference_count'], self.rules['timeframe'])
+        else:
+            message = 'An abnormal average value (%.2f) of field \'%s\' occurred around %s.\n' % (
+                match['spike_count'],
+                self.field_value,
+                pretty_ts(match[self.rules['timestamp_field']],
+                          self.rules.get('use_local_time'))
+            )
+            message += 'Preceding that time, the field had an average value of (%.2f) within %s\n\n' % (
+                match['reference_count'], self.rules['timeframe'])
+        return message
+
+    def garbage_collect(self, ts):
+        # Windows are sized according to their newest event
+        # This is a placeholder to accurately size windows in the absence of events
+        for qk in self.cur_windows.keys():
+            # If we haven't seen this key in a long time, forget it
+            if qk != 'all' and self.ref_windows[qk].count() == 0 and self.cur_windows[qk].count() == 0:
+                self.cur_windows.pop(qk)
+                self.ref_windows.pop(qk)
+                continue
+            placeholder = {self.ts_field: ts, "placeholder": True}
+            # The placeholder may trigger an alert, in which case, qk will be expected
+            if qk != 'all':
+                placeholder.update({self.rules['query_key']: qk})
+            self.handle_event(placeholder, 0, qk)
+
+
+class FlatlineRule(FrequencyRule):
+    """ A rule that matches when there is a low number of events given a timeframe.
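+        For example (hypothetical values), threshold: 10 with timeframe: {hours: 1} alerts when fewer than 10 events arrive in any one-hour window.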
""" + required_options = frozenset(['timeframe', 'threshold']) + + def __init__(self, *args): + super(FlatlineRule, self).__init__(*args) + self.threshold = self.rules['threshold'] + + # Dictionary mapping query keys to the first events + self.first_event = {} + + def check_for_match(self, key, end=True): + # This function gets called between every added document with end=True after the last + # We ignore the calls before the end because it may trigger false positives + if not end: + return + + most_recent_ts = self.get_ts(self.occurrences[key].data[-1]) + if self.first_event.get(key) is None: + self.first_event[key] = most_recent_ts + + # Don't check for matches until timeframe has elapsed + if most_recent_ts - self.first_event[key] < self.rules['timeframe']: + return + + # Match if, after removing old events, we hit num_events + count = self.occurrences[key].count() + if count < self.rules['threshold']: + # Do a deep-copy, otherwise we lose the datetime type in the timestamp field of the last event + event = copy.deepcopy(self.occurrences[key].data[-1][0]) + event.update(key=key, count=count) + self.add_match(event) + + if not self.rules.get('forget_keys'): + # After adding this match, leave the occurrences windows alone since it will + # be pruned in the next add_data or garbage_collect, but reset the first_event + # so that alerts continue to fire until the threshold is passed again. + least_recent_ts = self.get_ts(self.occurrences[key].data[0]) + timeframe_ago = most_recent_ts - self.rules['timeframe'] + self.first_event[key] = min(least_recent_ts, timeframe_ago) + else: + # Forget about this key until we see it again + self.first_event.pop(key) + self.occurrences.pop(key) + + def get_match_str(self, match): + ts = match[self.rules['timestamp_field']] + lt = self.rules.get('use_local_time') + message = 'An abnormally low number of events occurred around %s.\n' % (pretty_ts(ts, lt)) + message += 'Between %s and %s, there were less than %s events.\n\n' % ( + pretty_ts(dt_to_ts(ts_to_dt(ts) - self.rules['timeframe']), lt), + pretty_ts(ts, lt), + self.rules['threshold'] + ) + return message + + def garbage_collect(self, ts): + # We add an event with a count of zero to the EventWindow for each key. This will cause the EventWindow + # to remove events that occurred more than one `timeframe` ago, and call onRemoved on them. + default = ['all'] if 'query_key' not in self.rules else [] + for key in self.occurrences.keys() or default: + self.occurrences.setdefault( + key, + EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts) + ).append( + ({self.ts_field: ts}, 0) + ) + self.first_event.setdefault(key, ts) + self.check_for_match(key) + + +class NewTermsRule(RuleType): + """ Alerts on a new value in a list of fields. 
""" + + def __init__(self, rule, args=None): + super(NewTermsRule, self).__init__(rule, args) + self.seen_values = {} + # Allow the use of query_key or fields + if 'fields' not in self.rules: + if 'query_key' not in self.rules: + raise EAException("fields or query_key must be specified") + self.fields = self.rules['query_key'] + else: + self.fields = self.rules['fields'] + if not self.fields: + raise EAException("fields must not be an empty list") + if type(self.fields) != list: + self.fields = [self.fields] + if self.rules.get('use_terms_query') and \ + (len(self.fields) != 1 or (len(self.fields) == 1 and type(self.fields[0]) == list)): + raise EAException("use_terms_query can only be used with a single non-composite field") + if self.rules.get('use_terms_query'): + if [self.rules['query_key']] != self.fields: + raise EAException('If use_terms_query is specified, you cannot specify different query_key and fields') + if not self.rules.get('query_key').endswith('.keyword') and not self.rules.get('query_key').endswith('.raw'): + if self.rules.get('use_keyword_postfix', True): + elastalert_logger.warn('Warning: If query_key is a non-keyword field, you must set ' + 'use_keyword_postfix to false, or add .keyword/.raw to your query_key.') + try: + self.get_all_terms(args) + except Exception as e: + # Refuse to start if we cannot get existing terms + raise EAException('Error searching for existing terms: %s' % (repr(e))), None, sys.exc_info()[2] + + def get_all_terms(self, args): + """ Performs a terms aggregation for each field to get every existing term. """ + self.es = elasticsearch_client(self.rules) + window_size = datetime.timedelta(**self.rules.get('terms_window_size', {'days': 30})) + field_name = {"field": "", "size": 2147483647} # Integer.MAX_VALUE + query_template = {"aggs": {"values": {"terms": field_name}}} + if args and hasattr(args, 'start') and args.start: + end = ts_to_dt(args.start) + elif 'start_date' in self.rules: + end = ts_to_dt(self.rules['start_date']) + else: + end = ts_now() + start = end - window_size + step = datetime.timedelta(**self.rules.get('window_step_size', {'days': 1})) + + for field in self.fields: + tmp_start = start + tmp_end = min(start + step, end) + + time_filter = {self.rules['timestamp_field']: {'lt': self.rules['dt_to_ts'](tmp_end), 'gte': self.rules['dt_to_ts'](tmp_start)}} + query_template['filter'] = {'bool': {'must': [{'range': time_filter}]}} + query = {'aggs': {'filtered': query_template}} + + if 'filter' in self.rules: + for item in self.rules['filter']: + query_template['filter']['bool']['must'].append(item) + + # For composite keys, we will need to perform sub-aggregations + if type(field) == list: + self.seen_values.setdefault(tuple(field), []) + level = query_template['aggs'] + # Iterate on each part of the composite key and add a sub aggs clause to the elastic search query + for i, sub_field in enumerate(field): + if self.rules.get('use_keyword_postfix', True): + level['values']['terms']['field'] = add_raw_postfix(sub_field, self.is_five_or_above()) + else: + level['values']['terms']['field'] = sub_field + if i < len(field) - 1: + # If we have more fields after the current one, then set up the next nested structure + level['values']['aggs'] = {'values': {'terms': copy.deepcopy(field_name)}} + level = level['values']['aggs'] + else: + self.seen_values.setdefault(field, []) + # For non-composite keys, only a single agg is needed + if self.rules.get('use_keyword_postfix', True): + field_name['field'] = add_raw_postfix(field, 
self.is_five_or_above()) + else: + field_name['field'] = field + + # Query the entire time range in small chunks + while tmp_start < end: + if self.rules.get('use_strftime_index'): + index = format_index(self.rules['index'], tmp_start, tmp_end) + else: + index = self.rules['index'] + res = self.es.search(body=query, index=index, ignore_unavailable=True, timeout='50s') + if 'aggregations' in res: + buckets = res['aggregations']['filtered']['values']['buckets'] + if type(field) == list: + # For composite keys, make the lookup based on all fields + # Make it a tuple since it can be hashed and used in dictionary lookups + for bucket in buckets: + # We need to walk down the hierarchy and obtain the value at each level + self.seen_values[tuple(field)] += self.flatten_aggregation_hierarchy(bucket) + else: + keys = [bucket['key'] for bucket in buckets] + self.seen_values[field] += keys + else: + if type(field) == list: + self.seen_values.setdefault(tuple(field), []) + else: + self.seen_values.setdefault(field, []) + if tmp_start == tmp_end: + break + tmp_start = tmp_end + tmp_end = min(tmp_start + step, end) + time_filter[self.rules['timestamp_field']] = {'lt': self.rules['dt_to_ts'](tmp_end), + 'gte': self.rules['dt_to_ts'](tmp_start)} + + for key, values in self.seen_values.iteritems(): + if not values: + if type(key) == tuple: + # If we don't have any results, it could either be because of the absence of any baseline data + # OR it may be because the composite key contained a non-primitive type. Either way, give the + # end-users a heads up to help them debug what might be going on. + elastalert_logger.warning(( + 'No results were found from all sub-aggregations. This can either indicate that there is ' + 'no baseline data OR that a non-primitive field was used in a composite key.' 
+ )) + else: + elastalert_logger.info('Found no values for %s' % (field)) + continue + self.seen_values[key] = list(set(values)) + elastalert_logger.info('Found %s unique values for %s' % (len(set(values)), key)) + + def flatten_aggregation_hierarchy(self, root, hierarchy_tuple=()): + """ For nested aggregations, the results come back in the following format: + { + "aggregations" : { + "filtered" : { + "doc_count" : 37, + "values" : { + "doc_count_error_upper_bound" : 0, + "sum_other_doc_count" : 0, + "buckets" : [ { + "key" : "1.1.1.1", # IP address (root) + "doc_count" : 13, + "values" : { + "doc_count_error_upper_bound" : 0, + "sum_other_doc_count" : 0, + "buckets" : [ { + "key" : "80", # Port (sub-aggregation) + "doc_count" : 3, + "values" : { + "doc_count_error_upper_bound" : 0, + "sum_other_doc_count" : 0, + "buckets" : [ { + "key" : "ack", # Reason (sub-aggregation, leaf-node) + "doc_count" : 3 + }, { + "key" : "syn", # Reason (sub-aggregation, leaf-node) + "doc_count" : 1 + } ] + } + }, { + "key" : "82", # Port (sub-aggregation) + "doc_count" : 3, + "values" : { + "doc_count_error_upper_bound" : 0, + "sum_other_doc_count" : 0, + "buckets" : [ { + "key" : "ack", # Reason (sub-aggregation, leaf-node) + "doc_count" : 3 + }, { + "key" : "syn", # Reason (sub-aggregation, leaf-node) + "doc_count" : 3 + } ] + } + } ] + } + }, { + "key" : "2.2.2.2", # IP address (root) + "doc_count" : 4, + "values" : { + "doc_count_error_upper_bound" : 0, + "sum_other_doc_count" : 0, + "buckets" : [ { + "key" : "443", # Port (sub-aggregation) + "doc_count" : 3, + "values" : { + "doc_count_error_upper_bound" : 0, + "sum_other_doc_count" : 0, + "buckets" : [ { + "key" : "ack", # Reason (sub-aggregation, leaf-node) + "doc_count" : 3 + }, { + "key" : "syn", # Reason (sub-aggregation, leaf-node) + "doc_count" : 3 + } ] + } + } ] + } + } ] + } + } + } + } + + Each level will either have more values and buckets, or it will be a leaf node + We'll ultimately return a flattened list with the hierarchies appended as strings, + e.g the above snippet would yield a list with: + + [ + ('1.1.1.1', '80', 'ack'), + ('1.1.1.1', '80', 'syn'), + ('1.1.1.1', '82', 'ack'), + ('1.1.1.1', '82', 'syn'), + ('2.2.2.2', '443', 'ack'), + ('2.2.2.2', '443', 'syn') + ] + + A similar formatting will be performed in the add_data method and used as the basis for comparison + + """ + results = [] + # There are more aggregation hierarchies left. Traverse them. 
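+        # 'values' is the sub-aggregation name set up in get_all_terms' query_template, so its presence means another nesting level follows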
+ if 'values' in root: + results += self.flatten_aggregation_hierarchy(root['values']['buckets'], hierarchy_tuple + (root['key'],)) + else: + # We've gotten to a sub-aggregation, which may have further sub-aggregations + # See if we need to traverse further + for node in root: + if 'values' in node: + results += self.flatten_aggregation_hierarchy(node, hierarchy_tuple) + else: + results.append(hierarchy_tuple + (node['key'],)) + return results + + def add_data(self, data): + for document in data: + for field in self.fields: + value = () + lookup_field = field + if type(field) == list: + # For composite keys, make the lookup based on all fields + # Make it a tuple since it can be hashed and used in dictionary lookups + lookup_field = tuple(field) + for sub_field in field: + lookup_result = lookup_es_key(document, sub_field) + if not lookup_result: + value = None + break + value += (lookup_result,) + else: + value = lookup_es_key(document, field) + if not value and self.rules.get('alert_on_missing_field'): + document['missing_field'] = lookup_field + self.add_match(copy.deepcopy(document)) + elif value: + if value not in self.seen_values[lookup_field]: + document['new_field'] = lookup_field + self.add_match(copy.deepcopy(document)) + self.seen_values[lookup_field].append(value) + + def add_terms_data(self, terms): + # With terms query, len(self.fields) is always 1 and the 0'th entry is always a string + field = self.fields[0] + for timestamp, buckets in terms.iteritems(): + for bucket in buckets: + if bucket['doc_count']: + if bucket['key'] not in self.seen_values[field]: + match = {field: bucket['key'], + self.rules['timestamp_field']: timestamp, + 'new_field': field} + self.add_match(match) + self.seen_values[field].append(bucket['key']) + + def is_five_or_above(self): + version = self.es.info()['version']['number'] + return int(version[0]) >= 5 + + +class CardinalityRule(RuleType): + """ A rule that matches if cardinality of a field is above or below a threshold within a timeframe """ + required_options = frozenset(['timeframe', 'cardinality_field']) + + def __init__(self, *args): + super(CardinalityRule, self).__init__(*args) + if 'max_cardinality' not in self.rules and 'min_cardinality' not in self.rules: + raise EAException("CardinalityRule must have one of either max_cardinality or min_cardinality") + self.ts_field = self.rules.get('timestamp_field', '@timestamp') + self.cardinality_field = self.rules['cardinality_field'] + self.cardinality_cache = {} + self.first_event = {} + self.timeframe = self.rules['timeframe'] + + def add_data(self, data): + qk = self.rules.get('query_key') + for event in data: + if qk: + key = hashable(lookup_es_key(event, qk)) + else: + # If no query_key, we use the key 'all' for all events + key = 'all' + self.cardinality_cache.setdefault(key, {}) + self.first_event.setdefault(key, lookup_es_key(event, self.ts_field)) + value = hashable(lookup_es_key(event, self.cardinality_field)) + if value is not None: + # Store this timestamp as most recent occurence of the term + self.cardinality_cache[key][value] = lookup_es_key(event, self.ts_field) + self.check_for_match(key, event) + + def check_for_match(self, key, event, gc=True): + # Check to see if we are past max/min_cardinality for a given key + time_elapsed = lookup_es_key(event, self.ts_field) - self.first_event.get(key, lookup_es_key(event, self.ts_field)) + timeframe_elapsed = time_elapsed > self.timeframe + if (len(self.cardinality_cache[key]) > self.rules.get('max_cardinality', float('inf')) or + 
(len(self.cardinality_cache[key]) < self.rules.get('min_cardinality', float('-inf')) and timeframe_elapsed)): + # If there might be a match, run garbage collect first, as outdated terms are only removed in GC + # Only run it if there might be a match so it doesn't impact performance + if gc: + self.garbage_collect(lookup_es_key(event, self.ts_field)) + self.check_for_match(key, event, False) + else: + self.first_event.pop(key, None) + self.add_match(event) + + def garbage_collect(self, timestamp): + """ Remove all occurrence data that is beyond the timeframe away """ + for qk, terms in self.cardinality_cache.items(): + for term, last_occurence in terms.items(): + if timestamp - last_occurence > self.rules['timeframe']: + self.cardinality_cache[qk].pop(term) + + # Create a placeholder event for if a min_cardinality match occured + if 'min_cardinality' in self.rules: + event = {self.ts_field: timestamp} + if 'query_key' in self.rules: + event.update({self.rules['query_key']: qk}) + self.check_for_match(qk, event, False) + + def get_match_str(self, match): + lt = self.rules.get('use_local_time') + starttime = pretty_ts(dt_to_ts(ts_to_dt(match[self.ts_field]) - self.rules['timeframe']), lt) + endtime = pretty_ts(match[self.ts_field], lt) + if 'max_cardinality' in self.rules: + message = ('A maximum of %d unique %s(s) occurred since last alert or between %s and %s\n\n' % (self.rules['max_cardinality'], + self.rules['cardinality_field'], + starttime, endtime)) + else: + message = ('Less than %d unique %s(s) occurred since last alert or between %s and %s\n\n' % (self.rules['min_cardinality'], + self.rules['cardinality_field'], + starttime, endtime)) + return message + + +class BaseAggregationRule(RuleType): + def __init__(self, *args): + super(BaseAggregationRule, self).__init__(*args) + bucket_interval = self.rules.get('bucket_interval') + if bucket_interval: + if 'seconds' in bucket_interval: + self.rules['bucket_interval_period'] = str(bucket_interval['seconds']) + 's' + elif 'minutes' in bucket_interval: + self.rules['bucket_interval_period'] = str(bucket_interval['minutes']) + 'm' + elif 'hours' in bucket_interval: + self.rules['bucket_interval_period'] = str(bucket_interval['hours']) + 'h' + elif 'days' in bucket_interval: + self.rules['bucket_interval_period'] = str(bucket_interval['days']) + 'd' + elif 'weeks' in bucket_interval: + self.rules['bucket_interval_period'] = str(bucket_interval['weeks']) + 'w' + else: + raise EAException("Unsupported window size") + + if self.rules.get('use_run_every_query_size'): + if total_seconds(self.rules['run_every']) % total_seconds(self.rules['bucket_interval_timedelta']) != 0: + raise EAException("run_every must be evenly divisible by bucket_interval if specified") + else: + if total_seconds(self.rules['buffer_time']) % total_seconds(self.rules['bucket_interval_timedelta']) != 0: + raise EAException("Buffer_time must be evenly divisible by bucket_interval if specified") + + def generate_aggregation_query(self): + raise NotImplementedError() + + def add_aggregation_data(self, payload): + for timestamp, payload_data in payload.iteritems(): + if 'interval_aggs' in payload_data: + self.unwrap_interval_buckets(timestamp, None, payload_data['interval_aggs']['buckets']) + elif 'bucket_aggs' in payload_data: + self.unwrap_term_buckets(timestamp, payload_data['bucket_aggs']['buckets']) + else: + self.check_matches(timestamp, None, payload_data) + + def unwrap_interval_buckets(self, timestamp, query_key, interval_buckets): + for interval_data in 
interval_buckets:
+            # Use bucket key here instead of start_time for more accurate match timestamp
+            self.check_matches(ts_to_dt(interval_data['key_as_string']), query_key, interval_data)
+
+    def unwrap_term_buckets(self, timestamp, term_buckets):
+        for term_data in term_buckets:
+            if 'interval_aggs' in term_data:
+                self.unwrap_interval_buckets(timestamp, term_data['key'], term_data['interval_aggs']['buckets'])
+            else:
+                self.check_matches(timestamp, term_data['key'], term_data)
+
+    def check_matches(self, timestamp, query_key, aggregation_data):
+        raise NotImplementedError()
+
+class ErrorRateRule(BaseAggregationRule):
+    """ A rule that computes an error rate, adjusted for a sampling rate """
+    required_options = frozenset(['sampling', 'threshold','error_condition','unique_column'])
+    def __init__(self, *args):
+        super(ErrorRateRule, self).__init__(*args)
+
+        self.ts_field = self.rules.get('timestamp_field', '@timestamp')
+        self.rules['total_agg_key'] = self.rules['unique_column']
+        self.rules['count_all_errors'] = True
+
+        if (self.rules.has_key('error_calculation_method') and self.rules['error_calculation_method']=='count_traces_with_errors' ):
+            self.rules['count_all_errors'] = False
+
+        # hardcoding uniq aggregation for total count
+        self.rules['total_agg_type'] = "uniq"
+
+    def calculate_err_rate(self,payload):
+        for timestamp, payload_data in payload.iteritems():
+            if int(payload_data['total_count']) > 0:
+                rate = float(payload_data['error_count'])/float(payload_data['total_count'])
+                rate = float(rate)/float(self.rules['sampling'])
+                rate = rate*100
+                if 'threshold' in self.rules and rate > self.rules['threshold']:
+                    match = {self.rules['timestamp_field']: timestamp, 'error_rate': rate, 'from': payload_data['start_time'], 'to': payload_data['end_time']}
+                    self.add_match(match)
+
+
+class MetricAggregationRule(BaseAggregationRule):
+    """ A rule that matches when a metric aggregation value crosses a min/max threshold.
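+        For instance (hypothetical values), metric_agg_key: response_time with metric_agg_type: avg and max_threshold: 500 fires once the average exceeds 500.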
""" + required_options = frozenset(['metric_agg_key', 'metric_agg_type', 'doc_type']) + allowed_aggregations = frozenset(['min', 'max', 'avg', 'sum', 'cardinality', 'value_count']) + + def __init__(self, *args): + super(MetricAggregationRule, self).__init__(*args) + self.ts_field = self.rules.get('timestamp_field', '@timestamp') + if 'max_threshold' not in self.rules and 'min_threshold' not in self.rules: + raise EAException("MetricAggregationRule must have at least one of either max_threshold or min_threshold") + + self.metric_key = self.rules['metric_agg_key'] + '_' + self.rules['metric_agg_type'] + + if not self.rules['metric_agg_type'] in self.allowed_aggregations: + raise EAException("metric_agg_type must be one of %s" % (str(self.allowed_aggregations))) + + self.rules['aggregation_query_element'] = self.generate_aggregation_query() + + def get_match_str(self, match): + message = 'Threshold violation, %s:%s %s (min: %s max : %s) \n\n' % ( + self.rules['metric_agg_type'], + self.rules['metric_agg_key'], + match[self.metric_key], + self.rules.get('min_threshold'), + self.rules.get('max_threshold') + ) + return message + + def generate_aggregation_query(self): + return {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + + def check_matches(self, timestamp, query_key, aggregation_data): + if "compound_query_key" in self.rules: + self.check_matches_recursive(timestamp, query_key, aggregation_data, self.rules['compound_query_key'], dict()) + + else: + metric_val = aggregation_data[self.metric_key]['value'] + if self.crossed_thresholds(metric_val): + match = {self.rules['timestamp_field']: timestamp, + self.metric_key: metric_val} + if query_key is not None: + match[self.rules['query_key']] = query_key + self.add_match(match) + + def check_matches_recursive(self, timestamp, query_key, aggregation_data, compound_keys, match_data): + if len(compound_keys) < 1: + # shouldn't get to this point, but checking for safety + return + + match_data[compound_keys[0]] = aggregation_data['key'] + if 'bucket_aggs' in aggregation_data: + for result in aggregation_data['bucket_aggs']['buckets']: + self.check_matches_recursive(timestamp, + query_key, + result, + compound_keys[1:], + match_data) + + else: + metric_val = aggregation_data[self.metric_key]['value'] + if self.crossed_thresholds(metric_val): + match_data[self.rules['timestamp_field']] = timestamp + match_data[self.metric_key] = metric_val + + # add compound key to payload to allow alerts to trigger for every unique occurence + compound_value = [match_data[key] for key in self.rules['compound_query_key']] + match_data[self.rules['query_key']] = ",".join(compound_value) + + self.add_match(match_data) + + def crossed_thresholds(self, metric_value): + if metric_value is None: + return False + if 'max_threshold' in self.rules and metric_value > self.rules['max_threshold']: + return True + if 'min_threshold' in self.rules and metric_value < self.rules['min_threshold']: + return True + return False + + +class PercentageMatchRule(BaseAggregationRule): + required_options = frozenset(['match_bucket_filter']) + + def __init__(self, *args): + super(PercentageMatchRule, self).__init__(*args) + self.ts_field = self.rules.get('timestamp_field', '@timestamp') + if 'max_percentage' not in self.rules and 'min_percentage' not in self.rules: + raise EAException("PercentageMatchRule must have at least one of either min_percentage or max_percentage") + + self.match_bucket_filter = self.rules['match_bucket_filter'] + 
self.rules['aggregation_query_element'] = self.generate_aggregation_query() + + def get_match_str(self, match): + percentage_format_string = self.rules.get('percentage_format_string', None) + message = 'Percentage violation, value: %s (min: %s max : %s) of %s items\n\n' % ( + percentage_format_string % (match['percentage']) if percentage_format_string else match['percentage'], + self.rules.get('min_percentage'), + self.rules.get('max_percentage'), + match['denominator'] + ) + return message + + def generate_aggregation_query(self): + return { + 'percentage_match_aggs': { + 'filters': { + 'other_bucket': True, + 'filters': { + 'match_bucket': { + 'bool': { + 'must': self.match_bucket_filter + } + } + } + } + } + } + + def check_matches(self, timestamp, query_key, aggregation_data): + match_bucket_count = aggregation_data['percentage_match_aggs']['buckets']['match_bucket']['doc_count'] + other_bucket_count = aggregation_data['percentage_match_aggs']['buckets']['_other_']['doc_count'] + + if match_bucket_count is None or other_bucket_count is None: + return + else: + total_count = other_bucket_count + match_bucket_count + if total_count == 0: + return + else: + match_percentage = (match_bucket_count * 1.0) / (total_count * 1.0) * 100 + if self.percentage_violation(match_percentage): + match = {self.rules['timestamp_field']: timestamp, 'percentage': match_percentage, 'denominator': total_count} + if query_key is not None: + match[self.rules['query_key']] = query_key + self.add_match(match) + + def percentage_violation(self, match_percentage): + if 'max_percentage' in self.rules and match_percentage > self.rules['max_percentage']: + return True + if 'min_percentage' in self.rules and match_percentage < self.rules['min_percentage']: + return True + return False diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 00c2ce8d3..128c1c3c6 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -257,14 +257,37 @@ def get_query(filters, starttime=None, endtime=None, sort=True, timestamp_field= query['sort'] = [{timestamp_field: {'order': 'desc' if desc else 'asc'}}] return query - def get_terms_query(self, query, rule, size, field): + def get_terms_query(self, query, rule, size, field, five=False): """ Takes a query generated by get_query and outputs a aggregation query """ query_element = query['query'] if 'sort' in query_element: query_element.pop('sort') - aggs_query = query - aggs_query['aggs'] = {'counts': {'terms': {'field': field, 'size': size}}} - + if not five: + query_element['filtered'].update({'aggs': {'counts': {'terms': {'field': field, + 'size': size, + 'min_doc_count': rule.get('min_doc_count', 1)}}}}) + aggs_query = {'aggs': query_element} + else: + if 'nested_query_key' in rule and rule['nested_query_key'] == True and len(field.split(",")) > 1: + aggs_query = query + query_key_list = field.split(",") + first_query_key = query_key_list.pop() + aggs_element = {'counts': {'terms': {'field': first_query_key, + 'size': size, + 'min_doc_count': rule.get('min_doc_count', 1)}}} + + if len(query_key_list) > 0: + for key in reversed(query_key_list): + aggs_element = {'counts': {'terms': {'field': key, 'size': size, + 'min_doc_count': rule.get('min_doc_count', 1)}, 'aggs': aggs_element}} + aggs_query['aggs'] = aggs_element + else: + aggs_query = query + aggs_query['aggs'] = {'counts': {'terms': {'field': field, + 'size': size, + 'min_doc_count': rule.get('min_doc_count', 1)}}} + + return aggs_query def get_aggregation_query(self, query, rule, query_key, 
terms_size, timestamp_field='@timestamp'): diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index e37c50650..663ef3a37 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -223,12 +223,33 @@ def add_count_data(self, data): self.check_for_match('all') def add_terms_data(self, terms): - for timestamp, buckets in terms.items(): - for bucket in buckets: + if 'nested_query_key' in self.rules and self.rules['nested_query_key'] == True: + #letting this log message stay inorder to debug issues in future + elastalert_logger.info(terms) + for timestamp, buckets in terms.iteritems(): + self.flatten_nested_aggregations(timestamp,buckets) + else: + for timestamp, buckets in terms.iteritems(): + for bucket in buckets: + event = ({self.ts_field: timestamp, + self.rules['query_key']: bucket['key']}, bucket['doc_count']) + self.occurrences.setdefault(bucket['key'], EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event) + self.check_for_match(bucket['key']) + + def flatten_nested_aggregations(self,timestamp,buckets,key=None): + for bucket in buckets: + if key == None: + nestedkey = str(bucket['key']) + else: + nestedkey = key + ',' + str(bucket['key']) + if 'counts' in bucket: + self.flatten_nested_aggregations(timestamp,bucket['counts']['buckets'],nestedkey) + else: event = ({self.ts_field: timestamp, - self.rules['query_key']: bucket['key']}, bucket['doc_count']) - self.occurrences.setdefault(bucket['key'], EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event) - self.check_for_match(bucket['key']) + self.rules['query_key']: nestedkey}, bucket['doc_count']) + self.occurrences.setdefault(nestedkey, EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event) + self.check_for_match(nestedkey) + def add_data(self, data): if 'query_key' in self.rules: From e0adedb42a43646b46da4726c7f52589d4b9e141 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Tue, 21 Mar 2023 09:54:52 +0530 Subject: [PATCH 1027/1065] removed unwanted changes --- .scratch | 1212 ------------------------------------------------------ 1 file changed, 1212 deletions(-) delete mode 100644 .scratch diff --git a/.scratch b/.scratch deleted file mode 100644 index f0635f3d7..000000000 --- a/.scratch +++ /dev/null @@ -1,1212 +0,0 @@ -# -*- coding: utf-8 -*- -import copy -import datetime -import sys -import time -import json - -from blist import sortedlist -from util import add_raw_postfix -from util import dt_to_ts -from util import EAException -from util import elastalert_logger -from util import elasticsearch_client -from util import format_index -from util import hashable -from util import lookup_es_key -from util import new_get_event_ts -from util import pretty_ts -from util import total_seconds -from util import ts_now -from util import ts_to_dt - - -class RuleType(object): - """ The base class for a rule type. - The class must implement add_data and add any matches to self.matches. - - :param rules: A rule configuration. - """ - required_options = frozenset() - - def __init__(self, rules, args=None): - self.matches = [] - self.rules = rules - self.occurrences = {} - self.rules['owner'] = self.rules.get('owner', '') - self.rules['priority'] = self.rules.get('priority', '2') - - def add_data(self, data): - """ The function that the ElastAlert client calls with results from ES. - Data is a list of dictionaries, from Elasticsearch. - - :param data: A list of events, each of which is a dictionary of terms. 
- """ - raise NotImplementedError() - - def add_match(self, event): - """ This function is called on all matching events. Rules use it to add - extra information about the context of a match. Event is a dictionary - containing terms directly from Elasticsearch and alerts will report - all of the information. - - :param event: The matching event, a dictionary of terms. - """ - # Convert datetime's back to timestamps - ts = self.rules.get('timestamp_field') - if ts in event: - event[ts] = dt_to_ts(event[ts]) - - self.matches.append(copy.deepcopy(event)) - - def get_match_str(self, match): - """ Returns a string that gives more context about a match. - - :param match: The matching event, a dictionary of terms. - :return: A user facing string describing the match. - """ - return '' - - def garbage_collect(self, timestamp): - """ Gets called periodically to remove old data that is useless beyond given timestamp. - May also be used to compute things in the absence of new data. - - :param timestamp: A timestamp indicating the rule has been run up to that point. - """ - pass - - def add_count_data(self, counts): - """ Gets called when a rule has use_count_query set to True. Called to add data from querying to the rule. - - :param counts: A dictionary mapping timestamps to hit counts. - """ - raise NotImplementedError() - - def add_terms_data(self, terms): - """ Gets called when a rule has use_terms_query set to True. - - :param terms: A list of buckets with a key, corresponding to query_key, and the count """ - raise NotImplementedError() - - def add_aggregation_data(self, payload): - """ Gets called when a rule has use_terms_query set to True. - :param terms: A list of buckets with a key, corresponding to query_key, and the count """ - raise NotImplementedError() - - -class CompareRule(RuleType): - """ A base class for matching a specific term by passing it to a compare function """ - required_options = frozenset(['compound_compare_key']) - - def expand_entries(self, list_type): - """ Expand entries specified in files using the '!file' directive, if there are - any, then add everything to a set. 
- """ - entries_set = set() - for entry in self.rules[list_type]: - if entry.startswith("!file"): # - "!file /path/to/list" - filename = entry.split()[1] - with open(filename, 'r') as f: - for line in f: - entries_set.add(line.rstrip()) - else: - entries_set.add(entry) - self.rules[list_type] = entries_set - - def compare(self, event): - """ An event is a match if this returns true """ - raise NotImplementedError() - - def add_data(self, data): - # If compare returns true, add it as a match - for event in data: - if self.compare(event): - self.add_match(event) - - -class BlacklistRule(CompareRule): - """ A CompareRule where the compare function checks a given key against a blacklist """ - required_options = frozenset(['compare_key', 'blacklist']) - - def __init__(self, rules, args=None): - super(BlacklistRule, self).__init__(rules, args=None) - self.expand_entries('blacklist') - - def compare(self, event): - term = lookup_es_key(event, self.rules['compare_key']) - if term in self.rules['blacklist']: - return True - return False - - -class WhitelistRule(CompareRule): - """ A CompareRule where the compare function checks a given term against a whitelist """ - required_options = frozenset(['compare_key', 'whitelist', 'ignore_null']) - - def __init__(self, rules, args=None): - super(WhitelistRule, self).__init__(rules, args=None) - self.expand_entries('whitelist') - - def compare(self, event): - term = lookup_es_key(event, self.rules['compare_key']) - if term is None: - return not self.rules['ignore_null'] - if term not in self.rules['whitelist']: - return True - return False - - -class ChangeRule(CompareRule): - """ A rule that will store values for a certain term and match if those values change """ - required_options = frozenset(['query_key', 'compound_compare_key', 'ignore_null']) - change_map = {} - occurrence_time = {} - - def compare(self, event): - key = hashable(lookup_es_key(event, self.rules['query_key'])) - values = [] - elastalert_logger.debug(" Previous Values of compare keys " + str(self.occurrences)) - for val in self.rules['compound_compare_key']: - lookup_value = lookup_es_key(event, val) - values.append(lookup_value) - elastalert_logger.debug(" Current Values of compare keys " + str(values)) - - changed = False - for val in values: - if not isinstance(val, bool) and not val and self.rules['ignore_null']: - return False - # If we have seen this key before, compare it to the new value - if key in self.occurrences: - for idx, previous_values in enumerate(self.occurrences[key]): - elastalert_logger.debug(" " + str(previous_values) + " " + str(values[idx])) - changed = previous_values != values[idx] - if changed: - break - if changed: - self.change_map[key] = (self.occurrences[key], values) - # If using timeframe, only return true if the time delta is < timeframe - if key in self.occurrence_time: - changed = event[self.rules['timestamp_field']] - self.occurrence_time[key] <= self.rules['timeframe'] - - # Update the current value and time - elastalert_logger.debug(" Setting current value of compare keys values " + str(values)) - self.occurrences[key] = values - if 'timeframe' in self.rules: - self.occurrence_time[key] = event[self.rules['timestamp_field']] - elastalert_logger.debug("Final result of comparision between previous and current values " + str(changed)) - return changed - - def add_match(self, match): - # TODO this is not technically correct - # if the term changes multiple times before an alert is sent - # this data will be overwritten with the most recent change - 
change = self.change_map.get(hashable(lookup_es_key(match, self.rules['query_key']))) - extra = {} - if change: - extra = {'old_value': change[0], - 'new_value': change[1]} - elastalert_logger.debug("Description of the changed records " + str(dict(match.items() + extra.items()))) - super(ChangeRule, self).add_match(dict(match.items() + extra.items())) - - -class FrequencyRule(RuleType): - """ A rule that matches if num_events number of events occur within a timeframe """ - required_options = frozenset(['num_events', 'timeframe']) - - def __init__(self, *args): - super(FrequencyRule, self).__init__(*args) - self.ts_field = self.rules.get('timestamp_field', '@timestamp') - self.get_ts = new_get_event_ts(self.ts_field) - self.attach_related = self.rules.get('attach_related', False) - - def add_count_data(self, data): - """ Add count data to the rule. Data should be of the form {ts: count}. """ - if len(data) > 1: - raise EAException('add_count_data can only accept one count at a time') - - (ts, count), = data.items() - - event = ({self.ts_field: ts}, count) - self.occurrences.setdefault('all', EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event) - self.check_for_match('all') - - def add_terms_data(self, terms): - if 'nested_query_key' in self.rules and self.rules['nested_query_key'] == True: - #letting this log message stay inorder to debug issues in future - elastalert_logger.info(terms) - for timestamp, buckets in terms.iteritems(): - self.flatten_nested_aggregations(timestamp,buckets) - else: - for timestamp, buckets in terms.iteritems(): - for bucket in buckets: - event = ({self.ts_field: timestamp, - self.rules['query_key']: bucket['key']}, bucket['doc_count']) - self.occurrences.setdefault(bucket['key'], EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event) - self.check_for_match(bucket['key']) - - def flatten_nested_aggregations(self,timestamp,buckets,key=None): - for bucket in buckets: - if key == None: - nestedkey = str(bucket['key']) - else: - nestedkey = key + ',' + str(bucket['key']) - if 'counts' in bucket: - self.flatten_nested_aggregations(timestamp,bucket['counts']['buckets'],nestedkey) - else: - event = ({self.ts_field: timestamp, - self.rules['query_key']: nestedkey}, bucket['doc_count']) - self.occurrences.setdefault(nestedkey, EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event) - self.check_for_match(nestedkey) - - - def add_data(self, data): - if 'query_key' in self.rules: - qk = self.rules['query_key'] - else: - qk = None - - for event in data: - if qk: - key = hashable(lookup_es_key(event, qk)) - else: - # If no query_key, we use the key 'all' for all events - key = 'all' - - # Store the timestamps of recent occurrences, per key - self.occurrences.setdefault(key, EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append((event, 1)) - self.check_for_match(key, end=False) - - # We call this multiple times with the 'end' parameter because subclasses - # may or may not want to check while only partial data has been added - if key in self.occurrences: # could have been emptied by previous check - self.check_for_match(key, end=True) - - def check_for_match(self, key, end=False): - # Match if, after removing old events, we hit num_events. 
- # the 'end' parameter depends on whether this was called from the - # middle or end of an add_data call and is used in subclasses - if self.occurrences[key].count() >= self.rules['num_events']: - event = self.occurrences[key].data[-1][0] - if self.attach_related: - event['related_events'] = [data[0] for data in self.occurrences[key].data[:-1]] - self.add_match(event) - self.occurrences.pop(key) - - def garbage_collect(self, timestamp): - """ Remove all occurrence data that is beyond the timeframe away """ - stale_keys = [] - for key, window in self.occurrences.iteritems(): - if timestamp - lookup_es_key(window.data[-1][0], self.ts_field) > self.rules['timeframe']: - stale_keys.append(key) - map(self.occurrences.pop, stale_keys) - - def get_match_str(self, match): - lt = self.rules.get('use_local_time') - match_ts = lookup_es_key(match, self.ts_field) - starttime = pretty_ts(dt_to_ts(ts_to_dt(match_ts) - self.rules['timeframe']), lt) - endtime = pretty_ts(match_ts, lt) - message = 'At least %d events occurred between %s and %s\n\n' % (self.rules['num_events'], - starttime, - endtime) - return message - -class AnyRule(RuleType): - """ A rule that will match on any input data """ - - def add_data(self, data): - for datum in data: - self.add_match(datum) - - -class EventWindow(object): - """ A container for hold event counts for rules which need a chronological ordered event window. """ - - def __init__(self, timeframe, onRemoved=None, getTimestamp=new_get_event_ts('@timestamp')): - self.timeframe = timeframe - self.onRemoved = onRemoved - self.get_ts = getTimestamp - self.data = sortedlist(key=self.get_ts) - self.running_count = 0 - - def clear(self): - self.data = sortedlist(key=self.get_ts) - self.running_count = 0 - - def append(self, event): - """ Add an event to the window. Event should be of the form (dict, count). - This will also pop the oldest events and call onRemoved on them until the - window size is less than timeframe. """ - self.data.add(event) - self.running_count += event[1] - - while self.duration() >= self.timeframe: - oldest = self.data[0] - self.data.remove(oldest) - self.running_count -= oldest[1] - self.onRemoved and self.onRemoved(oldest) - - def duration(self): - """ Get the size in timedelta of the window. """ - if not self.data: - return datetime.timedelta(0) - return self.get_ts(self.data[-1]) - self.get_ts(self.data[0]) - - def count(self): - """ Count the number of events in the window. """ - return self.running_count - - def mean(self): - """ Compute the mean of the value_field in the window. """ - if len(self.data) > 0: - datasum = 0 - datalen = 0 - for dat in self.data: - if "placeholder" not in dat[0]: - datasum += dat[1] - datalen += 1 - if datalen > 0: - return datasum / float(datalen) - return None - else: - return None - - def __iter__(self): - return iter(self.data) - - def append_middle(self, event): - """ Attempt to place the event in the correct location in our deque. - Returns True if successful, otherwise False. 
""" - rotation = 0 - ts = self.get_ts(event) - - # Append left if ts is earlier than first event - if self.get_ts(self.data[0]) > ts: - self.data.appendleft(event) - self.running_count += event[1] - return - - # Rotate window until we can insert event - while self.get_ts(self.data[-1]) > ts: - self.data.rotate(1) - rotation += 1 - if rotation == len(self.data): - # This should never happen - return - self.data.append(event) - self.running_count += event[1] - self.data.rotate(-rotation) - - -class SpikeRule(RuleType): - """ A rule that uses two sliding windows to compare relative event frequency. """ - required_options = frozenset(['timeframe', 'spike_height', 'spike_type']) - - def __init__(self, *args): - super(SpikeRule, self).__init__(*args) - self.timeframe = self.rules['timeframe'] - - self.ref_windows = {} - self.cur_windows = {} - - self.ts_field = self.rules.get('timestamp_field', '@timestamp') - self.get_ts = new_get_event_ts(self.ts_field) - self.first_event = {} - self.skip_checks = {} - - self.field_value = self.rules.get('field_value') - - self.ref_window_filled_once = False - - def add_count_data(self, data): - """ Add count data to the rule. Data should be of the form {ts: count}. """ - if len(data) > 1: - raise EAException('add_count_data can only accept one count at a time') - for ts, count in data.iteritems(): - self.handle_event({self.ts_field: ts}, count, 'all') - - def add_terms_data(self, terms): - for timestamp, buckets in terms.iteritems(): - for bucket in buckets: - count = bucket['doc_count'] - event = {self.ts_field: timestamp, - self.rules['query_key']: bucket['key']} - key = bucket['key'] - self.handle_event(event, count, key) - - def add_data(self, data): - for event in data: - qk = self.rules.get('query_key', 'all') - if qk != 'all': - qk = hashable(lookup_es_key(event, qk)) - if qk is None: - qk = 'other' - if self.field_value is not None: - if self.field_value in event: - count = lookup_es_key(event, self.field_value) - if count is not None: - try: - count = int(count) - except ValueError: - elastalert_logger.warn('{} is not a number: {}'.format(self.field_value, count)) - else: - self.handle_event(event, count, qk) - else: - self.handle_event(event, 1, qk) - - def clear_windows(self, qk, event): - # Reset the state and prevent alerts until windows filled again - self.ref_windows[qk].clear() - self.first_event.pop(qk) - self.skip_checks[qk] = lookup_es_key(event, self.ts_field) + self.rules['timeframe'] * 2 - - def handle_event(self, event, count, qk='all'): - self.first_event.setdefault(qk, event) - - self.ref_windows.setdefault(qk, EventWindow(self.timeframe, getTimestamp=self.get_ts)) - self.cur_windows.setdefault(qk, EventWindow(self.timeframe, self.ref_windows[qk].append, self.get_ts)) - - self.cur_windows[qk].append((event, count)) - - # Don't alert if ref window has not yet been filled for this key AND - if lookup_es_key(event, self.ts_field) - self.first_event[qk][self.ts_field] < self.rules['timeframe'] * 2: - # ElastAlert has not been running long enough for any alerts OR - if not self.ref_window_filled_once: - return - # This rule is not using alert_on_new_data (with query_key) OR - if not (self.rules.get('query_key') and self.rules.get('alert_on_new_data')): - return - # An alert for this qk has recently fired - if qk in self.skip_checks and lookup_es_key(event, self.ts_field) < self.skip_checks[qk]: - return - else: - self.ref_window_filled_once = True - - if self.field_value is not None: - if self.find_matches(self.ref_windows[qk].mean(), 
self.cur_windows[qk].mean()): - # skip over placeholder events - for match, count in self.cur_windows[qk].data: - if "placeholder" not in match: - break - self.add_match(match, qk) - self.clear_windows(qk, match) - else: - if self.find_matches(self.ref_windows[qk].count(), self.cur_windows[qk].count()): - # skip over placeholder events which have count=0 - for match, count in self.cur_windows[qk].data: - if count: - break - - self.add_match(match, qk) - self.clear_windows(qk, match) - - def add_match(self, match, qk): - extra_info = {} - if self.field_value is None: - spike_count = self.cur_windows[qk].count() - reference_count = self.ref_windows[qk].count() - else: - spike_count = self.cur_windows[qk].mean() - reference_count = self.ref_windows[qk].mean() - extra_info = {'spike_count': spike_count, - 'reference_count': reference_count} - - match = dict(match.items() + extra_info.items()) - - super(SpikeRule, self).add_match(match) - - def find_matches(self, ref, cur): - """ Determines if an event spike or dip happening. """ - # Apply threshold limits - if self.field_value is None: - if (cur < self.rules.get('threshold_cur', 0) or - ref < self.rules.get('threshold_ref', 0)): - return False - elif ref is None or ref == 0 or cur is None or cur == 0: - return False - - spike_up, spike_down = False, False - if cur <= ref / self.rules['spike_height']: - spike_down = True - if cur >= ref * self.rules['spike_height']: - spike_up = True - - if (self.rules['spike_type'] in ['both', 'up'] and spike_up) or \ - (self.rules['spike_type'] in ['both', 'down'] and spike_down): - return True - return False - - def get_match_str(self, match): - if self.field_value is None: - message = 'An abnormal number (%d) of events occurred around %s.\n' % ( - match['spike_count'], - pretty_ts(match[self.rules['timestamp_field']], self.rules.get('use_local_time')) - ) - message += 'Preceding that time, there were only %d events within %s\n\n' % (match['reference_count'], self.rules['timeframe']) - else: - message = 'An abnormal average value (%.2f) of field \'%s\' occurred around %s.\n' % ( - match['spike_count'], - self.field_value, - pretty_ts(match[self.rules['timestamp_field']], - self.rules.get('use_local_time')) - ) - message += 'Preceding that time, the field had an average value of (%.2f) within %s\n\n' % ( - match['reference_count'], self.rules['timeframe']) - return message - - def garbage_collect(self, ts): - # Windows are sized according to their newest event - # This is a placeholder to accurately size windows in the absence of events - for qk in self.cur_windows.keys(): - # If we havn't seen this key in a long time, forget it - if qk != 'all' and self.ref_windows[qk].count() == 0 and self.cur_windows[qk].count() == 0: - self.cur_windows.pop(qk) - self.ref_windows.pop(qk) - continue - placeholder = {self.ts_field: ts, "placeholder": True} - # The placeholder may trigger an alert, in which case, qk will be expected - if qk != 'all': - placeholder.update({self.rules['query_key']: qk}) - self.handle_event(placeholder, 0, qk) - - -class FlatlineRule(FrequencyRule): - """ A rule that matches when there is a low number of events given a timeframe. 
""" - required_options = frozenset(['timeframe', 'threshold']) - - def __init__(self, *args): - super(FlatlineRule, self).__init__(*args) - self.threshold = self.rules['threshold'] - - # Dictionary mapping query keys to the first events - self.first_event = {} - - def check_for_match(self, key, end=True): - # This function gets called between every added document with end=True after the last - # We ignore the calls before the end because it may trigger false positives - if not end: - return - - most_recent_ts = self.get_ts(self.occurrences[key].data[-1]) - if self.first_event.get(key) is None: - self.first_event[key] = most_recent_ts - - # Don't check for matches until timeframe has elapsed - if most_recent_ts - self.first_event[key] < self.rules['timeframe']: - return - - # Match if, after removing old events, we hit num_events - count = self.occurrences[key].count() - if count < self.rules['threshold']: - # Do a deep-copy, otherwise we lose the datetime type in the timestamp field of the last event - event = copy.deepcopy(self.occurrences[key].data[-1][0]) - event.update(key=key, count=count) - self.add_match(event) - - if not self.rules.get('forget_keys'): - # After adding this match, leave the occurrences windows alone since it will - # be pruned in the next add_data or garbage_collect, but reset the first_event - # so that alerts continue to fire until the threshold is passed again. - least_recent_ts = self.get_ts(self.occurrences[key].data[0]) - timeframe_ago = most_recent_ts - self.rules['timeframe'] - self.first_event[key] = min(least_recent_ts, timeframe_ago) - else: - # Forget about this key until we see it again - self.first_event.pop(key) - self.occurrences.pop(key) - - def get_match_str(self, match): - ts = match[self.rules['timestamp_field']] - lt = self.rules.get('use_local_time') - message = 'An abnormally low number of events occurred around %s.\n' % (pretty_ts(ts, lt)) - message += 'Between %s and %s, there were less than %s events.\n\n' % ( - pretty_ts(dt_to_ts(ts_to_dt(ts) - self.rules['timeframe']), lt), - pretty_ts(ts, lt), - self.rules['threshold'] - ) - return message - - def garbage_collect(self, ts): - # We add an event with a count of zero to the EventWindow for each key. This will cause the EventWindow - # to remove events that occurred more than one `timeframe` ago, and call onRemoved on them. - default = ['all'] if 'query_key' not in self.rules else [] - for key in self.occurrences.keys() or default: - self.occurrences.setdefault( - key, - EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts) - ).append( - ({self.ts_field: ts}, 0) - ) - self.first_event.setdefault(key, ts) - self.check_for_match(key) - - -class NewTermsRule(RuleType): - """ Alerts on a new value in a list of fields. 
""" - - def __init__(self, rule, args=None): - super(NewTermsRule, self).__init__(rule, args) - self.seen_values = {} - # Allow the use of query_key or fields - if 'fields' not in self.rules: - if 'query_key' not in self.rules: - raise EAException("fields or query_key must be specified") - self.fields = self.rules['query_key'] - else: - self.fields = self.rules['fields'] - if not self.fields: - raise EAException("fields must not be an empty list") - if type(self.fields) != list: - self.fields = [self.fields] - if self.rules.get('use_terms_query') and \ - (len(self.fields) != 1 or (len(self.fields) == 1 and type(self.fields[0]) == list)): - raise EAException("use_terms_query can only be used with a single non-composite field") - if self.rules.get('use_terms_query'): - if [self.rules['query_key']] != self.fields: - raise EAException('If use_terms_query is specified, you cannot specify different query_key and fields') - if not self.rules.get('query_key').endswith('.keyword') and not self.rules.get('query_key').endswith('.raw'): - if self.rules.get('use_keyword_postfix', True): - elastalert_logger.warn('Warning: If query_key is a non-keyword field, you must set ' - 'use_keyword_postfix to false, or add .keyword/.raw to your query_key.') - try: - self.get_all_terms(args) - except Exception as e: - # Refuse to start if we cannot get existing terms - raise EAException('Error searching for existing terms: %s' % (repr(e))), None, sys.exc_info()[2] - - def get_all_terms(self, args): - """ Performs a terms aggregation for each field to get every existing term. """ - self.es = elasticsearch_client(self.rules) - window_size = datetime.timedelta(**self.rules.get('terms_window_size', {'days': 30})) - field_name = {"field": "", "size": 2147483647} # Integer.MAX_VALUE - query_template = {"aggs": {"values": {"terms": field_name}}} - if args and hasattr(args, 'start') and args.start: - end = ts_to_dt(args.start) - elif 'start_date' in self.rules: - end = ts_to_dt(self.rules['start_date']) - else: - end = ts_now() - start = end - window_size - step = datetime.timedelta(**self.rules.get('window_step_size', {'days': 1})) - - for field in self.fields: - tmp_start = start - tmp_end = min(start + step, end) - - time_filter = {self.rules['timestamp_field']: {'lt': self.rules['dt_to_ts'](tmp_end), 'gte': self.rules['dt_to_ts'](tmp_start)}} - query_template['filter'] = {'bool': {'must': [{'range': time_filter}]}} - query = {'aggs': {'filtered': query_template}} - - if 'filter' in self.rules: - for item in self.rules['filter']: - query_template['filter']['bool']['must'].append(item) - - # For composite keys, we will need to perform sub-aggregations - if type(field) == list: - self.seen_values.setdefault(tuple(field), []) - level = query_template['aggs'] - # Iterate on each part of the composite key and add a sub aggs clause to the elastic search query - for i, sub_field in enumerate(field): - if self.rules.get('use_keyword_postfix', True): - level['values']['terms']['field'] = add_raw_postfix(sub_field, self.is_five_or_above()) - else: - level['values']['terms']['field'] = sub_field - if i < len(field) - 1: - # If we have more fields after the current one, then set up the next nested structure - level['values']['aggs'] = {'values': {'terms': copy.deepcopy(field_name)}} - level = level['values']['aggs'] - else: - self.seen_values.setdefault(field, []) - # For non-composite keys, only a single agg is needed - if self.rules.get('use_keyword_postfix', True): - field_name['field'] = add_raw_postfix(field, 
self.is_five_or_above()) - else: - field_name['field'] = field - - # Query the entire time range in small chunks - while tmp_start < end: - if self.rules.get('use_strftime_index'): - index = format_index(self.rules['index'], tmp_start, tmp_end) - else: - index = self.rules['index'] - res = self.es.search(body=query, index=index, ignore_unavailable=True, timeout='50s') - if 'aggregations' in res: - buckets = res['aggregations']['filtered']['values']['buckets'] - if type(field) == list: - # For composite keys, make the lookup based on all fields - # Make it a tuple since it can be hashed and used in dictionary lookups - for bucket in buckets: - # We need to walk down the hierarchy and obtain the value at each level - self.seen_values[tuple(field)] += self.flatten_aggregation_hierarchy(bucket) - else: - keys = [bucket['key'] for bucket in buckets] - self.seen_values[field] += keys - else: - if type(field) == list: - self.seen_values.setdefault(tuple(field), []) - else: - self.seen_values.setdefault(field, []) - if tmp_start == tmp_end: - break - tmp_start = tmp_end - tmp_end = min(tmp_start + step, end) - time_filter[self.rules['timestamp_field']] = {'lt': self.rules['dt_to_ts'](tmp_end), - 'gte': self.rules['dt_to_ts'](tmp_start)} - - for key, values in self.seen_values.iteritems(): - if not values: - if type(key) == tuple: - # If we don't have any results, it could either be because of the absence of any baseline data - # OR it may be because the composite key contained a non-primitive type. Either way, give the - # end-users a heads up to help them debug what might be going on. - elastalert_logger.warning(( - 'No results were found from all sub-aggregations. This can either indicate that there is ' - 'no baseline data OR that a non-primitive field was used in a composite key.' 
- )) - else: - elastalert_logger.info('Found no values for %s' % (field)) - continue - self.seen_values[key] = list(set(values)) - elastalert_logger.info('Found %s unique values for %s' % (len(set(values)), key)) - - def flatten_aggregation_hierarchy(self, root, hierarchy_tuple=()): - """ For nested aggregations, the results come back in the following format: - { - "aggregations" : { - "filtered" : { - "doc_count" : 37, - "values" : { - "doc_count_error_upper_bound" : 0, - "sum_other_doc_count" : 0, - "buckets" : [ { - "key" : "1.1.1.1", # IP address (root) - "doc_count" : 13, - "values" : { - "doc_count_error_upper_bound" : 0, - "sum_other_doc_count" : 0, - "buckets" : [ { - "key" : "80", # Port (sub-aggregation) - "doc_count" : 3, - "values" : { - "doc_count_error_upper_bound" : 0, - "sum_other_doc_count" : 0, - "buckets" : [ { - "key" : "ack", # Reason (sub-aggregation, leaf-node) - "doc_count" : 3 - }, { - "key" : "syn", # Reason (sub-aggregation, leaf-node) - "doc_count" : 1 - } ] - } - }, { - "key" : "82", # Port (sub-aggregation) - "doc_count" : 3, - "values" : { - "doc_count_error_upper_bound" : 0, - "sum_other_doc_count" : 0, - "buckets" : [ { - "key" : "ack", # Reason (sub-aggregation, leaf-node) - "doc_count" : 3 - }, { - "key" : "syn", # Reason (sub-aggregation, leaf-node) - "doc_count" : 3 - } ] - } - } ] - } - }, { - "key" : "2.2.2.2", # IP address (root) - "doc_count" : 4, - "values" : { - "doc_count_error_upper_bound" : 0, - "sum_other_doc_count" : 0, - "buckets" : [ { - "key" : "443", # Port (sub-aggregation) - "doc_count" : 3, - "values" : { - "doc_count_error_upper_bound" : 0, - "sum_other_doc_count" : 0, - "buckets" : [ { - "key" : "ack", # Reason (sub-aggregation, leaf-node) - "doc_count" : 3 - }, { - "key" : "syn", # Reason (sub-aggregation, leaf-node) - "doc_count" : 3 - } ] - } - } ] - } - } ] - } - } - } - } - - Each level will either have more values and buckets, or it will be a leaf node - We'll ultimately return a flattened list with the hierarchies appended as strings, - e.g the above snippet would yield a list with: - - [ - ('1.1.1.1', '80', 'ack'), - ('1.1.1.1', '80', 'syn'), - ('1.1.1.1', '82', 'ack'), - ('1.1.1.1', '82', 'syn'), - ('2.2.2.2', '443', 'ack'), - ('2.2.2.2', '443', 'syn') - ] - - A similar formatting will be performed in the add_data method and used as the basis for comparison - - """ - results = [] - # There are more aggregation hierarchies left. Traverse them. 
- if 'values' in root: - results += self.flatten_aggregation_hierarchy(root['values']['buckets'], hierarchy_tuple + (root['key'],)) - else: - # We've gotten to a sub-aggregation, which may have further sub-aggregations - # See if we need to traverse further - for node in root: - if 'values' in node: - results += self.flatten_aggregation_hierarchy(node, hierarchy_tuple) - else: - results.append(hierarchy_tuple + (node['key'],)) - return results - - def add_data(self, data): - for document in data: - for field in self.fields: - value = () - lookup_field = field - if type(field) == list: - # For composite keys, make the lookup based on all fields - # Make it a tuple since it can be hashed and used in dictionary lookups - lookup_field = tuple(field) - for sub_field in field: - lookup_result = lookup_es_key(document, sub_field) - if not lookup_result: - value = None - break - value += (lookup_result,) - else: - value = lookup_es_key(document, field) - if not value and self.rules.get('alert_on_missing_field'): - document['missing_field'] = lookup_field - self.add_match(copy.deepcopy(document)) - elif value: - if value not in self.seen_values[lookup_field]: - document['new_field'] = lookup_field - self.add_match(copy.deepcopy(document)) - self.seen_values[lookup_field].append(value) - - def add_terms_data(self, terms): - # With terms query, len(self.fields) is always 1 and the 0'th entry is always a string - field = self.fields[0] - for timestamp, buckets in terms.iteritems(): - for bucket in buckets: - if bucket['doc_count']: - if bucket['key'] not in self.seen_values[field]: - match = {field: bucket['key'], - self.rules['timestamp_field']: timestamp, - 'new_field': field} - self.add_match(match) - self.seen_values[field].append(bucket['key']) - - def is_five_or_above(self): - version = self.es.info()['version']['number'] - return int(version[0]) >= 5 - - -class CardinalityRule(RuleType): - """ A rule that matches if cardinality of a field is above or below a threshold within a timeframe """ - required_options = frozenset(['timeframe', 'cardinality_field']) - - def __init__(self, *args): - super(CardinalityRule, self).__init__(*args) - if 'max_cardinality' not in self.rules and 'min_cardinality' not in self.rules: - raise EAException("CardinalityRule must have one of either max_cardinality or min_cardinality") - self.ts_field = self.rules.get('timestamp_field', '@timestamp') - self.cardinality_field = self.rules['cardinality_field'] - self.cardinality_cache = {} - self.first_event = {} - self.timeframe = self.rules['timeframe'] - - def add_data(self, data): - qk = self.rules.get('query_key') - for event in data: - if qk: - key = hashable(lookup_es_key(event, qk)) - else: - # If no query_key, we use the key 'all' for all events - key = 'all' - self.cardinality_cache.setdefault(key, {}) - self.first_event.setdefault(key, lookup_es_key(event, self.ts_field)) - value = hashable(lookup_es_key(event, self.cardinality_field)) - if value is not None: - # Store this timestamp as most recent occurence of the term - self.cardinality_cache[key][value] = lookup_es_key(event, self.ts_field) - self.check_for_match(key, event) - - def check_for_match(self, key, event, gc=True): - # Check to see if we are past max/min_cardinality for a given key - time_elapsed = lookup_es_key(event, self.ts_field) - self.first_event.get(key, lookup_es_key(event, self.ts_field)) - timeframe_elapsed = time_elapsed > self.timeframe - if (len(self.cardinality_cache[key]) > self.rules.get('max_cardinality', float('inf')) or - 
(len(self.cardinality_cache[key]) < self.rules.get('min_cardinality', float('-inf')) and timeframe_elapsed)): - # If there might be a match, run garbage collect first, as outdated terms are only removed in GC - # Only run it if there might be a match so it doesn't impact performance - if gc: - self.garbage_collect(lookup_es_key(event, self.ts_field)) - self.check_for_match(key, event, False) - else: - self.first_event.pop(key, None) - self.add_match(event) - - def garbage_collect(self, timestamp): - """ Remove all occurrence data that is beyond the timeframe away """ - for qk, terms in self.cardinality_cache.items(): - for term, last_occurence in terms.items(): - if timestamp - last_occurence > self.rules['timeframe']: - self.cardinality_cache[qk].pop(term) - - # Create a placeholder event for if a min_cardinality match occured - if 'min_cardinality' in self.rules: - event = {self.ts_field: timestamp} - if 'query_key' in self.rules: - event.update({self.rules['query_key']: qk}) - self.check_for_match(qk, event, False) - - def get_match_str(self, match): - lt = self.rules.get('use_local_time') - starttime = pretty_ts(dt_to_ts(ts_to_dt(match[self.ts_field]) - self.rules['timeframe']), lt) - endtime = pretty_ts(match[self.ts_field], lt) - if 'max_cardinality' in self.rules: - message = ('A maximum of %d unique %s(s) occurred since last alert or between %s and %s\n\n' % (self.rules['max_cardinality'], - self.rules['cardinality_field'], - starttime, endtime)) - else: - message = ('Less than %d unique %s(s) occurred since last alert or between %s and %s\n\n' % (self.rules['min_cardinality'], - self.rules['cardinality_field'], - starttime, endtime)) - return message - - -class BaseAggregationRule(RuleType): - def __init__(self, *args): - super(BaseAggregationRule, self).__init__(*args) - bucket_interval = self.rules.get('bucket_interval') - if bucket_interval: - if 'seconds' in bucket_interval: - self.rules['bucket_interval_period'] = str(bucket_interval['seconds']) + 's' - elif 'minutes' in bucket_interval: - self.rules['bucket_interval_period'] = str(bucket_interval['minutes']) + 'm' - elif 'hours' in bucket_interval: - self.rules['bucket_interval_period'] = str(bucket_interval['hours']) + 'h' - elif 'days' in bucket_interval: - self.rules['bucket_interval_period'] = str(bucket_interval['days']) + 'd' - elif 'weeks' in bucket_interval: - self.rules['bucket_interval_period'] = str(bucket_interval['weeks']) + 'w' - else: - raise EAException("Unsupported window size") - - if self.rules.get('use_run_every_query_size'): - if total_seconds(self.rules['run_every']) % total_seconds(self.rules['bucket_interval_timedelta']) != 0: - raise EAException("run_every must be evenly divisible by bucket_interval if specified") - else: - if total_seconds(self.rules['buffer_time']) % total_seconds(self.rules['bucket_interval_timedelta']) != 0: - raise EAException("Buffer_time must be evenly divisible by bucket_interval if specified") - - def generate_aggregation_query(self): - raise NotImplementedError() - - def add_aggregation_data(self, payload): - for timestamp, payload_data in payload.iteritems(): - if 'interval_aggs' in payload_data: - self.unwrap_interval_buckets(timestamp, None, payload_data['interval_aggs']['buckets']) - elif 'bucket_aggs' in payload_data: - self.unwrap_term_buckets(timestamp, payload_data['bucket_aggs']['buckets']) - else: - self.check_matches(timestamp, None, payload_data) - - def unwrap_interval_buckets(self, timestamp, query_key, interval_buckets): - for interval_data in 
interval_buckets: - # Use bucket key here instead of start_time for more accurate match timestamp - self.check_matches(ts_to_dt(interval_data['key_as_string']), query_key, interval_data) - - def unwrap_term_buckets(self, timestamp, term_buckets): - for term_data in term_buckets: - if 'interval_aggs' in term_data: - self.unwrap_interval_buckets(timestamp, term_data['key'], term_data['interval_aggs']['buckets']) - else: - self.check_matches(timestamp, term_data['key'], term_data) - - def check_matches(self, timestamp, query_key, aggregation_data): - raise NotImplementedError() - -class ErrorRateRule(BaseAggregationRule): - """ A rule that determines error rate with sampling rate""" - required_options = frozenset(['sampling', 'threshold','error_condition','unique_column']) - def __init__(self, *args): - super(ErrorRateRule, self).__init__(*args) - - self.ts_field = self.rules.get('timestamp_field', '@timestamp') - self.rules['total_agg_key'] = self.rules['unique_column'] - self.rules['count_all_errors'] = True - - if (self.rules.has_key('error_calculation_method') and self.rules['error_calculation_method']=='count_traces_with_errors' ): - self.rules['count_all_errors'] = False - - # hardcoding uniq aggregation for total count - self.rules['total_agg_type'] = "uniq" - - def calculate_err_rate(self,payload): - for timestamp, payload_data in payload.iteritems(): - if int(payload_data['total_count']) > 0: - rate = float(payload_data['error_count'])/float(payload_data['total_count']) - rate = float(rate)/float(self.rules['sampling']) - rate = rate*100 - if 'threshold' in self.rules and rate > self.rules['threshold']: - match = {self.rules['timestamp_field']: timestamp, 'error_rate': rate, 'from': payload_data['start_time'], 'to': payload_data['end_time']} - self.add_match(match) - - -class MetricAggregationRule(BaseAggregationRule): - """ A rule that matches when there is a low number of events given a timeframe. 
""" - required_options = frozenset(['metric_agg_key', 'metric_agg_type', 'doc_type']) - allowed_aggregations = frozenset(['min', 'max', 'avg', 'sum', 'cardinality', 'value_count']) - - def __init__(self, *args): - super(MetricAggregationRule, self).__init__(*args) - self.ts_field = self.rules.get('timestamp_field', '@timestamp') - if 'max_threshold' not in self.rules and 'min_threshold' not in self.rules: - raise EAException("MetricAggregationRule must have at least one of either max_threshold or min_threshold") - - self.metric_key = self.rules['metric_agg_key'] + '_' + self.rules['metric_agg_type'] - - if not self.rules['metric_agg_type'] in self.allowed_aggregations: - raise EAException("metric_agg_type must be one of %s" % (str(self.allowed_aggregations))) - - self.rules['aggregation_query_element'] = self.generate_aggregation_query() - - def get_match_str(self, match): - message = 'Threshold violation, %s:%s %s (min: %s max : %s) \n\n' % ( - self.rules['metric_agg_type'], - self.rules['metric_agg_key'], - match[self.metric_key], - self.rules.get('min_threshold'), - self.rules.get('max_threshold') - ) - return message - - def generate_aggregation_query(self): - return {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} - - def check_matches(self, timestamp, query_key, aggregation_data): - if "compound_query_key" in self.rules: - self.check_matches_recursive(timestamp, query_key, aggregation_data, self.rules['compound_query_key'], dict()) - - else: - metric_val = aggregation_data[self.metric_key]['value'] - if self.crossed_thresholds(metric_val): - match = {self.rules['timestamp_field']: timestamp, - self.metric_key: metric_val} - if query_key is not None: - match[self.rules['query_key']] = query_key - self.add_match(match) - - def check_matches_recursive(self, timestamp, query_key, aggregation_data, compound_keys, match_data): - if len(compound_keys) < 1: - # shouldn't get to this point, but checking for safety - return - - match_data[compound_keys[0]] = aggregation_data['key'] - if 'bucket_aggs' in aggregation_data: - for result in aggregation_data['bucket_aggs']['buckets']: - self.check_matches_recursive(timestamp, - query_key, - result, - compound_keys[1:], - match_data) - - else: - metric_val = aggregation_data[self.metric_key]['value'] - if self.crossed_thresholds(metric_val): - match_data[self.rules['timestamp_field']] = timestamp - match_data[self.metric_key] = metric_val - - # add compound key to payload to allow alerts to trigger for every unique occurence - compound_value = [match_data[key] for key in self.rules['compound_query_key']] - match_data[self.rules['query_key']] = ",".join(compound_value) - - self.add_match(match_data) - - def crossed_thresholds(self, metric_value): - if metric_value is None: - return False - if 'max_threshold' in self.rules and metric_value > self.rules['max_threshold']: - return True - if 'min_threshold' in self.rules and metric_value < self.rules['min_threshold']: - return True - return False - - -class PercentageMatchRule(BaseAggregationRule): - required_options = frozenset(['match_bucket_filter']) - - def __init__(self, *args): - super(PercentageMatchRule, self).__init__(*args) - self.ts_field = self.rules.get('timestamp_field', '@timestamp') - if 'max_percentage' not in self.rules and 'min_percentage' not in self.rules: - raise EAException("PercentageMatchRule must have at least one of either min_percentage or max_percentage") - - self.match_bucket_filter = self.rules['match_bucket_filter'] - 
self.rules['aggregation_query_element'] = self.generate_aggregation_query() - - def get_match_str(self, match): - percentage_format_string = self.rules.get('percentage_format_string', None) - message = 'Percentage violation, value: %s (min: %s max : %s) of %s items\n\n' % ( - percentage_format_string % (match['percentage']) if percentage_format_string else match['percentage'], - self.rules.get('min_percentage'), - self.rules.get('max_percentage'), - match['denominator'] - ) - return message - - def generate_aggregation_query(self): - return { - 'percentage_match_aggs': { - 'filters': { - 'other_bucket': True, - 'filters': { - 'match_bucket': { - 'bool': { - 'must': self.match_bucket_filter - } - } - } - } - } - } - - def check_matches(self, timestamp, query_key, aggregation_data): - match_bucket_count = aggregation_data['percentage_match_aggs']['buckets']['match_bucket']['doc_count'] - other_bucket_count = aggregation_data['percentage_match_aggs']['buckets']['_other_']['doc_count'] - - if match_bucket_count is None or other_bucket_count is None: - return - else: - total_count = other_bucket_count + match_bucket_count - if total_count == 0: - return - else: - match_percentage = (match_bucket_count * 1.0) / (total_count * 1.0) * 100 - if self.percentage_violation(match_percentage): - match = {self.rules['timestamp_field']: timestamp, 'percentage': match_percentage, 'denominator': total_count} - if query_key is not None: - match[self.rules['query_key']] = query_key - self.add_match(match) - - def percentage_violation(self, match_percentage): - if 'max_percentage' in self.rules and match_percentage > self.rules['max_percentage']: - return True - if 'min_percentage' in self.rules and match_percentage < self.rules['min_percentage']: - return True - return False From 245fbccc0c795d3540992316ab0e21b0f41642b2 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Tue, 21 Mar 2023 10:03:36 +0530 Subject: [PATCH 1028/1065] Added docker changes --- Dockerfile | 5 +++-- commands.sh | 5 +++++ 2 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 commands.sh diff --git a/Dockerfile b/Dockerfile index 4653c49de..76a071e11 100644 --- a/Dockerfile +++ b/Dockerfile @@ -41,5 +41,6 @@ RUN python --version WORKDIR /opt/elastalert -ENTRYPOINT ["python","-m","elastalert.create_index","--config","/data/elastalert/config.yaml", "--verbose"] -ENTRYPOINT ["python","-m","elastalert.elastalert","--config","/data/elastalert/config.yaml", "--verbose"] +COPY commands.sh /opt/elastalert/commands.sh +RUN ["chmod", "+x", "/opt/elastalert/commands.sh"] +ENTRYPOINT ["sh","/opt/elastalert/commands.sh"] \ No newline at end of file diff --git a/commands.sh b/commands.sh new file mode 100644 index 000000000..56c37ecb4 --- /dev/null +++ b/commands.sh @@ -0,0 +1,5 @@ +#!/bin/bash +echo "creating elastalert indices" +python -m elastalert.create_index --config /data/elastalert/config.yaml --verbose +echo "Starting elastalert" +python -m elastalert.elastalert --config /data/elastalert/config.yaml --verbose From 8757561c5507ac11356733fdef886b44becd8dd6 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Fri, 24 Mar 2023 11:17:51 +0530 Subject: [PATCH 1029/1065] fixed terms-query error --- elastalert/elastalert.py | 45 +++++++++++++++++++--------------------- elastalert/ruletypes.py | 4 ++-- 2 files changed, 23 insertions(+), 26 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 128c1c3c6..313e98246 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -257,36 +257,33 @@ def 
get_query(filters, starttime=None, endtime=None, sort=True, timestamp_field= query['sort'] = [{timestamp_field: {'order': 'desc' if desc else 'asc'}}] return query - def get_terms_query(self, query, rule, size, field, five=False): + def get_terms_query(self, query, rule, size, field): """ Takes a query generated by get_query and outputs a aggregation query """ query_element = query['query'] if 'sort' in query_element: query_element.pop('sort') - if not five: - query_element['filtered'].update({'aggs': {'counts': {'terms': {'field': field, - 'size': size, - 'min_doc_count': rule.get('min_doc_count', 1)}}}}) - aggs_query = {'aggs': query_element} + + if 'nested_query_key' in rule and rule['nested_query_key'] == True and len(field.split(",")) > 1: + aggs_query = query + query_key_list = field.split(",") + first_query_key = query_key_list.pop() + aggs_element = {'counts': {'terms': {'field': first_query_key, + 'size': size, + 'min_doc_count': rule.get('min_doc_count', 1)}}} + + if len(query_key_list) > 0: + for key in reversed(query_key_list): + aggs_element = {'counts': {'terms': {'field': key, 'size': size, + 'min_doc_count': rule.get('min_doc_count', 1)}, 'aggs': aggs_element}} + aggs_query['aggs'] = aggs_element else: - if 'nested_query_key' in rule and rule['nested_query_key'] == True and len(field.split(",")) > 1: - aggs_query = query - query_key_list = field.split(",") - first_query_key = query_key_list.pop() - aggs_element = {'counts': {'terms': {'field': first_query_key, - 'size': size, - 'min_doc_count': rule.get('min_doc_count', 1)}}} - - if len(query_key_list) > 0: - for key in reversed(query_key_list): - aggs_element = {'counts': {'terms': {'field': key, 'size': size, - 'min_doc_count': rule.get('min_doc_count', 1)}, 'aggs': aggs_element}} - aggs_query['aggs'] = aggs_element - else: - aggs_query = query - aggs_query['aggs'] = {'counts': {'terms': {'field': field, - 'size': size, - 'min_doc_count': rule.get('min_doc_count', 1)}}} + aggs_query = query + aggs_query['aggs'] = {'counts': {'terms': {'field': field, + 'size': size, + 'min_doc_count': rule.get('min_doc_count', 1)}}} + + print(aggs_query) return aggs_query diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 663ef3a37..3fc9d6dc5 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -226,10 +226,10 @@ def add_terms_data(self, terms): if 'nested_query_key' in self.rules and self.rules['nested_query_key'] == True: #letting this log message stay inorder to debug issues in future elastalert_logger.info(terms) - for timestamp, buckets in terms.iteritems(): + for timestamp, buckets in terms.items(): self.flatten_nested_aggregations(timestamp,buckets) else: - for timestamp, buckets in terms.iteritems(): + for timestamp, buckets in terms.items(): for bucket in buckets: event = ({self.ts_field: timestamp, self.rules['query_key']: bucket['key']}, bucket['doc_count']) From 2112e70d69f2a7668aab2c1ea43df4fac29bf1a1 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Fri, 24 Mar 2023 12:07:12 +0530 Subject: [PATCH 1030/1065] removed unnecessary print statments --- elastalert/elastalert.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 313e98246..38b706803 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -283,8 +283,6 @@ def get_terms_query(self, query, rule, size, field): 'min_doc_count': rule.get('min_doc_count', 1)}}} - print(aggs_query) - return aggs_query def get_aggregation_query(self, query, rule, query_key, terms_size, 
timestamp_field='@timestamp'): From f934df3a0d7adce115fb8b3a758e8d4da069dae2 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 27 Mar 2023 17:10:58 +0530 Subject: [PATCH 1031/1065] es-7 create_index testing --- commands.sh | 2 +- elastalert/create_index.py | 10 +++------- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/commands.sh b/commands.sh index 56c37ecb4..f12a8cdd1 100644 --- a/commands.sh +++ b/commands.sh @@ -1,5 +1,5 @@ #!/bin/bash echo "creating elastalert indices" -python -m elastalert.create_index --config /data/elastalert/config.yaml --verbose +python -m elastalert.create_index --config /data/elastalert/config.yaml echo "Starting elastalert" python -m elastalert.elastalert --config /data/elastalert/config.yaml --verbose diff --git a/elastalert/create_index.py b/elastalert/create_index.py index 9b2dab3c3..58c7dd80d 100644 --- a/elastalert/create_index.py +++ b/elastalert/create_index.py @@ -24,13 +24,9 @@ def create_index_mappings(es_client, ea_index, recreate=False, old_ea_index=None esversion = get_version_from_cluster_info(es_client) es_index_mappings = {} - if is_atleasteight(esversion): - es_index_mappings = read_es_index_mappings() - elif is_atleastseven(esversion): - es_index_mappings = read_es_index_mappings(7) - else: - print('FATAL - Unsupported Elasticsearch version: ' + esversion + '. Aborting.') - exit(1) + + #using es_mappings 7 + es_index_mappings = read_es_index_mappings(7) es_index = IndicesClient(es_client) if not recreate: From 6559448cc35ec482f95cc26758ef48e55b8dbac1 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 27 Mar 2023 20:06:55 +0530 Subject: [PATCH 1032/1065] removed print statements from dockerfile --- Dockerfile | 5 ----- 1 file changed, 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index 76a071e11..f23040582 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,10 +15,6 @@ RUN python setup.py install RUN pip show elastalert2 -RUN echo "coming here..." -RUN ls /usr/local/lib/ -RUN ls /usr/lib/ -RUN ls /lib/ FROM gcr.io/distroless/python3:debug as runtime @@ -36,7 +32,6 @@ COPY --from=build /lib/libc.musl-x86_64.so.1 /lib/ ENV PYTHONPATH=/usr/local/lib/python3.9/site-packages ENV PATH=/usr/local/lib:/usr/lib:$PATH -RUN ls /usr/local/bin/ RUN python --version WORKDIR /opt/elastalert From 891054c624388a8ea9e8ced444f08b5dc4be4a5b Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 27 Mar 2023 20:40:22 +0530 Subject: [PATCH 1033/1065] added comments for PR readability --- elastalert/elastalert.py | 25 +++++++++++++++++++++++++ elastalert/loaders.py | 4 +++- elastalert/ruletypes.py | 5 ++++- elastalert/util.py | 3 ++- tests/rules_test.py | 3 ++- 5 files changed, 36 insertions(+), 4 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 38b706803..840e2933d 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -93,6 +93,7 @@ def parse_args(self, args): dest='es_debug_trace', help='Enable logging from Elasticsearch queries as curl command. Queries will be logged to file. 
Note that '
                                 'this will incorrectly display localhost:9200 as the host/port')
+    #prometheus port changes
     parser.add_argument('--prometheus_port', type=int, dest='prometheus_port', default=9090, help='Enables Prometheus metrics on specified port.')
         self.args = parser.parse_args(args)
 
@@ -172,9 +173,12 @@ def __init__(self, args):
         self.pretty_ts_format = self.conf.get('custom_pretty_ts_format')
 
         self.writeback_es = elasticsearch_client(self.conf)
+
+        #kibana adapter is the modded elasticsearch_client
         self.kibana_adapter = kibana_adapter_client(self.conf)
         self._es_version = None
 
+        #query_endpoint used by error_rate rule
         self.query_endpoint = self.conf['query_endpoint']
 
         remove = []
@@ -208,6 +212,8 @@ def get_index(rule, starttime=None, endtime=None):
         else:
             return index
 
+
+    #backwards compatibility with es6 msearch
     @staticmethod
     def get_msearch_query(query, rule):
         search_arr = []
@@ -399,6 +405,7 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False):
 
         request = self.get_msearch_query(query,rule)
 
+        #removed scroll as it isn't supported
         # extra_args = {'_source_includes': rule['include']}
         # scroll_keepalive = rule.get('scroll_keepalive', self.scroll_keepalive)
         # if not rule.get('_source_enabled'):
@@ -406,9 +413,12 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False):
         #     extra_args = {}
 
         try:
+            #using backwards compatible msearch
             res = self.thread_data.current_es.msearch(body=request)
             res = res['responses'][0]
             self.thread_data.total_hits = int(res['hits']['total'])
+
+            #removed scroll as it isn't supported
             # if scroll:
             #     res = self.thread_data.current_es.scroll(scroll_id=rule['scroll_id'], scroll=scroll_keepalive)
             # else:
@@ -483,6 +493,7 @@ def get_hits_count(self, rule, starttime, endtime, index):
         request = self.get_msearch_query(query,rule)
 
         try:
+            #using backwards compatible msearch
             res = self.thread_data.current_es.msearch(body=request)
             res = res['responses'][0]
         except ElasticsearchException as e:
@@ -535,6 +546,7 @@ def get_hits_terms(self, rule, starttime, endtime, index, key, qk=None, size=Non
         request = self.get_msearch_query(query,rule)
 
         try:
+            #using backwards compatible msearch
             res = self.thread_data.current_es.msearch(body=request)
             res = res['responses'][0]
 
@@ -573,6 +585,7 @@ def get_hits_aggregation(self, rule, starttime, endtime, index, query_key, term_
         query = self.get_aggregation_query(base_query, rule, query_key, term_size, rule['timestamp_field'])
         request = self.get_msearch_query(query,rule)
         try:
+            #using backwards compatible msearch
             res = self.thread_data.current_es.msearch(body=request)
             res = res['responses'][0]
         except ElasticsearchException as e:
@@ -587,6 +600,8 @@ def get_hits_aggregation(self, rule, starttime, endtime, index, query_key, term_
         self.thread_data.num_hits += res['hits']['total']
         return {endtime: payload}
 
+
+    #trace_alert specific error rate method
     def get_error_rate(self, rule, starttime, endtime):
         agg_key = '{}({})'.format(rule['total_agg_type'],rule['total_agg_key'])
         query = self.get_query_string(rule)
@@ -619,11 +634,13 @@ def get_error_rate(self, rule, starttime, endtime):
 
         return {endtime: payload}
 
+    #method used by get_error_rate
     def get_query_string(self, rule):
         if rule['filter'] and ('query_string' in rule['filter'][0]['query']) and ('query' in rule['filter'][0]['query']['query_string']):
             return rule['filter'][0]['query']['query_string']['query']
         return ""
 
+    #method used by get_error_rate for calculating aggregates from ch data using query_endpoint
     def get_ch_data(self, rule, starttime, endtime, agg_key, 
freshquery,aggregation): data = { "selects":[], @@ -727,6 +744,9 @@ def run_query(self, rule, start=None, end=None, scroll=False): else: rule_inst.add_data(data) + + #Removed scrolling as in old elastalert + # try: # if rule.get('scroll_id') and self.thread_data.num_hits < self.thread_data.total_hits and should_scrolling_continue(rule): # if not self.run_query(rule, start, end, scroll=True): @@ -757,6 +777,7 @@ def get_starttime(self, rule): try: doc_type = 'elastalert_status' index = self.writeback_es.resolve_writeback_index(self.writeback_index, doc_type) + #modded for elasticsearch ver 6 library compatibility res = self.writeback_es.search(index=index, doc_type='elastalert_status', size=1, body=query, _source_include=['endtime', 'rule_name']) if res['hits']['hits']: @@ -1587,6 +1608,7 @@ def find_recent_pending_alerts(self, time_limit): query = {'query': {'bool': {'must': inner_query, 'filter': time_filter}}} query.update(sort) try: + #modded for elasticsearch ver 6 library compatibility res = self.writeback_es.search(index=self.writeback_index, doc_type='elastalert', body=query, @@ -1674,6 +1696,7 @@ def get_aggregated_matches(self, _id): query = {'query': {'query_string': {'query': 'aggregate_id:"%s"' % (_id)}}, 'sort': {'@timestamp': 'asc'}} matches = [] try: + #modded for elasticsearch ver 6 library compatibility res = self.writeback_es.search(index=self.writeback_index, doc_type='elastalert', body=query, @@ -1697,6 +1720,7 @@ def find_pending_aggregate_alert(self, rule, aggregation_key_value=None): query = {'query': {'bool': query}} query['sort'] = {'alert_time': {'order': 'desc'}} try: + #modded for elasticsearch ver 6 library compatibility res = self.writeback_es.search(index=self.writeback_index, doc_type='elastalert', body=query, @@ -1844,6 +1868,7 @@ def is_silenced(self, rule_name): try: doc_type = 'silence' index = self.writeback_es.resolve_writeback_index(self.writeback_index, doc_type) + #modded for elasticsearch ver 6 library compatibility res = self.writeback_es.search(index=index, doc_type='silence', size=1, body=query, _source_include=['until', 'exponent']) except ElasticsearchException as e: diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 4f219c56a..c46266e8f 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -93,7 +93,7 @@ class RulesLoader(object): 'metric_aggregation': ruletypes.MetricAggregationRule, 'percentage_match': ruletypes.PercentageMatchRule, 'spike_aggregation': ruletypes.SpikeMetricAggregationRule, - 'error_rate': ruletypes.ErrorRateRule + 'error_rate': ruletypes.ErrorRateRule #Adding Error Rate Rule type } # Used to map names of alerts to their classes @@ -253,6 +253,7 @@ def load_yaml(self, filename): while True: loaded = self.get_yaml(current_path) + #Setting default operator for filters as AND as in elastalert-0.1.35 if 'query_string' in loaded['filter'][0]: loaded['filter'][0]['query_string']['default_operator'] = "AND" @@ -397,6 +398,7 @@ def _dt_to_ts_with_format(dt): if 'include' in rule and type(rule['include']) != list: raise EAException('include option must be a list') + #setting default config fields for error_rate if (rule['type'] == 'error_rate'): rule.setdefault('error_condition','exception.type:*') rule.setdefault('unique_column','traceID') diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 3fc9d6dc5..bd9a1a35b 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -222,6 +222,7 @@ def add_count_data(self, data): self.occurrences.setdefault('all', 
EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event)
         self.check_for_match('all')
 
+    #nested query key optimizations
     def add_terms_data(self, terms):
         if 'nested_query_key' in self.rules and self.rules['nested_query_key'] == True:
             #letting this log message stay inorder to debug issues in future
             elastalert_logger.info(terms)
@@ -236,6 +237,7 @@ def add_terms_data(self, terms):
             self.occurrences.setdefault(bucket['key'], EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event)
             self.check_for_match(bucket['key'])
 
+    #nested query key optimizations
     def flatten_nested_aggregations(self,timestamp,buckets,key=None):
         for bucket in buckets:
             if key == None:
@@ -1067,7 +1069,7 @@ def unwrap_term_buckets(self, timestamp, term_buckets):
     def check_matches(self, timestamp, query_key, aggregation_data):
         raise NotImplementedError()
 
-
+#Error Rate Rule Definition
 class ErrorRateRule(BaseAggregationRule):
     """ A rule that determines error rate with sampling rate"""
     required_options = frozenset(['sampling', 'threshold','error_condition','unique_column'])
@@ -1140,6 +1142,7 @@ def check_matches(self, timestamp, query_key, aggregation_data):
 
         else:
             if self.rules['metric_agg_type'] in self.allowed_percent_aggregations:
+                #backwards compatibility with existing elasticsearch library
                 metric_val = list(aggregation_data[self.metric_key]['values'][0].values())[0]
             else:
                 metric_val = aggregation_data[self.metric_key]['value']
diff --git a/elastalert/util.py b/elastalert/util.py
index 39e6e58de..dff180228 100644
--- a/elastalert/util.py
+++ b/elastalert/util.py
@@ -280,7 +280,7 @@ def unixms_to_dt(ts):
 def unix_to_dt(ts):
     if(type(ts) == types.UnicodeType):
         dt = datetime.datetime.strptime(ts, '%Y-%m-%d %H:%M:%S.%f')
-    else:
+    else: #if timestamp is in float format
         dt = datetime.datetime.utcfromtimestamp(float(ts))
     dt = dt.replace(tzinfo=dateutil.tz.tzutc())
     return dt
@@ -348,6 +348,7 @@ def elasticsearch_client(conf):
     return ElasticSearchClient(es_conn_conf)
 
 
+#modded version of elasticsearch_client that suits haystack's needs
 def kibana_adapter_client(conf):
     """ returns an Elasticsearch instance configured using an es_conn_config """
     es_conn_conf = build_adapter_conn_config(conf)
diff --git a/tests/rules_test.py b/tests/rules_test.py
index 17abe459c..06375a333 100644
--- a/tests/rules_test.py
+++ b/tests/rules_test.py
@@ -1282,7 +1282,7 @@ def test_metric_aggregation_scripted():
     rule.check_matches(datetime.datetime.now(), None, {'metric_cpu_pct_avg': {'value': -0.5}})
     assert rule.matches[0]['metric_cpu_pct_avg'] == -0.5
 
-
+#mock_Response for get_ch_data
 def _mock_response(
         status=200,
         content='{"test": "test"}',
@@ -1302,6 +1302,7 @@ def _mock_response(
     mock_resp.json = mock.Mock(return_value=json_data)
     return mock_resp
 
+#Error rate rule testing methods
 def get_error_rate_tester(ea,total_count= 5,error_count= 10, count_all_errors=True):
     #testing elastalert function that hits query_endpoint and gets aggregation data
     rules = [{'es_host': '',
From 9a27131c19f8589cc41de47719ccfaec07f477eb Mon Sep 17 00:00:00 2001
From: ajaywk7
Date: Tue, 28 Mar 2023 15:52:44 +0530
Subject: [PATCH 1034/1065] Update CHANGELOG.md

---
 CHANGELOG.md | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4e7081933..da9384f2e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,28 @@
+# 2.9.1 FW updates
+
+## Removals / Downgrades
+- Downgraded elasticsearch library from 8 to 6 to support the existing es clusters of haystack. 
+- Downgraded various other libraries for the same
+- es scrolls disabled
+- replaced search queries with msearch for all es querying
+
+
+## New Features
+- [Alertmanager] added tenant specific config to alertmanager
+- [Prometheus] added tenant and modified prometheus route and port as per haystack requirements
+- [Haystack] Added Kibana adapter support for querying from router
+- [Haystack] Added url_prefix for kibana adapter
+- [Haystack] Term-query optimizations
+- [Engine changes] Common index can be configured directly in config.yaml
+- [Trace Alerts] Added Error Rate rule that hits router aggregate endpoint
+- [Trace Alerts] Error rate rule enhancements
+  - error_calculation_method config for users to decide between two different error rate calculation methods.
+  - default values for unique_column and
+  - Improved test cases
+  - default error_rate configs - unique_column and error_condition
+- [Dockerfile] Distroless Docker setup for python 3 - elastalert
+- [Dockerfile] Docker optimization to fix create_index not running bug
+
 # 2.TBD.TBD
 
 ## Breaking changes

From f2a4319e583542bd40c1d8aa9a7d3e82a972a862 Mon Sep 17 00:00:00 2001
From: ajaywk7
Date: Tue, 28 Mar 2023 15:53:21 +0530
Subject: [PATCH 1035/1065] Update CHANGELOG.md

---
 CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index da9384f2e..e09e49ce7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,6 @@
 # 2.9.1 FW updates
 
-## Removals / Downgrades
+## Breaking changes
 - Downgraded elasticsearch library from 8 to 6 to support the existing es clusters of haystack.
 - Downgraded various other libraries for the same
 - es scrolls disabled

From d23582614dbd189116801381d34b06518fc8247e Mon Sep 17 00:00:00 2001
From: ajaywk7
Date: Tue, 28 Mar 2023 15:57:23 +0530
Subject: [PATCH 1036/1065] Update CHANGELOG.md

---
 CHANGELOG.md | 20 +++++++++----------
 1 file changed, 9 insertions(+), 11 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e09e49ce7..db528badd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,25 +1,23 @@
 # 2.9.1 FW updates
 
-## Breaking changes
-- Downgraded elasticsearch library from 8 to 6 to support the existing es clusters of haystack.
-- Downgraded various other libraries for the same
-- es scrolls disabled
+## Breaking Changes
+- Downgraded elasticsearch library from version 8 to version 6 for the engine to support the existing es clusters of haystack.
+- Downgraded various other libraries for the same.
+- es scrolls disabled as per requirement
 - replaced search queries with msearch for all es querying
-
 
 ## New Features
-- [Alertmanager] added tenant specific config to alertmanager
-- [Prometheus] added tenant and modified prometheus route and port as per haystack requirements
+- [Alertmanager] Added tenant specific config to alertmanager
+- [Prometheus] Added tenant config and modified prometheus route & port as per haystack requirements
 - [Haystack] Added Kibana adapter support for querying from router
 - [Haystack] Added url_prefix for kibana adapter
 - [Haystack] Term-query optimizations
 - [Engine changes] Common index can be configured directly in config.yaml
 - [Trace Alerts] Added Error Rate rule that hits router aggregate endpoint
 - [Trace Alerts] Error rate rule enhancements
-   - error_calculation_method config for users to decide between two different error rate calculation methods.
- - default values for unique_column and - - Improved test cases - - default error_rate configs - unique_column and error_condition + - error_calculation_method config for users to decide between two different error_rate calculation methods. + - Default values for unique_column and error_condition + - Added Test cases for Error Rate Alert type - [Dockerfile] Distroless Docker setup for python 3 - elastalert - [Dockerfile] Docker optimization to fix create_index not running bug From b775438ff7909e6d9418b74926eb1d4a5f1ae430 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Tue, 28 Mar 2023 16:07:41 +0530 Subject: [PATCH 1037/1065] removed unnecessary method args --- elastalert/elastalert.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 840e2933d..75b8d1272 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -230,8 +230,7 @@ def get_msearch_query(query, rule): return request @staticmethod - def get_query(filters, starttime=None, endtime=None, sort=True, timestamp_field='@timestamp', to_ts_func=dt_to_ts, desc=False, - five=False): + def get_query(filters, starttime=None, endtime=None, sort=True, timestamp_field='@timestamp', to_ts_func=dt_to_ts, desc=False): """ Returns a query dict that will apply a list of filters, filter by start and end time, and sort results by timestamp. From 141bd6e51cf8f8764abb6428d3ea83c27443ef2a Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 3 Apr 2023 16:39:18 +0530 Subject: [PATCH 1038/1065] initial commit --- elastalert/elastalert.py | 2 +- elastalert/prometheus_wrapper.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 75b8d1272..9a00782bf 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -94,7 +94,7 @@ def parse_args(self, args): help='Enable logging from Elasticsearch queries as curl command. Queries will be logged to file. 
Note that ' 'this will incorrectly display localhost:9200 as the host/port') #prometheus port changes - parser.add_argument('--prometheus_port', type=int, dest='prometheus_port', default=9090, help='Enables Prometheus metrics on specified port.') + parser.add_argument('--prometheus_port', type=int, dest='prometheus_port', default=9099, help='Enables Prometheus metrics on specified port.') self.args = parser.parse_args(args) def __init__(self, args): diff --git a/elastalert/prometheus_wrapper.py b/elastalert/prometheus_wrapper.py index e20ac0b88..9c806fa7c 100644 --- a/elastalert/prometheus_wrapper.py +++ b/elastalert/prometheus_wrapper.py @@ -48,6 +48,9 @@ def metrics_writeback(self, doc_type, body, rule=None, match_body=None): else: self.prom_alerts_not_sent.labels(body['rule_name']).inc() elif doc_type == 'elastalert_error': + print("coming_here") + print(body) + print("pt 2") self.prom_errors.inc() elif doc_type == 'silence': self.prom_alerts_silenced.labels(body['rule_name']).inc() From f3fc09283000613b14b2439f54727b614bd5c487 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Wed, 5 Apr 2023 14:38:47 +0530 Subject: [PATCH 1039/1065] added rule name to send alert logs --- elastalert/elastalert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 9a00782bf..2cdd91bf1 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -1509,7 +1509,7 @@ def send_alert(self, matches, rule, alert_time=None, retried=False): try: alert.alert(matches) except EAException as e: - self.handle_error('Error while running alert %s: %s' % (alert.get_info()['type'], e), {'rule': rule['name']}) + self.handle_error('Error while running alert %s ( Tenant : %s , Rule : %s ) - %s' % (alert.get_info()['type'], rule.get('tenant') , rule.get('name'), e), {'rule': rule['name']}) alert_exception = str(e) else: self.thread_data.alerts_sent += 1 From 7dcdc6bb3c572595c50e04eb80bbce1a25c22824 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Thu, 20 Apr 2023 15:06:04 +0530 Subject: [PATCH 1040/1065] create-index bug fix --- elastalert/create_index.py | 39 ++++++++++++++++++++++---------------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/elastalert/create_index.py b/elastalert/create_index.py index 58c7dd80d..26971624f 100644 --- a/elastalert/create_index.py +++ b/elastalert/create_index.py @@ -24,9 +24,14 @@ def create_index_mappings(es_client, ea_index, recreate=False, old_ea_index=None esversion = get_version_from_cluster_info(es_client) es_index_mappings = {} - - #using es_mappings 7 - es_index_mappings = read_es_index_mappings(7) + + if is_atleasteight(esversion): + es_index_mappings = read_es_index_mappings() + elif is_atleastseven(esversion) or is_atleastsix(esversion): + es_index_mappings = read_es_index_mappings(7) + else: + print('FATAL - Unsupported Elasticsearch version: ' + esversion + '. Aborting.') + exit(1) es_index = IndicesClient(es_client) if not recreate: @@ -35,7 +40,7 @@ def create_index_mappings(es_client, ea_index, recreate=False, old_ea_index=None return None # (Re-)Create indices. 
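    # Mapping creation is version-gated: ES 8+ accepts typeless put_mapping calls,
    # ES 7 still passes doc_type='_doc' together with include_type_name=True, and
    # ES 6 keeps one mapping per document type - hence the separate branches below.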
- if is_atleastseven(esversion): + if is_atleastseven(esversion) or is_atleastsix(esversion): index_names = ( ea_index, ea_index + '_status', @@ -70,18 +75,17 @@ def create_index_mappings(es_client, ea_index, recreate=False, old_ea_index=None body=es_index_mappings['elastalert_error']) es_client.indices.put_mapping(index=ea_index + '_past', body=es_index_mappings['past_elastalert']) - elif is_atleastseven(esversion): - es_client.indices.put_mapping(index=ea_index, doc_type='_doc', - body=es_index_mappings['elastalert'], include_type_name=True) - es_client.indices.put_mapping(index=ea_index + '_status', doc_type='_doc', - body=es_index_mappings['elastalert_status'], include_type_name=True) - es_client.indices.put_mapping(index=ea_index + '_silence', doc_type='_doc', - body=es_index_mappings['silence'], include_type_name=True) - es_client.indices.put_mapping(index=ea_index + '_error', doc_type='_doc', - body=es_index_mappings['elastalert_error'], include_type_name=True) - es_client.indices.put_mapping(index=ea_index + '_past', doc_type='_doc', - body=es_index_mappings['past_elastalert'], include_type_name=True) - + elif is_atleastseven(esversion) or is_atleastsix(esversion): + es_client.indices.put_mapping(index=ea_index, doc_type='elastalert', + body=es_index_mappings['elastalert']) + es_client.indices.put_mapping(index=ea_index + '_status', doc_type='elastalert_status', + body=es_index_mappings['elastalert_status']) + es_client.indices.put_mapping(index=ea_index + '_silence', doc_type='silence', + body=es_index_mappings['silence']) + es_client.indices.put_mapping(index=ea_index + '_error', doc_type='elastalert_error', + body=es_index_mappings['elastalert_error']) + es_client.indices.put_mapping(index=ea_index + '_past', doc_type='past_elastalert', + body=es_index_mappings['past_elastalert']) print('New index %s created' % ea_index) if old_ea_index: print("Copying all data from old index '{0}' to new index '{1}'".format(old_ea_index, ea_index)) @@ -110,6 +114,9 @@ def read_es_index_mapping(mapping, es_version=7): print("Reading index mapping '{0}'".format(mapping_path)) return json.load(f) +def is_atleastsix(es_version): + return int(es_version.split(".")[0]) >= 6 + def is_atleastseven(es_version): return int(es_version.split(".")[0]) >= 7 From 06a382a101cad00fbe779aa79f1fda16a01c21cb Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Thu, 20 Apr 2023 16:46:25 +0530 Subject: [PATCH 1041/1065] separated six and seven put mapping definitions --- elastalert/create_index.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/elastalert/create_index.py b/elastalert/create_index.py index 26971624f..95600c9c9 100644 --- a/elastalert/create_index.py +++ b/elastalert/create_index.py @@ -75,7 +75,18 @@ def create_index_mappings(es_client, ea_index, recreate=False, old_ea_index=None body=es_index_mappings['elastalert_error']) es_client.indices.put_mapping(index=ea_index + '_past', body=es_index_mappings['past_elastalert']) - elif is_atleastseven(esversion) or is_atleastsix(esversion): + elif is_atleastseven(esversion) : + es_client.indices.put_mapping(index=ea_index, doc_type='_doc', + body=es_index_mappings['elastalert'], include_type_name=True) + es_client.indices.put_mapping(index=ea_index + '_status', doc_type='_doc', + body=es_index_mappings['elastalert_status'], include_type_name=True) + es_client.indices.put_mapping(index=ea_index + '_silence', doc_type='_doc', + body=es_index_mappings['silence'], include_type_name=True) + es_client.indices.put_mapping(index=ea_index + 
'_error', doc_type='_doc', + body=es_index_mappings['elastalert_error'], include_type_name=True) + es_client.indices.put_mapping(index=ea_index + '_past', doc_type='_doc', + body=es_index_mappings['past_elastalert'], include_type_name=True) + elif is_atleastsix(esversion): es_client.indices.put_mapping(index=ea_index, doc_type='elastalert', body=es_index_mappings['elastalert']) es_client.indices.put_mapping(index=ea_index + '_status', doc_type='elastalert_status', From 279ca89a9f539e9e148da453915f8f7168d2597a Mon Sep 17 00:00:00 2001 From: aravind-musigumpula Date: Fri, 12 May 2023 17:26:22 +0530 Subject: [PATCH 1042/1065] fix default operator --- elastalert/loaders.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index c46266e8f..09b0deb1a 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -254,8 +254,11 @@ def load_yaml(self, filename): loaded = self.get_yaml(current_path) #Setting default operator for filters as AND as in elastalert-0.1.35 - if 'query_string' in loaded['filter'][0]: - loaded['filter'][0]['query_string']['default_operator'] = "AND" + if 'filter' in loaded: + for filter in loaded['filter']: + if 'query' in filter: + if 'query_string' in filter['query']: + filter['query']['query_string']['default_operator'] = "AND" # Special case for merging filters - if both files specify a filter merge (AND) them if 'filter' in rule and 'filter' in loaded: From 4ae2fa51fd9a305371ba38619680bfda843ae767 Mon Sep 17 00:00:00 2001 From: aravind-musigumpula Date: Fri, 12 May 2023 18:50:46 +0530 Subject: [PATCH 1043/1065] elastalert error for default operator --- elastalert/loaders.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 09b0deb1a..5421e26a7 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -256,9 +256,11 @@ def load_yaml(self, filename): #Setting default operator for filters as AND as in elastalert-0.1.35 if 'filter' in loaded: for filter in loaded['filter']: - if 'query' in filter: - if 'query_string' in filter['query']: + if 'query' in filter and filter['query'] != None: + if 'query_string' in filter['query'] and filter['query']['query_string']!= None: filter['query']['query_string']['default_operator'] = "AND" + else: + elastalert_logger.info("Query is None in file: %s",filename) # Special case for merging filters - if both files specify a filter merge (AND) them if 'filter' in rule and 'filter' in loaded: From 2af9dc0d6c15b9b666c54e00bfdb5449502ab796 Mon Sep 17 00:00:00 2001 From: aravind-musigumpula Date: Tue, 16 May 2023 18:02:18 +0530 Subject: [PATCH 1044/1065] fix response and query for percentile query --- elastalert/ruletypes.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index bd9a1a35b..80cb17ff7 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -1134,6 +1134,7 @@ def generate_aggregation_query(self): query = {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: query[self.metric_key][self.rules['metric_agg_type']]['percents'] = [self.rules['percentile_range']] + query[self.metric_key][self.rules['metric_agg_type']]['keyed'] = False return query def check_matches(self, timestamp, query_key, aggregation_data): @@ -1143,7 +1144,8 @@ def check_matches(self, timestamp, query_key, aggregation_data): 
else: if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: #backwards compatibility with existing elasticsearch library - metric_val = list(aggregation_data[self.metric_key]['values'][0].values())[0] + #aggregation_data = {"doc_count":258757,"key":"appmailer","metric_qt_percentiles":{"values":[{"key":95,"value":0}]}} + metric_val = aggregation_data[self.metric_key]['values'][0]['value'] else: metric_val = aggregation_data[self.metric_key]['value'] if self.crossed_thresholds(metric_val): From 04476287339464a9d42599cd2e97dc19ba915501 Mon Sep 17 00:00:00 2001 From: aravind-musigumpula Date: Wed, 17 May 2023 15:10:16 +0530 Subject: [PATCH 1045/1065] add testcase for percentile metric aggregation --- elastalert/ruletypes.py | 4 +-- tests/rules_test.py | 68 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 70 insertions(+), 2 deletions(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 80cb17ff7..dff6ce322 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -1192,9 +1192,9 @@ def check_matches_recursive(self, timestamp, query_key, aggregation_data, compou def crossed_thresholds(self, metric_value): if metric_value is None: return False - if 'max_threshold' in self.rules and int(metric_value) > self.rules['max_threshold']: + if 'max_threshold' in self.rules and float(metric_value) > self.rules['max_threshold']: return True - if 'min_threshold' in self.rules and int(metric_value) < self.rules['min_threshold']: + if 'min_threshold' in self.rules and float(metric_value) < self.rules['min_threshold']: return True return False diff --git a/tests/rules_test.py b/tests/rules_test.py index 06375a333..452cc453f 100644 --- a/tests/rules_test.py +++ b/tests/rules_test.py @@ -1207,6 +1207,74 @@ def test_metric_aggregation(): rule.check_matches(datetime.datetime.now(), 'qk_val', {'metric_cpu_pct_avg': {'value': 0.95}}) assert rule.matches[0]['subdict1']['subdict2']['subdict3'] == 'qk_val' +def test_percentile_metric_aggregation(): + rules = {'buffer_time': datetime.timedelta(minutes=5), + 'timestamp_field': '@timestamp', + 'metric_agg_type': 'percentiles', + 'percentile_range': 95, + 'metric_agg_key': 'cpu_pct'} + + # Check threshold logic + with pytest.raises(EAException): + rule = MetricAggregationRule(rules) + + rules['min_threshold'] = 0.1 + rules['max_threshold'] = 0.8 + + rule = MetricAggregationRule(rules) + + assert rule.rules['aggregation_query_element'] == {'metric_cpu_pct_percentiles': {'percentiles': {'field': 'cpu_pct', 'percents': [95]}}} + + assert rule.crossed_thresholds(None) is False + assert rule.crossed_thresholds(0.09) is True + assert rule.crossed_thresholds(0.10) is False + assert rule.crossed_thresholds(0.79) is False + assert rule.crossed_thresholds(0.81) is True + + rule.check_matches(datetime.datetime.now(), None, {"doc_count":258757,"key":"appmailer","metric_cpu_pct_percentiles":{"values":[{"key":95,"value":None}]}}) + rule.check_matches(datetime.datetime.now(), None, {"doc_count":258757,"key":"appmailer","metric_cpu_pct_percentiles":{"values":[{"key":95,"value":0.5}]}}) + assert len(rule.matches) == 0 + + rule.check_matches(datetime.datetime.now(), None, {"doc_count":258757,"key":"appmailer","metric_cpu_pct_percentiles":{"values":[{"key":95,"value":0.05}]}}) + rule.check_matches(datetime.datetime.now(), None, {"doc_count":258757,"key":"appmailer","metric_cpu_pct_percentiles":{"values":[{"key":95,"value":0.95}]}}) + assert len(rule.matches) == 2 + + rule = MetricAggregationRule(rules) + 
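    # Note: percentiles aggs are requested with keyed=False (see generate_aggregation_query
    # above), so Elasticsearch returns 'values' as a list of {'key', 'value'} dicts rather
    # than a keyed object; the mock responses below mirror that shape.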
rule.check_matches(datetime.datetime.now(), None, {"doc_count":258757,"key":"appmailer","metric_cpu_pct_percentiles":{"values":[{"key":95,"value":0.966666667}]}}) + assert '0.966666667' in rule.get_match_str(rule.matches[0]) + assert rule.matches[0]['metric_cpu_pct_percentiles'] == 0.966666667 + assert rule.matches[0]['metric_agg_value'] == 0.966666667 + assert 'metric_cpu_pct_avg_formatted' not in rule.matches[0] + assert 'metric_agg_value_formatted' not in rule.matches[0] + + rules['metric_format_string'] = '{:.2%}' + rule = MetricAggregationRule(rules) + rule.check_matches(datetime.datetime.now(), None, {"doc_count":258757,"key":"appmailer","metric_cpu_pct_percentiles":{"values":[{"key":95,"value":0.966666667}]}}) + assert '96.67%' in rule.get_match_str(rule.matches[0]) + assert rule.matches[0]['metric_cpu_pct_percentiles'] == 0.966666667 + assert rule.matches[0]['metric_agg_value'] == 0.966666667 + assert rule.matches[0]['metric_cpu_pct_percentiles_formatted'] == '96.67%' + assert rule.matches[0]['metric_agg_value_formatted'] == '96.67%' + + rules['metric_format_string'] = '%.2f' + rule = MetricAggregationRule(rules) + rule.check_matches(datetime.datetime.now(), None, {"doc_count":258757,"key":"appmailer","metric_cpu_pct_percentiles":{"values":[{"key":95,"value":0.966666667}]}}) + assert '0.97' in rule.get_match_str(rule.matches[0]) + assert rule.matches[0]['metric_cpu_pct_percentiles'] == 0.966666667 + assert rule.matches[0]['metric_agg_value'] == 0.966666667 + assert rule.matches[0]['metric_cpu_pct_percentiles_formatted'] == '0.97' + assert rule.matches[0]['metric_agg_value_formatted'] == '0.97' + + rules['query_key'] = 'subdict' + rule = MetricAggregationRule(rules) + rule.check_matches(datetime.datetime.now(), 'qk_val', {'metric_cpu_pct_percentiles': {"values":[{"key":95,"value":0.95}]}}) + assert rule.matches[0]['subdict'] == 'qk_val' + + rules['query_key'] = 'subdict1.subdict2.subdict3' + rule = MetricAggregationRule(rules) + rule.check_matches(datetime.datetime.now(), 'qk_val', {'metric_cpu_pct_percentiles': {"values":[{"key":95,"value":0.95}]}}) + assert rule.matches[0]['subdict1']['subdict2']['subdict3'] == 'qk_val' + def test_metric_aggregation_complex_query_key(): rules = {'buffer_time': datetime.timedelta(minutes=5), From dac7b19df733ae197333ace93dc86fe1a5f1f86f Mon Sep 17 00:00:00 2001 From: aravind-musigumpula Date: Thu, 8 Jun 2023 15:04:44 +0530 Subject: [PATCH 1046/1065] add keyed false --- tests/rules_test.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/rules_test.py b/tests/rules_test.py index 452cc453f..c77a86f9d 100644 --- a/tests/rules_test.py +++ b/tests/rules_test.py @@ -1222,8 +1222,7 @@ def test_percentile_metric_aggregation(): rules['max_threshold'] = 0.8 rule = MetricAggregationRule(rules) - - assert rule.rules['aggregation_query_element'] == {'metric_cpu_pct_percentiles': {'percentiles': {'field': 'cpu_pct', 'percents': [95]}}} + assert rule.rules['aggregation_query_element'] == {'metric_cpu_pct_percentiles': {'percentiles': {'field': 'cpu_pct', 'percents': [95],'keyed': False}}} assert rule.crossed_thresholds(None) is False assert rule.crossed_thresholds(0.09) is True From d4c500b5d0ac1619b5eea61b6b776ad637a50345 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Fri, 9 Jun 2023 12:52:13 +0530 Subject: [PATCH 1047/1065] New term integration (#24) * updated changes * added except block in get_new_terms * new-terms - composite field support added * updated test-cases for new-term * new-terms - made default size 500 * new-terms - updates 
test cases * new-term - set limit for terms_size * refresh interval - test cases added & other test case fixes * added missing except block & increase request_timeout in get_all_terms * bug fix - to avoid crash if new term initialization fails * making use_keyword_postfix False by default * test cases fixed * added testcase for upper and lower bounds * moved get_msearch_query to utils * unnecessary lines removal * revert initialization exception bypass --- elastalert/elastalert.py | 89 +++++++--- elastalert/loaders.py | 2 +- elastalert/ruletypes.py | 176 +++++++++++++------ elastalert/util.py | 16 ++ tests/rules_test.py | 362 ++++++++++++++++++++++++++------------- 5 files changed, 448 insertions(+), 197 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 2cdd91bf1..bee8537e1 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -29,7 +29,7 @@ from elasticsearch.exceptions import ElasticsearchException from elasticsearch.exceptions import NotFoundError from elasticsearch.exceptions import TransportError -from elastalert.ruletypes import ErrorRateRule +from elastalert.ruletypes import ErrorRateRule, NewTermsRule from elastalert.alerters.debug import DebugAlerter from elastalert.config import load_conf @@ -39,7 +39,7 @@ from elastalert.prometheus_wrapper import PrometheusWrapper from elastalert.ruletypes import FlatlineRule from elastalert.util import (add_raw_postfix, cronite_datetime_to_timestamp, dt_to_ts, dt_to_unix, EAException, - elastalert_logger, elasticsearch_client,kibana_adapter_client, format_index, lookup_es_key, parse_deadline, + elastalert_logger, elasticsearch_client, get_msearch_query,kibana_adapter_client, format_index, lookup_es_key, parse_deadline, parse_duration, pretty_ts, replace_dots_in_field_names, seconds, set_es_key, should_scrolling_continue, total_seconds, ts_add, ts_now, ts_to_dt, unix_to_dt, ts_utc_to_tz, dt_to_ts_with_format) @@ -212,23 +212,6 @@ def get_index(rule, starttime=None, endtime=None): else: return index - - #backwards compatibility with es6 msearch - @staticmethod - def get_msearch_query(query, rule): - search_arr = [] - search_arr.append({'index': [rule['index']]}) - if rule.get('use_count_query'): - query['size'] = 0 - if rule['include']: - query['_source'] = {} - query['_source']['includes'] = rule['include'] - search_arr.append(query) - request = '' - for each in search_arr: - request += '%s \n' %json.dumps(each) - return request - @staticmethod def get_query(filters, starttime=None, endtime=None, sort=True, timestamp_field='@timestamp', to_ts_func=dt_to_ts, desc=False): """ Returns a query dict that will apply a list of filters, filter by start and end time, and sort results by timestamp. @@ -402,7 +385,7 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): to_ts_func=rule['dt_to_ts'], ) - request = self.get_msearch_query(query,rule) + request = get_msearch_query(query,rule) #removed scroll as it isn't supported # extra_args = {'_source_includes': rule['include']} @@ -470,6 +453,58 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): return hits + def get_new_terms_data(self, rule, starttime, endtime, field): + new_terms = [] + + rule_inst = rule["type"] + try: + query = rule_inst.get_new_term_query(starttime,endtime,field) + request = get_msearch_query(query,rule) + res = self.thread_data.current_es.msearch(body=request) + res = res['responses'][0] + + if 'aggregations' in res: + buckets = res['aggregations']['values']['buckets'] + if type(field) == list: + for bucket in buckets: + new_terms +=
rule_inst.flatten_aggregation_hierarchy(bucket) + else: + new_terms = [bucket['key'] for bucket in buckets] + + except ElasticsearchException as e: + if len(str(e)) > 1024: + e = str(e)[:1024] + '... (%d characters removed)' % (len(str(e)) - 1024) + self.handle_error('Error running new terms query: %s' % (e), {'rule': rule['name'], 'query': query}) + return [] + + return new_terms + + + + + def get_new_terms(self,rule, starttime, endtime): + data = {} + + for field in rule['fields']: + new_terms = self.get_new_terms_data(rule,starttime,endtime,field) + self.thread_data.num_hits += len(new_terms) + if type(field) == list: + data[tuple(field)] = new_terms + else: + data[field] = new_terms + + lt = rule.get('use_local_time') + status_log = "Queried rule %s from %s to %s: %s / %s hits" % ( + rule['name'], + pretty_ts(starttime, lt, self.pretty_ts_format), + pretty_ts(endtime, lt, self.pretty_ts_format), + self.thread_data.num_hits, + self.thread_data.num_hits, + ) + elastalert_logger.info(status_log) + + return {endtime : data} + def get_hits_count(self, rule, starttime, endtime, index): """ Query Elasticsearch for the count of results and returns a list of timestamps equal to the endtime. This allows the results to be passed to rules which expect @@ -489,7 +524,7 @@ def get_hits_count(self, rule, starttime, endtime, index): to_ts_func=rule['dt_to_ts'], ) - request = self.get_msearch_query(query,rule) + request = get_msearch_query(query,rule) try: #using backwards compatible msearch @@ -542,7 +577,7 @@ def get_hits_terms(self, rule, starttime, endtime, index, key, qk=None, size=None): if size is None: size = rule.get('terms_size', 50) query = self.get_terms_query(base_query, rule, size, key) - request = self.get_msearch_query(query,rule) + request = get_msearch_query(query,rule) try: #using backwards compatible msearch @@ -582,7 +617,7 @@ def get_hits_aggregation(self, rule, starttime, endtime, index, query_key, term_ if term_size is None: term_size = rule.get('terms_size', 50) query = self.get_aggregation_query(base_query, rule, query_key, term_size, rule['timestamp_field']) - request = self.get_msearch_query(query,rule) + request = get_msearch_query(query,rule) try: #using backwards compatible msearch res = self.thread_data.current_es.msearch(body=request) @@ -713,7 +748,9 @@ def run_query(self, rule, start=None, end=None, scroll=False): rule_inst = rule['type'] rule['scrolling_cycle'] = rule.get('scrolling_cycle', 0) + 1 index = self.get_index(rule, start, end) - if rule.get('use_count_query'): + if isinstance(rule_inst, NewTermsRule): + data = self.get_new_terms(rule, start, end) + elif rule.get('use_count_query'): data = self.get_hits_count(rule, start, end, index) elif rule.get('use_terms_query'): data = self.get_hits_terms(rule, start, end, index, rule['query_key']) @@ -732,7 +769,9 @@ def run_query(self, rule, start=None, end=None, scroll=False): if data is None: return False elif data: - if rule.get('use_count_query'): + if isinstance(rule_inst, NewTermsRule): + rule_inst.add_new_term_data(data) + elif rule.get('use_count_query'): rule_inst.add_count_data(data) elif rule.get('use_terms_query'): rule_inst.add_terms_data(data) diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 5421e26a7..a1a07837a 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -173,7 +173,7 @@ def load(self, conf, args=None): continue if rule['name'] in names: raise EAException('Duplicate rule named %s' % (rule['name'])) - except EAException as e: + except EAException as e: raise
EAException('Error loading file %s: %s' % (rule_file, e)) rules.append(rule) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index dff6ce322..af042d292 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -7,7 +7,7 @@ from sortedcontainers import SortedKeyList as sortedlist from elastalert.util import (add_raw_postfix, dt_to_ts, EAException, elastalert_logger, elasticsearch_client, - format_index, hashable, lookup_es_key, new_get_event_ts, pretty_ts, total_seconds, + format_index, get_msearch_query, hashable, kibana_adapter_client, lookup_es_key, new_get_event_ts, pretty_ts, total_seconds, ts_now, ts_to_dt, expand_string_into_dict, format_string) @@ -677,6 +677,21 @@ class NewTermsRule(RuleType): def __init__(self, rule, args=None): super(NewTermsRule, self).__init__(rule, args) self.seen_values = {} + self.last_updated_at = None + self.es = kibana_adapter_client(self.rules) + + # terms_window_size : Default & Upperbound - 7 Days + self.window_size = min(datetime.timedelta(**self.rules.get('terms_window_size', {'days': 7})), datetime.timedelta(**{'days': 7})) + + # window_step_size : Default - 1 Day, Lowerbound: 6 hours + self.step = max( datetime.timedelta(**self.rules.get('window_step_size', {'days': 1})), datetime.timedelta(**{'hours': 6}) ) + + # refresh_interval : Default - 6 hours, Lowerbound: 6 hours + self.refresh_interval = max( datetime.timedelta(**self.rules.get('refresh_interval', {'hours': 6})), datetime.timedelta(**{'hours': 6}) ) + + # terms_size : Default - 500, Upperbound: 1000 + self.terms_size = min(self.rules.get('terms_size', 500),1000) + # Allow the use of query_key or fields if 'fields' not in self.rules: if 'query_key' not in self.rules: @@ -695,73 +710,116 @@ def __init__(self, rule, args=None): if [self.rules['query_key']] != self.fields: raise EAException('If use_terms_query is specified, you cannot specify different query_key and fields') if not self.rules.get('query_key').endswith('.keyword') and not self.rules.get('query_key').endswith('.raw'): - if self.rules.get('use_keyword_postfix', True): + if self.rules.get('use_keyword_postfix', False): # making it false by default as we won't use the keyword suffix elastalert_logger.warn('Warning: If query_key is a non-keyword field, you must set ' 'use_keyword_postfix to false, or add .keyword/.raw to your query_key.') + + def should_refresh_terms(self): + return self.last_updated_at is None or self.last_updated_at < ( ts_now() - self.refresh_interval) + + def update_terms(self,args=None): try: - self.get_all_terms(args) + self.get_all_terms(args=args) except Exception as e: # Refuse to start if we cannot get existing terms raise EAException('Error searching for existing terms: %s' % (repr(e))).with_traceback(sys.exc_info()[2]) + + + def get_new_term_query(self,starttime,endtime,field): + + field_name = { + "field": "", + "size": self.terms_size, + "order": { + "_count": "desc" + } + } + + query = { + "aggs": { + "values": { + "terms": field_name + } + } + } + + query["query"] = { + 'bool': { + 'filter': { + 'bool': { + 'must': [{ + 'range': { + self.rules['timestamp_field']: { + 'lt': self.rules['dt_to_ts'](endtime), + 'gte': self.rules['dt_to_ts'](starttime) + } + } + }] + } + } + } + } + + filter_level = query['query']['bool']['filter']['bool']['must'] + if 'filter' in self.rules: + for item in self.rules['filter']: + filter_level.append(item) + + # For composite keys, we will need to perform sub-aggregations + if type(field) == list: +
self.seen_values.setdefault(tuple(field), []) + level = query['aggs'] + # Iterate on each part of the composite key and add a sub aggs clause to the elastic search query + for i, sub_field in enumerate(field): + if self.rules.get('use_keyword_postfix', False): # making it false by default as we won't use the keyword suffix + level['values']['terms']['field'] = add_raw_postfix(sub_field, True) + else: + level['values']['terms']['field'] = sub_field + if i < len(field) - 1: + # If we have more fields after the current one, then set up the next nested structure + level['values']['aggs'] = {'values': {'terms': copy.deepcopy(field_name)}} + level = level['values']['aggs'] + else: + self.seen_values.setdefault(field, []) + # For non-composite keys, only a single agg is needed + if self.rules.get('use_keyword_postfix', False):# making it false by default as we won't use the keyword suffix + field_name['field'] = add_raw_postfix(field, True) + else: + field_name['field'] = field + + return query + + + + + def get_all_terms(self,args): """ Performs a terms aggregation for each field to get every existing term. """ - self.es = elasticsearch_client(self.rules) - window_size = datetime.timedelta(**self.rules.get('terms_window_size', {'days': 30})) - field_name = {"field": "", "size": 2147483647} # Integer.MAX_VALUE - query_template = {"aggs": {"values": {"terms": field_name}}} + if args and hasattr(args, 'start') and args.start: end = ts_to_dt(args.start) elif 'start_date' in self.rules: end = ts_to_dt(self.rules['start_date']) else: end = ts_now() - start = end - window_size - step = datetime.timedelta(**self.rules.get('window_step_size', {'days': 1})) + start = end - self.window_size + for field in self.fields: tmp_start = start - tmp_end = min(start + step, end) - - time_filter = {self.rules['timestamp_field']: {'lt': self.rules['dt_to_ts'](tmp_end), 'gte': self.rules['dt_to_ts'](tmp_start)}} - query_template['filter'] = {'bool': {'must': [{'range': time_filter}]}} - query = {'aggs': {'filtered': query_template}, 'size': 0} - - if 'filter' in self.rules: - for item in self.rules['filter']: - query_template['filter']['bool']['must'].append(item) - - # For composite keys, we will need to perform sub-aggregations - if type(field) == list: - self.seen_values.setdefault(tuple(field), []) - level = query_template['aggs'] - # Iterate on each part of the composite key and add a sub aggs clause to the elastic search query - for i, sub_field in enumerate(field): - if self.rules.get('use_keyword_postfix', True): - level['values']['terms']['field'] = add_raw_postfix(sub_field, True) - else: - level['values']['terms']['field'] = sub_field - if i < len(field) - 1: - # If we have more fields after the current one, then set up the next nested structure - level['values']['aggs'] = {'values': {'terms': copy.deepcopy(field_name)}} - level = level['values']['aggs'] - else: - self.seen_values.setdefault(field, []) - # For non-composite keys, only a single agg is needed - if self.rules.get('use_keyword_postfix', True): - field_name['field'] = add_raw_postfix(field, True) - else: - field_name['field'] = field + tmp_end = min(start + self.step, end) + query = self.get_new_term_query(tmp_start,tmp_end,field) # Query the entire time range in small chunks while tmp_start < end: - if self.rules.get('use_strftime_index'): - index = format_index(self.rules['index'], tmp_start, tmp_end) - else: - index = self.rules['index'] - res = self.es.search(body=query, index=index, doc_type='elastalert_status', ignore_unavailable=True,
timeout='50s') + + msearch_query = get_msearch_query(query,self.rules) + + res = self.es.msearch(msearch_query,request_timeout=50) + res = res['responses'][0] + if 'aggregations' in res: - buckets = res['aggregations']['filtered']['values']['buckets'] + buckets = res['aggregations']['values']['buckets'] if type(field) == list: # For composite keys, make the lookup based on all fields # Make it a tuple since it can be hashed and used in dictionary lookups @@ -779,9 +837,9 @@ def get_all_terms(self, args): if tmp_start == tmp_end: break tmp_start = tmp_end - tmp_end = min(tmp_start + step, end) - time_filter[self.rules['timestamp_field']] = {'lt': self.rules['dt_to_ts'](tmp_end), - 'gte': self.rules['dt_to_ts'](tmp_start)} + tmp_end = min(tmp_start + self.step, end) + query = self.get_new_term_query(tmp_start,tmp_end,field) + for key, values in self.seen_values.items(): if not values: @@ -798,6 +856,7 @@ def get_all_terms(self, args): continue self.seen_values[key] = list(set(values)) elastalert_logger.info('Found %s unique values for %s' % (len(set(values)), key)) + self.last_updated_at = ts_now() def flatten_aggregation_hierarchy(self, root, hierarchy_tuple=()): """ For nested aggregations, the results come back in the following format: @@ -902,6 +961,23 @@ def flatten_aggregation_hierarchy(self, root, hierarchy_tuple=()): results.append(hierarchy_tuple + (node['key'],)) return results + def add_new_term_data(self, payload): + if self.should_refresh_terms(): + self.update_terms() + timestamp = list(payload.keys())[0] + data = payload[timestamp] + for field in self.fields: + lookup_key =tuple(field) if type(field) == list else field + for value in data[lookup_key]: + if value not in self.seen_values[lookup_key]: + match = { + "field": lookup_key, + self.rules['timestamp_field']: timestamp, + 'new_value': tuple(value) if type(field) == list else value + } + self.add_match(copy.deepcopy(match)) + self.seen_values[lookup_key].append(value) + def add_data(self, data): for document in data: for field in self.fields: diff --git a/elastalert/util.py b/elastalert/util.py index dff180228..86c5c3145 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -7,6 +7,7 @@ import sys import time import types +import json import dateutil.parser import pytz @@ -20,6 +21,21 @@ logging.captureWarnings(True) elastalert_logger = logging.getLogger('elastalert') +#backwards compatibility with es6 msearch +def get_msearch_query(query, rule): + search_arr = [] + search_arr.append({'index': [rule['index']]}) + if rule.get('use_count_query'): + query['size'] = 0 + if rule.get('include'): + query['_source'] = {} + query['_source']['includes'] = rule['include'] + search_arr.append(query) + request = '' + for each in search_arr: + request += '%s \n' %json.dumps(each) + return request + def get_module(module_name): """ Loads a module and returns a specific object. 
diff --git a/tests/rules_test.py b/tests/rules_test.py index c77a86f9d..ee9299879 100644 --- a/tests/rules_test.py +++ b/tests/rules_test.py @@ -567,14 +567,29 @@ def test_change(): def test_new_term(version): rules = {'fields': ['a', 'b'], 'timestamp_field': '@timestamp', - 'es_host': 'example.com', 'es_port': 10, 'index': 'logstash', + 'kibana_adapter': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash', 'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts} - mock_res = {'aggregations': {'filtered': {'values': {'buckets': [{'key': 'key1', 'doc_count': 1}, - {'key': 'key2', 'doc_count': 5}]}}}} + mock_res = { + 'responses': [{ + 'aggregations': { + 'values': { + 'buckets': [{ + 'key': 'key1', + 'doc_count': 1 + }, + { + 'key': 'key2', + 'doc_count': 5 + } + ] + } + } + }] + } - with mock.patch('elastalert.ruletypes.elasticsearch_client') as mock_es: + with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es: mock_es.return_value = mock.Mock() - mock_es.return_value.search.return_value = mock_res + mock_es.return_value.msearch.return_value = mock_res mock_es.return_value.info.return_value = version call_args = [] @@ -586,8 +601,7 @@ def record_args(*args, **kwargs): mock_es.return_value.search.side_effect = record_args rule = NewTermsRule(rules) - # 30 day default range, 1 day default step, times 2 fields - assert rule.es.search.call_count == 60 + # Assert that all calls have the proper ordering of time ranges old_ts = '2010-01-01T00:00:00Z' @@ -604,186 +618,292 @@ def record_args(*args, **kwargs): old_ts = gte # Key1 and key2 shouldn't cause a match - rule.add_data([{'@timestamp': ts_now(), 'a': 'key1', 'b': 'key2'}]) + data = { + ts_now() : { + "a": ["key1"], + "b": ["key2"] + } + } + rule.add_new_term_data(data) + + # 7 day default range, 1 day default step, times 2 fields + assert rule.es.msearch.call_count == 14 + + # rule.add_data([{'@timestamp': ts_now(), 'a': 'key1', 'b': 'key2'}]) + assert rule.matches == [] # Neither will missing values - rule.add_data([{'@timestamp': ts_now(), 'a': 'key2'}]) + data = { + ts_now() : { + "a": ["key2"], + "b": [] + } + } + rule.add_new_term_data(data) + # rule.add_data([{'@timestamp': ts_now(), 'a': 'key2'}]) assert rule.matches == [] # Key3 causes an alert for field b - rule.add_data([{'@timestamp': ts_now(), 'a': 'key2', 'b': 'key3'}]) + data = { + ts_now() : { + "a": ["key2"], + "b": ["key3"] + } + } + rule.add_new_term_data(data) + + #rule.add_data([{'@timestamp': ts_now(), 'a': 'key2', 'b': 'key3'}]) assert len(rule.matches) == 1 - assert rule.matches[0]['new_field'] == 'b' - assert rule.matches[0]['b'] == 'key3' + assert rule.matches[0]['field'] == 'b' + assert rule.matches[0]['new_value'] == 'key3' rule.matches = [] # Key3 doesn't cause another alert for field b - rule.add_data([{'@timestamp': ts_now(), 'a': 'key2', 'b': 'key3'}]) + data = { + ts_now() : { + "a": ["key2"], + "b": ["key3"] + } + } + rule.add_new_term_data(data) + # rule.add_data([{'@timestamp': ts_now(), 'a': 'key2', 'b': 'key3'}]) assert rule.matches == [] - # Missing_field - rules['alert_on_missing_field'] = True - with mock.patch('elastalert.ruletypes.elasticsearch_client') as mock_es: - mock_es.return_value = mock.Mock() - mock_es.return_value.search.return_value = mock_res - mock_es.return_value.info.return_value = version - rule = NewTermsRule(rules) - rule.add_data([{'@timestamp': ts_now(), 'a': 'key2'}]) - assert len(rule.matches) == 1 - assert rule.matches[0]['missing_field'] == 'b' + ## Missing field - won't work as we use terms aggregation + #
rules['alert_on_missing_field'] = True + # with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es: + # mock_es.return_value = mock.Mock() + # mock_es.return_value.msearch.return_value = mock_res + # mock_es.return_value.info.return_value = version + # rule = NewTermsRule(rules) + # data = { + # ts_now() : { + # "a": ["key2"], + # "b": [] + # } + # } + # rule.add_new_term_data(data) + # #rule.add_data([{'@timestamp': ts_now(), 'a': 'key2'}]) + # assert len(rule.matches) == 1 + # assert rule.matches[0]['missing_field'] == 'b' def test_new_term_nested_field(): rules = {'fields': ['a', 'b.c'], 'timestamp_field': '@timestamp', - 'es_host': 'example.com', 'es_port': 10, 'index': 'logstash', + 'kibana_adapter_host': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash', 'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts} - mock_res = {'aggregations': {'filtered': {'values': {'buckets': [{'key': 'key1', 'doc_count': 1}, - {'key': 'key2', 'doc_count': 5}]}}}} + mock_res ={'responses' : [{'aggregations': {'values': {'buckets': [{'key': 'key1', 'doc_count': 1}, {'key': 'key2', 'doc_count': 5}]}}}] } - with mock.patch('elastalert.ruletypes.elasticsearch_client') as mock_es: + with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es: mock_es.return_value = mock.Mock() - mock_es.return_value.search.return_value = mock_res + mock_es.return_value.msearch.return_value = mock_res mock_es.return_value.info.return_value = {'version': {'number': '2.x.x'}} rule = NewTermsRule(rules) - assert rule.es.search.call_count == 60 + # Key3 causes an alert for nested field b.c - rule.add_data([{'@timestamp': ts_now(), 'b': {'c': 'key3'}}]) + data = { + ts_now() : { + "a": [], + "b.c": ["key3"] + } + } + rule.add_new_term_data(data) + + assert rule.es.msearch.call_count == 14 + + # rule.add_data([{'@timestamp': ts_now(), 'b': {'c': 'key3'}}]) assert len(rule.matches) == 1 - assert rule.matches[0]['new_field'] == 'b.c' - assert rule.matches[0]['b']['c'] == 'key3' + assert rule.matches[0]['field'] == 'b.c' + assert rule.matches[0]['new_value'] == 'key3' rule.matches = [] +def test_new_term_refresh_interval(): -def test_new_term_with_terms(): rules = {'fields': ['a'], 'timestamp_field': '@timestamp', - 'es_host': 'example.com', 'es_port': 10, 'index': 'logstash', 'query_key': 'a', - 'window_step_size': {'days': 2}, - 'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts} - mock_res = {'aggregations': {'filtered': {'values': {'buckets': [{'key': 'key1', 'doc_count': 1}, - {'key': 'key2', 'doc_count': 5}]}}}} + 'kibana_adapter_host': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash', + 'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts, 'terms_window_size': {'days': 1 } } + mock_res ={'responses' : [{'aggregations': {'values': {'buckets': [{'key': 'key1', 'doc_count': 1}, {'key': 'key2', 'doc_count': 5}]}}}] } + + #random_test_data + data = { ts_now() : { "a": [] } } - with mock.patch('elastalert.ruletypes.elasticsearch_client') as mock_es: + with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es: mock_es.return_value = mock.Mock() - mock_es.return_value.search.return_value = mock_res + mock_es.return_value.msearch.return_value = mock_res mock_es.return_value.info.return_value = {'version': {'number': '2.x.x'}} + + + # Rule with refresh_interval not set, defaulting to 6 hours rule = NewTermsRule(rules) - # Only 15 queries because of custom step size - assert rule.es.search.call_count == 15 + # get_all_terms should not be called as last_updated will not be less than now - 6 hours + rule.add_new_term_data(data) + assert rule.es.msearch.assert_called + mock_es.return_value.msearch.reset_mock() - # Key1 and key2 shouldn't cause a match - terms = {ts_now(): [{'key': 'key1',
'doc_count': 1}, - {'key': 'key2', 'doc_count': 1}]} - rule.add_terms_data(terms) - assert rule.matches == [] + # get_all_terms should not be called as last_updated will be less than now - 6 hours + rule.add_new_term_data(data) + assert rule.es.msearch.assert_called + mock_es.return_value.msearch.reset_mock() - # Key3 causes an alert for field a - terms = {ts_now(): [{'key': 'key3', 'doc_count': 1}]} - rule.add_terms_data(terms) - assert len(rule.matches) == 1 - assert rule.matches[0]['new_field'] == 'a' - assert rule.matches[0]['a'] == 'key3' - rule.matches = [] + # get_all_terms should be called when last_updated is none + rule.last_updated_at = None + rule.add_new_term_data(data) + assert rule.es.msearch.assert_called_once + mock_es.return_value.msearch.reset_mock() - # Key3 doesn't cause another alert - terms = {ts_now(): [{'key': 'key3', 'doc_count': 1}]} - rule.add_terms_data(terms) - assert rule.matches == [] + # get_all_terms should not be called as last_updated will not be less than now - 6 hours + rule.last_updated_at = ts_now() - datetime.timedelta(**{'hours': 4}) + rule.add_new_term_data(data) + assert not rule.es.msearch.called + + # get_all_terms should be called as last_updated will be less than now - 6 hours + rule.last_updated_at = ts_now() - datetime.timedelta(**{'hours': 7}) + rule.add_new_term_data(data) + assert rule.es.msearch.assert_called_once + mock_es.return_value.msearch.reset_mock() + + # Rule with refresh_interval set to 2 hours + rules["refresh_interval"] = {'hours': 2} + rule = NewTermsRule(rules) + mock_es.return_value.msearch.reset_mock() + + # get_all_terms should not be called as last_updated will not be less than now - 2 hours + rule.last_updated_at = ts_now() - datetime.timedelta(**{'hours': 1}) + rule.add_new_term_data(data) + assert not rule.es.msearch.called + + # get_all_terms should be called as last_updated will be less than now - 2 hours + rule.last_updated_at = ts_now() - datetime.timedelta(**{'hours': 3}) + rule.add_new_term_data(data) + assert rule.es.msearch.assert_called_once + + +## New implementation will never use with_terms +# def test_new_term_with_terms(): +# rules = {'fields': ['a'], +# 'timestamp_field': '@timestamp', +# 'kibana_adapter_host': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash', 'query_key': 'a', +# 'window_step_size': {'days': 2}, +# 'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts} +# mock_res = {'responses' : [{'aggregations': {'values': {'buckets': [{'key': 'key1', 'doc_count': 1}, +# {'key': 'key2', 'doc_count': 5}]}}}]} + +# with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es: +# mock_es.return_value = mock.Mock() +# mock_es.return_value.msearch.return_value = mock_res +# mock_es.return_value.info.return_value = {'version': {'number': '2.x.x'}} +# rule = NewTermsRule(rules) + +# # Only 4 queries because of custom step size +# assert rule.es.msearch.call_count == 4 + +# # Key1 and key2 shouldn't cause a match +# terms = {ts_now(): [{'key': 'key1', 'doc_count': 1}, +# {'key': 'key2', 'doc_count': 1}]} +# rule.add_terms_data(terms) +# assert rule.matches == [] + +# # Key3 causes an alert for field a +# terms = {ts_now(): [{'key': 'key3', 'doc_count': 1}]} +# rule.add_terms_data(terms) +# assert len(rule.matches) == 1 +# assert rule.matches[0]['new_field'] == 'a' +# assert rule.matches[0]['a'] == 'key3' +# rule.matches = [] + +# # Key3 doesn't cause another alert +# terms = {ts_now(): [{'key': 'key3', 'doc_count': 1}]} +# rule.add_terms_data(terms) +# assert rule.matches == [] def 
test_new_term_with_composite_fields(): rules = {'fields': [['a', 'b', 'c'], ['d', 'e.f']], 'timestamp_field': '@timestamp', - 'es_host': 'example.com', 'es_port': 10, 'index': 'logstash', + 'kibana_adapter': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash', 'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts} mock_res = { - 'aggregations': { - 'filtered': { + 'responses': [{ + 'aggregations': { 'values': { - 'buckets': [ - { - 'key': 'key1', - 'doc_count': 5, - 'values': { - 'buckets': [ - { - 'key': 'key2', - 'doc_count': 5, - 'values': { - 'buckets': [ - { - 'key': 'key3', - 'doc_count': 3, - }, - { - 'key': 'key4', - 'doc_count': 2, - }, - ] - } - } - ] - } + 'buckets': [{ + 'key': 'key1', + 'doc_count': 5, + 'values': { + 'buckets': [{ + 'key': 'key2', + 'doc_count': 5, + 'values': { + 'buckets': [{ + 'key': 'key3', + 'doc_count': 3, + }, + { + 'key': 'key4', + 'doc_count': 2, + }, + ] + } + }] } - ] + }] + } } - } + }] } - with mock.patch('elastalert.ruletypes.elasticsearch_client') as mock_es: + with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es: mock_es.return_value = mock.Mock() - mock_es.return_value.search.return_value = mock_res + mock_es.return_value.msearch.return_value = mock_res mock_es.return_value.info.return_value = {'version': {'number': '2.x.x'}} rule = NewTermsRule(rules) - assert rule.es.search.call_count == 60 # key3 already exists, and thus shouldn't cause a match - rule.add_data([{'@timestamp': ts_now(), 'a': 'key1', 'b': 'key2', 'c': 'key3'}]) + data = { + ts_now() : { + tuple(['a','b','c']): [tuple(["key1","key2","key3"])], + tuple(['d','e.f']): [] + } + } + rule.add_new_term_data(data) assert rule.matches == [] - # key5 causes an alert for composite field [a, b, c] - rule.add_data([{'@timestamp': ts_now(), 'a': 'key1', 'b': 'key2', 'c': 'key5'}]) - assert len(rule.matches) == 1 - assert rule.matches[0]['new_field'] == ('a', 'b', 'c') - assert rule.matches[0]['a'] == 'key1' - assert rule.matches[0]['b'] == 'key2' - assert rule.matches[0]['c'] == 'key5' - rule.matches = [] + assert rule.es.msearch.call_count == 14 - # New values in other fields that are not part of the composite key should not cause an alert - rule.add_data([{'@timestamp': ts_now(), 'a': 'key1', 'b': 'key2', 'c': 'key4', 'd': 'unrelated_value'}]) - assert len(rule.matches) == 0 - rule.matches = [] - # Verify nested fields work properly - # Key6 causes an alert for nested field e.f - rule.add_data([{'@timestamp': ts_now(), 'd': 'key4', 'e': {'f': 'key6'}}]) + # key5 causes an alert for composite field [a, b, c] + data = { + ts_now() : { + ('a', 'b', 'c'): [("key1","key2","key5")], + ('d','e.f'): [] + } + } + rule.add_new_term_data(data) assert len(rule.matches) == 1 - assert rule.matches[0]['new_field'] == ('d', 'e.f') - assert rule.matches[0]['d'] == 'key4' - assert rule.matches[0]['e']['f'] == 'key6' + assert rule.matches[0]['field'] == ('a', 'b', 'c') + assert rule.matches[0]['new_value'] == ("key1","key2","key5") rule.matches = [] - # Missing_fields - rules['alert_on_missing_field'] = True - with mock.patch('elastalert.ruletypes.elasticsearch_client') as mock_es: - mock_es.return_value = mock.Mock() - mock_es.return_value.search.return_value = mock_res - mock_es.return_value.info.return_value = {'version': {'number': '2.x.x'}} - rule = NewTermsRule(rules) - rule.add_data([{'@timestamp': ts_now(), 'a': 'key2'}]) - assert len(rule.matches) == 2 - # This means that any one of the three n composite fields were not present - assert rule.matches[0]['missing_field'] == ('a', 
'b', 'c') - assert rule.matches[1]['missing_field'] == ('d', 'e.f') +def test_new_term_bounds(): + rules = {'fields': ['a'], + 'timestamp_field': '@timestamp', + 'kibana_adapter': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash', + 'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts, 'terms_window_size': {'days': 10 }, + 'window_step_size' : {'hours': 1 },'refresh_interval' : {'hours': 2 }, 'terms_size': 10000 } + + rule = NewTermsRule(rules) + + assert rule.window_size == datetime.timedelta(**{'days': 7}) + assert rule.step == datetime.timedelta(**{'hours': 6}) + assert rule.refresh_interval == datetime.timedelta(**{'hours': 6}) + assert rule.terms_size == 1000 def test_flatline(): From e2a47fd5a206fd251f98accaafae84d0d5562db7 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Fri, 9 Jun 2023 15:25:34 +0530 Subject: [PATCH 1048/1065] Update CHANGELOG.md --- CHANGELOG.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index db528badd..326eb51dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,16 @@ -# 2.9.1 FW updates +# fw_2.9.0.5 +- New Term Rule type modified to work with haystack use case +- Updated terms aggregation query to get existing terms +- Updated Query used to get current terms to make use of terms aggregation instead of a search query +- Added ability to refresh existing terms in a configurable refresh interval +- Added Upper and Lower bounds for configurable query parameters like window_size, step_size and terms_size +- Modified default values for new-term rule configurations + +# 2.9.0.1 FW updates +- Downgraded elasticsearch library from version 8 to version 6 for the engine to support the existing es clusters of haystack. +- Downgraded various other libraries for the same. +- es scrolls disabled as per requirement +- replaced search queries with msearch for all es querying ## Breaking Changes - Downgraded elasticsearch library from version 8 to version 6 for the engine to support the existing es clusters of haystack.
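For reference, the clamping behaviour described in the fw_2.9.0.5 notes above is the one exercised by test_new_term_bounds earlier in this patch; a minimal sketch of building a rule with out-of-range values (hosts, index and field names are illustrative, and a reachable kibana adapter is assumed since NewTermsRule queries existing terms on construction):

import datetime

from elastalert.ruletypes import NewTermsRule
from elastalert.util import dt_to_ts, ts_to_dt

rules = {'fields': ['a'],
         'timestamp_field': '@timestamp',
         'kibana_adapter': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash',
         'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts,
         'terms_window_size': {'days': 10},  # above the 7 day upper bound
         'window_step_size': {'hours': 1},   # below the 6 hour lower bound
         'refresh_interval': {'hours': 2},   # below the 6 hour lower bound
         'terms_size': 10000}                # above the 1000 upper bound

rule = NewTermsRule(rules)
assert rule.window_size == datetime.timedelta(days=7)        # clamped down to the cap
assert rule.step == datetime.timedelta(hours=6)              # raised to the floor
assert rule.refresh_interval == datetime.timedelta(hours=6)  # raised to the floor
assert rule.terms_size == 1000                               # clamped down to the cap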
From 6f3978de7665fee168f95cbb774a44e4c8b0fb29 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Wed, 14 Jun 2023 12:17:03 +0530 Subject: [PATCH 1049/1065] query_delay support in conf added --- elastalert/config.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/elastalert/config.py b/elastalert/config.py index 3d6b458ff..7e841799a 100644 --- a/elastalert/config.py +++ b/elastalert/config.py @@ -92,6 +92,8 @@ def load_conf(args, defaults=None, overrides=None): conf['old_query_limit'] = datetime.timedelta(**conf['old_query_limit']) else: conf['old_query_limit'] = datetime.timedelta(weeks=1) + if 'query_delay' in conf: + conf['query_delay'] = datetime.timedelta(**conf['query_delay']) except (KeyError, TypeError) as e: raise EAException('Invalid time format used: %s' % e) From fc3e0a7400d2e87a9f9f24b922870d030c938f3e Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 19 Jun 2023 15:54:52 +0530 Subject: [PATCH 1050/1065] new-term - filter query bug fix --- elastalert/ruletypes.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index af042d292..07eed070f 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -764,7 +764,8 @@ def get_new_term_query(self,starttime,endtime,field): filter_level = query['query']['bool']['filter']['bool']['must'] if 'filter' in self.rules: for item in self.rules['filter']: - filter_level.append(item) + if "query" in item: + filter_level.append(item['query']) From 85c2059f945570fe9d23ca2f5870596ec6cc7d74 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 19 Jun 2023 17:01:04 +0530 Subject: [PATCH 1051/1065] new-term - filter query bug fix --- elastalert/ruletypes.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 07eed070f..95f0cabfc 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -766,6 +766,8 @@ def get_new_term_query(self,starttime,endtime,field): for item in self.rules['filter']: if "query" in item: filter_level.append(item['query']) + else: + filter_level.append(item) From 3a7a86c3b63506ef5d27328953e54db03e1ec43c Mon Sep 17 00:00:00 2001 From: sivatarunp Date: Wed, 30 Aug 2023 17:48:25 +0530 Subject: [PATCH 1052/1065] [HAYS-4850]changing percentage match query (#27) * changing percentage match query * changing percentage match query in test cases * changing dependency * adding size param for percentageMatch rule to get num_hits --- elastalert/elastalert.py | 3 +++ elastalert/ruletypes.py | 6 +++++- requirements.txt | 2 +- tests/rules_test.py | 8 +++++++- 4 files changed, 16 insertions(+), 3 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index bee8537e1..985793d48 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -30,6 +30,7 @@ from elasticsearch.exceptions import NotFoundError from elasticsearch.exceptions import TransportError from elastalert.ruletypes import ErrorRateRule, NewTermsRule +from elastalert.ruletypes import PercentageMatchRule from elastalert.alerters.debug import DebugAlerter from elastalert.config import load_conf @@ -275,6 +276,8 @@ def get_terms_query(self, query, rule, size, field): def get_aggregation_query(self, query, rule, query_key, terms_size, timestamp_field='@timestamp'): """ Takes a query generated by get_query and outputs an aggregation query """ + if
isinstance(rule['type'], PercentageMatchRule): + query['size'] = 10 query_element = query['query'] if 'sort' in query_element: query_element.pop('sort') diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 95f0cabfc..a0dd79b08 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -1400,12 +1400,16 @@ def generate_aggregation_query(self): return { 'percentage_match_aggs': { 'filters': { - 'other_bucket': True, 'filters': { 'match_bucket': { 'bool': { 'must': self.match_bucket_filter } + }, + '_other_': { + 'bool': { + 'must_not': self.match_bucket_filter + } } } } diff --git a/requirements.txt b/requirements.txt index ff8a2accd..570b9c945 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,7 @@ envparse>=0.2.0 exotel==0.1.5 Jinja2==3.1.2 jira>=3.1.1 -jsonschema>=4.4.0 +jsonschema==4.17.3 mock>=2.0.0 prison>=0.2.1 prometheus_client>=0.13.1 diff --git a/tests/rules_test.py b/tests/rules_test.py index ee9299879..5dc575601 100644 --- a/tests/rules_test.py +++ b/tests/rules_test.py @@ -1621,7 +1621,6 @@ def test_percentage_match(): assert rule.rules['aggregation_query_element'] == { 'percentage_match_aggs': { 'filters': { - 'other_bucket': True, 'filters': { 'match_bucket': { 'bool': { 'must': { 'term': 'term_val' } } }, + '_other_': { + 'bool': { + 'must_not': { + 'term': 'term_val' + } + } } } } From 71f9916fed088a9735b54a3a7df91614d52ae3f6 Mon Sep 17 00:00:00 2001 From: sivatarunp Date: Thu, 31 Aug 2023 11:49:07 +0530 Subject: [PATCH 1053/1065] updating changelog --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 326eb51dc..9875f5b3f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,6 @@ +# fw_2.9.0.8 +- Percentage match rule type modified to work with haystack use case [HAYS-4850] + # fw_2.9.0.5 - New Term Rule type modified to work with haystack use case - Updated terms aggregation query to get existing terms From a8b62c7eb7d8ae601863e9c5dceef143a9bdb215 Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Mon, 4 Sep 2023 12:16:20 +0530 Subject: [PATCH 1054/1065] New term optimisation (#26) * New Term Optimisation - Threshold Feature enabled, removed use of refresh interval, Sliding Terms Window * Test case updates --------- Co-authored-by: RashmiRam --- .DS_Store | Bin 0 -> 6148 bytes elastalert/elastalert.py | 51 ++---- elastalert/ruletypes.py | 325 +++++++++++++++++++++++----------- examples/ex_flatline.yaml | 20 +++ tests/rules_test.py | 362 ++++++++++++++++++++++++++------------ 5 files changed, 503 insertions(+), 255 deletions(-) create mode 100644 .DS_Store create mode 100644 examples/ex_flatline.yaml diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..c5ba7493f389f5720cca0be609f65c3c2424c2a0 GIT binary patch literal 6148 [base85-encoded binary data omitted] literal 0 HcmV?d00001 diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 985793d48..d8c6354af 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -456,45 +456,26 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): return hits - def get_new_terms_data(self,
rule, starttime, endtime, field):
-        new_terms = []
-
-        rule_inst = rule["type"]
+
+    def get_terms_data(self,rule, starttime, endtime):
+        data = {}
+        rule_inst = rule['type']
         try:
-            query = rule_inst.get_new_term_query(starttime,endtime,field)
-            request = get_msearch_query(query,rule)
-            res = self.thread_data.current_es.msearch(body=request)
-            res = res['responses'][0]
-
-            if 'aggregations' in res:
-                buckets = res['aggregations']['values']['buckets']
+            for field in rule['fields']:
+                terms, counts = rule_inst.get_terms_data(self.thread_data.current_es,starttime,endtime,field)
+                self.thread_data.num_hits += len(terms)
+                terms_counts_pair = ( terms, counts )
                 if type(field) == list:
-                    for bucket in buckets:
-                        new_terms += rule_inst.flatten_aggregation_hierarchy(bucket)
-                else:
-                    new_terms = [bucket['key'] for bucket in buckets]
-
+                    data[tuple(field)] = terms_counts_pair
+                else:
+                    data[field] = terms_counts_pair
         except ElasticsearchException as e:
             if len(str(e)) > 1024:
                 e = str(e)[:1024] + '... (%d characters removed)' % (len(str(e)) - 1024)
-            self.handle_error('Error running new terms query: %s' % (e), {'rule': rule['name'], 'query': query})
-            return []
-
-        return new_terms
+            self.handle_error('Error running new terms query: %s' % (e), {'rule': rule['name'], 'query': rule_inst.get_new_term_query(starttime, endtime,field)})
+            return {endtime: {}}
 
-
-
-
-    def get_new_terms(self,rule, starttime, endtime):
-        data = {}
-
-        for field in rule['fields']:
-            new_terms = self.get_new_terms_data(rule,starttime,endtime,field)
-            self.thread_data.num_hits += len(new_terms)
-            if type(field) == list:
-                data[tuple(field)] = new_terms
-            else:
-                data[field] = new_terms
+
         lt = rule.get('use_local_time')
 
         status_log = "Queried rule %s from %s to %s: %s / %s hits" % (
@@ -752,7 +733,7 @@ def run_query(self, rule, start=None, end=None, scroll=False):
         rule['scrolling_cycle'] = rule.get('scrolling_cycle', 0) + 1
         index = self.get_index(rule, start, end)
         if isinstance(rule_inst, NewTermsRule):
-            data = self.get_new_terms(rule, start, end)
+            data = self.get_terms_data(rule, start, end)
         elif rule.get('use_count_query'):
             data = self.get_hits_count(rule, start, end, index)
         elif rule.get('use_terms_query'):
@@ -773,7 +754,7 @@ def run_query(self, rule, start=None, end=None, scroll=False):
             return False
         elif data:
             if isinstance(rule_inst, NewTermsRule):
-                rule_inst.add_new_term_data(data)
+                rule_inst.add_terms_data(data)
             elif rule.get('use_count_query'):
                 rule_inst.add_count_data(data)
             elif rule.get('use_terms_query'):
diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py
index a0dd79b08..e34ce6704 100644
--- a/elastalert/ruletypes.py
+++ b/elastalert/ruletypes.py
@@ -3,6 +3,7 @@
 import datetime
 import sys
 import time
+import itertools
 
 from sortedcontainers import SortedKeyList as sortedlist
 
@@ -410,6 +411,104 @@ def append_middle(self, event):
         self.running_count += event[1]
         self.data.rotate(-rotation)
 
+class TermsWindow:
+
+    """ For each field configured in a new_term rule, this terms window is created and maintained.
+    A sliding window is maintained, and the counts of all the existing terms are stored.
+
+    data - Sliding window holding the queried terms and counts along with their timestamps. This list is sorted in ascending order by timestamp.
+    existing_terms - A set containing the existing terms. Mainly used for looking up new terms.
+    potential_new_term_windows - Dictionary of EventWindows created for potential new terms.
+    count_dict - Dictionary containing the count of existing terms. 
When something is added to or popped from the sliding window - data, this count is updated + """ + def __init__(self, term_window_size, ts_field , threshold, threshold_window_size, get_ts): + self.term_window_size = term_window_size + self.ts_field = ts_field + self.threshold = threshold + self.threshold_window_size = threshold_window_size + self.get_ts = get_ts + + self.data = sortedlist(key= lambda x: x[0]) #sorted by timestamp + self.existing_terms = set() + self.potential_new_term_windows = {} + self.count_dict = {} + + """ used to add new terms and their counts for a timestamp into the sliding window - data """ + def add(self, timestamp, terms, counts): + for (term, count) in zip(terms, counts): + if term not in self.count_dict: + self.count_dict[term] = 0 + self.count_dict[term] += count + self.existing_terms.add(term) + self.data.add((timestamp, terms,counts)) + self.resize() + + """ function to split new terms and existing terms when given timestamp, terms and counts""" + def split(self,timestamp, terms, counts): + unseen_terms = [] + unseen_counts = [] + seen_terms = [] + seen_counts = [] + self.resize(till = timestamp - self.term_window_size) + for (term, count) in zip(terms, counts): + if term not in self.existing_terms: + unseen_terms.append(term) + unseen_counts.append(count) + else: + seen_terms.append(term) + seen_counts.append(count) + return seen_terms, seen_counts, unseen_terms, unseen_counts + + """ function to update the potential new terms windows""" + def update_potential_new_term_windows(self, timestamp, unseen_terms, unseen_counts): + for (term, count) in zip(unseen_terms, unseen_counts): + event = ({self.ts_field: timestamp}, count) + window = self.potential_new_term_windows.setdefault( term , EventWindow(self.threshold_window_size, getTimestamp=self.get_ts)) + window.append(event) + + + """function to get the matched new_terms that have crossed the threshold configured""" + def extract_new_terms(self, potential_new_terms, potential_term_counts): + new_terms = [] + new_counts = [] + for (potential_new_term, potential_term_count) in zip(potential_new_terms, potential_term_counts): + window = self.potential_new_term_windows.get(potential_new_term) + if window.count() >= self.threshold: + new_terms.append(potential_new_term) + new_counts.append(potential_term_count) + self.potential_new_term_windows.pop(potential_new_term) + return new_terms, new_counts + + def get_new_terms(self, timestamp, terms, counts): + existing_terms, existing_counts, potential_new_terms, potential_term_counts = self.split(timestamp, terms, counts) # Split the potential_new_terms and existing terms along with their counts based on current timestamp + self.update_potential_new_term_windows(timestamp, potential_new_terms, potential_term_counts) # Update the potential_new_term_windows + new_terms, new_counts = self.extract_new_terms( potential_new_terms, potential_term_counts) # extract and delete new terms from the potential_new_terms_window. 
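+        # Illustrative walk-through (hypothetical numbers, not taken from any rule):
+        # with threshold=10 and a one-hour threshold_window_size, get_new_terms(t0, ['x'], [4])
+        # only buffers 'x' (4 < 10) and returns ([], []); a second call
+        # get_new_terms(t0 + 30min, ['x'], [6]) brings the buffered count to 10, so 'x'
+        # is returned as a new term and promoted into existing_terms via add() below.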
+        self.add(timestamp, existing_terms + new_terms, existing_counts + new_counts) # Add the existing terms and new_terms to the terms_window
+        return new_terms, new_counts
+
+
+    """ This function makes sure that the duration of the sliding window does not exceed term_window_size.
+    All events with a timestamp earlier than 'till' are popped, and the counts of the keys in the popped events are subtracted from count_dict.
+    After subtraction, if a term's count reaches 0, it is removed from count_dict and existing_terms, i.e. it has not occurred within the terms_window duration.
+    By default, till = (last event's timestamp - term_window_size).
+    """
+    def resize(self, till=None):
+        if len(self.data)==0:
+            return
+
+        if till == None:
+            till = self.data[-1][0] - self.term_window_size
+
+        while len(self.data)!=0 and self.data[0][0] < till:
+            timestamp, keys, counts = self.data.pop(0)
+            for i in range(len(keys)):
+                self.count_dict[keys[i]] -= counts[i]
+                if self.count_dict[keys[i]] <= 0:
+                    self.count_dict.pop(keys[i])
+                    self.existing_terms.discard(keys[i])
+
+
+
 class SpikeRule(RuleType):
     """ A rule that uses two sliding windows to compare relative event frequency. """
@@ -676,22 +775,26 @@ class NewTermsRule(RuleType):
 
     def __init__(self, rule, args=None):
         super(NewTermsRule, self).__init__(rule, args)
-        self.seen_values = {}
+        self.term_windows = {}
         self.last_updated_at = None
         self.es = kibana_adapter_client(self.rules)
+        self.ts_field = self.rules.get('timestamp_field', '@timestamp')
+        self.get_ts = new_get_event_ts(self.ts_field)
+        self.new_terms = {}
+
+        self.threshold = rule.get('threshold',0)
 
         # terms_window_size : Default & Upperbound - 7 Days
        self.window_size = min(datetime.timedelta(**self.rules.get('terms_window_size', {'days': 7})), datetime.timedelta(**{'days': 7}))
 
-        # window_step_size : Default - 1 Days, Lowerbound: 6 hours
-        self.step = max( datetime.timedelta(**self.rules.get('window_step_size', {'days': 1})), datetime.timedelta(**{'hours': 6}) )
+        self.step = datetime.timedelta(**{'hours': 1})
 
-        # refresh_interval : Default - 6 hours, Lowerbound: 6 hours
-        self.refresh_interval = max( datetime.timedelta(**self.rules.get('refresh_interval', {'hours': 6})), datetime.timedelta(**{'hours': 6}) )
-
-        # refresh_interval : Default - 500, Upperbound: 1000
+        # terms_size : Default - 500, Upperbound: 1000
         self.terms_size = min(self.rules.get('terms_size', 500),1000)
 
+        # threshold_window_size
+        self.threshold_window_size = min( datetime.timedelta(**self.rules.get('threshold_window_size', {'hours': 1})), datetime.timedelta(**{'days': 2}) )
+
         # Allow the use of query_key or fields
         if 'fields' not in self.rules:
             if 'query_key' not in self.rules:
@@ -713,17 +816,12 @@ def __init__(self, rule, args=None):
         if self.rules.get('use_keyword_postfix', False): # making it false by default as we won't use the keyword suffix
             elastalert_logger.warn('Warning: If query_key is a non-keyword field, you must set '
                                    'use_keyword_postfix to false, or add .keyword/.raw to your query_key.')
-
-    def should_refresh_terms(self):
-        return self.last_updated_at is None or self.last_updated_at < ( ts_now() - self.refresh_interval)
-
-    def update_terms(self,args=None):
         try:
             self.get_all_terms(args=args)
         except Exception as e:
             # Refuse to start if we cannot get existing terms
             raise EAException('Error searching for existing terms: %s' % (repr(e))).with_traceback(sys.exc_info()[2])
-
+
 
     def get_new_term_query(self,starttime,endtime,field):
 
@@ -771,7 +869,7 @@ def get_new_term_query(self,starttime,endtime,field):
 
         # For composite keys, we will
need to perform sub-aggregations if type(field) == list: - self.seen_values.setdefault(tuple(field), []) + self.term_windows.setdefault(tuple(field), TermsWindow(self.window_size, self.ts_field , self.threshold, self.threshold_window_size, self.get_ts)) level = query['aggs'] # Iterate on each part of the composite key and add a sub aggs clause to the elastic search query for i, sub_field in enumerate(field): @@ -784,7 +882,7 @@ def get_new_term_query(self,starttime,endtime,field): level['values']['aggs'] = {'values': {'terms': copy.deepcopy(field_name)}} level = level['values']['aggs'] else: - self.seen_values.setdefault(field, []) + self.term_windows.setdefault(field, TermsWindow(self.window_size, self.ts_field , self.threshold, self.threshold_window_size, self.get_ts)) # For non-composite keys, only a single agg is needed if self.rules.get('use_keyword_postfix', False):# making it false by default as we wont use the keyword suffix field_name['field'] = add_raw_postfix(field, True) @@ -793,6 +891,32 @@ def get_new_term_query(self,starttime,endtime,field): return query + def get_terms_data(self, es, starttime, endtime, field, request_timeout= None): + terms = [] + counts = [] + query = self.get_new_term_query(starttime,endtime,field) + request = get_msearch_query(query,self.rules) + + if request_timeout == None: + res = es.msearch(body=request) + else: + res = es.msearch(body=request, request_timeout=request_timeout) + res = res['responses'][0] + + if 'aggregations' in res: + buckets = res['aggregations']['values']['buckets'] + if type(field) == list: + for bucket in buckets: + keys, doc_counts = self.flatten_aggregation_hierarchy(bucket) + terms += keys + counts += doc_counts + else: + for bucket in buckets: + terms.append(bucket['key']) + counts.append(bucket['doc_count']) + + return terms, counts + @@ -810,43 +934,18 @@ def get_all_terms(self,args): for field in self.fields: tmp_start = start - tmp_end = min(start + self.step, end) - query = self.get_new_term_query(tmp_start,tmp_end,field) - + # Query the entire time range in small chunks while tmp_start < end: - - msearch_query = get_msearch_query(query,self.rules) - - res = self.es.msearch(msearch_query,request_timeout=50) - res = res['responses'][0] - - if 'aggregations' in res: - buckets = res['aggregations']['values']['buckets'] - if type(field) == list: - # For composite keys, make the lookup based on all fields - # Make it a tuple since it can be hashed and used in dictionary lookups - for bucket in buckets: - # We need to walk down the hierarchy and obtain the value at each level - self.seen_values[tuple(field)] += self.flatten_aggregation_hierarchy(bucket) - else: - keys = [bucket['key'] for bucket in buckets] - self.seen_values[field] += keys - else: - if type(field) == list: - self.seen_values.setdefault(tuple(field), []) - else: - self.seen_values.setdefault(field, []) - if tmp_start == tmp_end: - break - tmp_start = tmp_end tmp_end = min(tmp_start + self.step, end) - query = self.get_new_term_query(tmp_start,tmp_end,field) + terms, counts = self.get_terms_data(self.es, tmp_start, tmp_end, field, request_timeout=50) + self.term_windows[self.get_lookup_key(field)].add(tmp_end,terms,counts) + tmp_start = tmp_end - for key, values in self.seen_values.items(): - if not values: - if type(key) == tuple: + for lookup_key, window in self.term_windows.items(): + if not window.existing_terms: + if type(lookup_key) == tuple: # If we don't have any results, it could either be because of the absence of any baseline data # OR it may be 
because the composite key contained a non-primitive type. Either way, give the # end-users a heads up to help them debug what might be going on. @@ -857,9 +956,8 @@ def get_all_terms(self,args): else: elastalert_logger.info('Found no values for %s' % (field)) continue - self.seen_values[key] = list(set(values)) - elastalert_logger.info('Found %s unique values for %s' % (len(set(values)), key)) - self.last_updated_at = ts_now() + elastalert_logger.info('Found %s unique values for %s' % (len(window.existing_terms), lookup_key)) + # self.last_updated_at = ts_now() def flatten_aggregation_hierarchy(self, root, hierarchy_tuple=()): """ For nested aggregations, the results come back in the following format: @@ -950,75 +1048,88 @@ def flatten_aggregation_hierarchy(self, root, hierarchy_tuple=()): A similar formatting will be performed in the add_data method and used as the basis for comparison """ - results = [] + final_keys = [] + final_counts = [] # There are more aggregation hierarchies left. Traverse them. if 'values' in root: - results += self.flatten_aggregation_hierarchy(root['values']['buckets'], hierarchy_tuple + (root['key'],)) + keys, counts = self.flatten_aggregation_hierarchy(root['values']['buckets'], hierarchy_tuple + (root['key'],)) + final_keys += keys + final_counts += counts else: # We've gotten to a sub-aggregation, which may have further sub-aggregations # See if we need to traverse further for node in root: if 'values' in node: - results += self.flatten_aggregation_hierarchy(node, hierarchy_tuple) + keys, counts = self.flatten_aggregation_hierarchy(node, hierarchy_tuple) + final_keys += keys + final_counts += counts else: - results.append(hierarchy_tuple + (node['key'],)) - return results + final_keys.append(hierarchy_tuple + (node['key'],)) + final_counts.append(node['doc_count']) + return final_keys, final_counts - def add_new_term_data(self, payload): - if self.should_refresh_terms(): - self.update_terms() + def add_terms_data(self, payload): timestamp = list(payload.keys())[0] data = payload[timestamp] for field in self.fields: - lookup_key =tuple(field) if type(field) == list else field - for value in data[lookup_key]: - if value not in self.seen_values[lookup_key]: - match = { - "field": lookup_key, - self.rules['timestamp_field']: timestamp, - 'new_value': tuple(value) if type(field) == list else value - } - self.add_match(copy.deepcopy(match)) - self.seen_values[lookup_key].append(value) - - def add_data(self, data): - for document in data: - for field in self.fields: - value = () - lookup_field = field - if type(field) == list: - # For composite keys, make the lookup based on all fields - # Make it a tuple since it can be hashed and used in dictionary lookups - lookup_field = tuple(field) - for sub_field in field: - lookup_result = lookup_es_key(document, sub_field) - if not lookup_result: - value = None - break - value += (lookup_result,) - else: - value = lookup_es_key(document, field) - if not value and self.rules.get('alert_on_missing_field'): - document['missing_field'] = lookup_field - self.add_match(copy.deepcopy(document)) - elif value: - if value not in self.seen_values[lookup_field]: - document['new_field'] = lookup_field - self.add_match(copy.deepcopy(document)) - self.seen_values[lookup_field].append(value) - - def add_terms_data(self, terms): - # With terms query, len(self.fields) is always 1 and the 0'th entry is always a string - field = self.fields[0] - for timestamp, buckets in terms.items(): - for bucket in buckets: - if bucket['doc_count']: - if 
bucket['key'] not in self.seen_values[field]: - match = {field: bucket['key'], - self.rules['timestamp_field']: timestamp, - 'new_field': field} - self.add_match(match) - self.seen_values[field].append(bucket['key']) + lookup_key = self.get_lookup_key(field) + keys, counts = data[lookup_key] + + new_terms, new_counts = self.term_windows[lookup_key].get_new_terms(timestamp, keys, counts ) + + # append and get all match keys and counts + for (new_term, new_count) in zip(new_terms, new_counts): + match = { + "field": lookup_key, + self.rules['timestamp_field']: timestamp, + "new_value": tuple(new_term) if type(new_term) == list else new_term, + "hits" : new_count + } + self.add_match(copy.deepcopy(match)) + + ### NOT USED ANYMORE ### + # def add_data(self, data): + # for document in data: + # for field in self.fields: + # value = () + # lookup_field = field + # if type(field) == list: + # # For composite keys, make the lookup based on all fields + # # Make it a tuple since it can be hashed and used in dictionary lookups + # lookup_field = tuple(field) + # for sub_field in field: + # lookup_result = lookup_es_key(document, sub_field) + # if not lookup_result: + # value = None + # break + # value += (lookup_result,) + # else: + # value = lookup_es_key(document, field) + # if not value and self.rules.get('alert_on_missing_field'): + # document['missing_field'] = lookup_field + # self.add_match(copy.deepcopy(document)) + # elif value: + # if value not in self.seen_values[lookup_field]: + # document['new_field'] = lookup_field + # self.add_match(copy.deepcopy(document)) + # self.seen_values[lookup_field].append(value) + + ### NOT USED ANYMORE ### + # def add_terms_data(self, terms): + # # With terms query, len(self.fields) is always 1 and the 0'th entry is always a string + # field = self.fields[0] + # for timestamp, buckets in terms.items(): + # for bucket in buckets: + # if bucket['doc_count']: + # if bucket['key'] not in self.seen_values[field]: + # match = {field: bucket['key'], + # self.rules['timestamp_field']: timestamp, + # 'new_field': field} + # self.add_match(match) + # self.seen_values[field].append(bucket['key']) + + def get_lookup_key(self,field): + return tuple(field) if type(field) == list else field class CardinalityRule(RuleType): diff --git a/examples/ex_flatline.yaml b/examples/ex_flatline.yaml new file mode 100644 index 000000000..70cd7033e --- /dev/null +++ b/examples/ex_flatline.yaml @@ -0,0 +1,20 @@ +name: freshemail debug rule +type: flatline +index: traces* +threshold: 3 +# use_count_query: true +timestamp_field: timestamp +timeframe: + minutes: 1 +filter: +- query: + query_string: + query: "*" +alert: +- "debug" +scan_entire_timeframe: true + +realert: + minutes: 0 +query_delay: + minutes: 3 \ No newline at end of file diff --git a/tests/rules_test.py b/tests/rules_test.py index 5dc575601..1c3e5aaa7 100644 --- a/tests/rules_test.py +++ b/tests/rules_test.py @@ -620,14 +620,12 @@ def record_args(*args, **kwargs): # Key1 and key2 shouldn't cause a match data = { ts_now() : { - "a": ["key1"], - "b": ["key2"] + "a": (["key1"],[1]), + "b": (["key2"], [1]) } } - rule.add_new_term_data(data) + rule.add_terms_data(data) - # 30 day default range, 1 day default step, times 2 fields - assert rule.es.msearch.call_count == 14 # rule.add_data([{'@timestamp': ts_now(), 'a': 'key1', 'b': 'key2'}]) @@ -636,22 +634,22 @@ def record_args(*args, **kwargs): # Neither will missing values data = { ts_now() : { - "a": ["key2"], - "b": [] + "a": (["key2"],[1]), + "b": ([],[]) } } - 
rule.add_new_term_data(data) + rule.add_terms_data(data) # rule.add_data([{'@timestamp': ts_now(), 'a': 'key2'}]) assert rule.matches == [] # Key3 causes an alert for field b data = { ts_now() : { - "a": ["key2"], - "b": ["key3"] + "a": (["key2"],[1]), + "b": (["key3"],[1]) } } - rule.add_new_term_data(data) + rule.add_terms_data(data) #rule.add_data([{'@timestamp': ts_now(), 'a': 'key2', 'b': 'key3'}]) assert len(rule.matches) == 1 @@ -662,11 +660,11 @@ def record_args(*args, **kwargs): # Key3 doesn't cause another alert for field b data = { ts_now() : { - "a": ["key2"], - "b": ["key3"] + "a": (["key2"],[1]), + "b": (["key3"],[1]) } } - rule.add_new_term_data(data) + rule.add_terms_data(data) # rule.add_data([{'@timestamp': ts_now(), 'a': 'key2', 'b': 'key3'}]) assert rule.matches == [] @@ -683,7 +681,7 @@ def record_args(*args, **kwargs): # "b": [] # } # } - # rule.add_new_term_data(data) + # rule.add_terms_data(data) # #rule.add_data([{'@timestamp': ts_now(), 'a': 'key2'}]) # assert len(rule.matches) == 1 # assert rule.matches[0]['missing_field'] == 'b' @@ -708,13 +706,11 @@ def test_new_term_nested_field(): # Key3 causes an alert for nested field b.c data = { ts_now() : { - "a": [], - "b.c": ["key3"] + "a": ([],[]), + "b.c": (["key3"],[1]) } } - rule.add_new_term_data(data) - - assert rule.es.msearch.call_count == 14 + rule.add_terms_data(data) # rule.add_data([{'@timestamp': ts_now(), 'b': {'c': 'key3'}}]) assert len(rule.matches) == 1 @@ -722,104 +718,65 @@ def test_new_term_nested_field(): assert rule.matches[0]['new_value'] == 'key3' rule.matches = [] -def test_new_term_refresh_interval(): +def test_new_term_window_updates(): rules = {'fields': ['a'], 'timestamp_field': '@timestamp', 'kibana_adapter_host': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash', - 'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts, 'terms_window_size': {'days': 1 } } - mock_res ={'responses' : [{'aggregations': {'values': {'buckets': [{'key': 'key1', 'doc_count': 1}, + 'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts, 'terms_window_size': {'hours': 3 }, 'threshold': 20, 'threshold_window_size': {'hours': 1} } + mock_res ={'responses' : [{'aggregations': {'values': {'buckets': [{'key': 'key1', 'doc_count': 5}, {'key': 'key2', 'doc_count': 5}]}}}] } - #random_test_data - data = { ts_now() : { "a": [] } } + #empty_test_data + time_pointer = ts_now() + with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es: mock_es.return_value = mock.Mock() mock_es.return_value.msearch.return_value = mock_res mock_es.return_value.info.return_value = {'version': {'number': '2.x.x'}} - - - # Rule with refresh_interval not set, defaulting to 6 hours - rule = NewTermsRule(rules) - - - # get_all_terms should not be called as last_updated will be less than now - 6 hours - rule.add_new_term_data(data) - assert rule.es.msearch.assert_called - mock_es.return_value.msearch.reset_mock() - - # get_all_terms should be called when last_updated is none - rule.last_updated_at = None - rule.add_new_term_data(data) - assert rule.es.msearch.assert_called_once - mock_es.return_value.msearch.reset_mock() - - # get_all_terms should not be called as last_updated will not be less than now - 6 hours - rule.last_updated_at = ts_now() - datetime.timedelta(**{'hours': 4}) - rule.add_new_term_data(data) - assert not rule.es.msearch.called - - # get_all_terms should be called as last_updated will be less than now - 6 hours - rule.last_updated_at = ts_now() - datetime.timedelta(**{'hours': 7}) - rule.add_new_term_data(data) - assert 
rule.es.msearch.assert_called_once
-        mock_es.return_value.msearch.reset_mock()
-
-        # Rule with refresh_interval set to 2 hours
-        rules["refresh_interval"] = {'hours': 2}
         rule = NewTermsRule(rules)
-        mock_es.return_value.msearch.reset_mock()
-
-        # get_all_terms should not be called as last_updated will not be less than now - 2 hours
-        rule.last_updated_at = ts_now() - datetime.timedelta(**{'hours': 1})
-        rule.add_new_term_data(data)
-        assert not rule.es.msearch.called
-
-        # get_all_terms should be called as last_updated will be less than now - 2 hours
-        rule.last_updated_at = ts_now() - datetime.timedelta(**{'hours': 3})
-        rule.add_new_term_data(data)
-        assert rule.es.msearch.assert_called_once
-
-
-## New implementation will never use with_terms
-# def test_new_term_with_terms():
-#     rules = {'fields': ['a'],
-#             'timestamp_field': '@timestamp',
-#             'kibana_adapter_host': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash', 'query_key': 'a',
-#             'window_step_size': {'days': 2},
-#             'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts}
-#     mock_res = {'responses' : [{'aggregations': {'values': {'buckets': [{'key': 'key1', 'doc_count': 1},
-#             {'key': 'key2', 'doc_count': 5}]}}}]}
+
+        # key2 keeps occurring every hour
+        for i in range(4):
+            time_pointer += datetime.timedelta(hours=1)
+            data = { time_pointer : { "a": (['key2'],[5]) } }
+            rule.add_terms_data(data)
+
+        # 4 hours later, if key1 comes again, a match should come
+        data = { time_pointer : { "a": (['key1'],[20]) } }
+        rule.add_terms_data(data)
+        assert len(rule.matches) == 1
 
-# with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es:
-#     mock_es.return_value = mock.Mock()
-#     mock_es.return_value.msearch.return_value = mock_res
-#     mock_es.return_value.info.return_value = {'version': {'number': '2.x.x'}}
-#     rule = NewTermsRule(rules)
+    # if key1 comes again within the next 2 hours 59 minutes, a match wouldn't come, as it is now in existing terms
+    time_pointer += datetime.timedelta(hours=2, minutes=59)
+    data = { time_pointer : { "a": (['key1'],[20]) } }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 1
 
-# # Only 4 queries because of custom step size
-# assert rule.es.msearch.call_count == 4
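+    # A note on the arithmetic here: terms_window_size is 3 hours in this test's
+    # rules, so a term that has not been seen for more than 3 hours expires from
+    # existing_terms and is evaluated as a potential new term again; it must then
+    # accumulate at least threshold (20) hits within threshold_window_size (1 hour)
+    # before a match is emitted.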
+    # 3 hours later, if the same key comes, it will be considered a new term, but since the threshold isn't reached there are no matches
+    time_pointer += datetime.timedelta(hours=3, minutes=1)
+    data = { time_pointer : { "a": (['key1'],[1]) } }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 1
 
 
+    # in the next 30 mins, the threshold is reached and a match is found
+    time_pointer += datetime.timedelta(minutes= 30)
+    data = { time_pointer : { "a": (['key1'],[19]) } }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 2
 
+    # another new term causing a match
+    time_pointer += datetime.timedelta(minutes= 30)
+    data = { time_pointer : { "a": (['key2'],[21]) } }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 3
 
+    time_pointer += datetime.timedelta(minutes= 40)
+    data = { time_pointer : { "a": (['key2'],[21]) } }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 3
 
 def test_new_term_with_composite_fields():
     rules = {'fields': [['a', 'b', 'c'], ['d', 'e.f']],
@@ -864,48 +821,227 @@
         mock_es.return_value.info.return_value = {'version': {'number': '2.x.x'}}
         rule = NewTermsRule(rules)
 
-    # key3 already exists, and thus shouldn't cause a match
     data = {
         ts_now() : {
-            tuple(['a','b','c']): [tuple(["key1","key2","key3"])],
-            tuple(['d','e.f']): []
+            tuple(['a','b','c']): ([tuple(["key1","key2","key3"])],[1]),
+            tuple(['d','e.f']): ([],[])
         }
     }
-    rule.add_new_term_data(data)
+    rule.add_terms_data(data)
     assert rule.matches == []
 
-    assert rule.es.msearch.call_count == 14
-
     # key5 causes an alert for composite field [a, b, c]
     data = {
         ts_now() : {
-            ('a', 'b', 'c'): [("key1","key2","key5")],
-            ('d','e.f'): []
+            ('a', 'b', 'c'): ([("key1","key2","key5")],[1]),
+            ('d','e.f'): ([],[])
         }
     }
-    rule.add_new_term_data(data)
+    rule.add_terms_data(data)
     assert len(rule.matches) == 1
     assert rule.matches[0]['field'] == ('a', 'b', 'c')
     assert rule.matches[0]['new_value'] == ("key1","key2","key5")
     rule.matches = []
 
+    # testing the same with a threshold window and threshold
+
+    rules['threshold'] = 10
+    rules['threshold_window_size'] = {'hours': 6}
+
+
+    with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es:
+        mock_es.return_value = mock.Mock()
+        mock_es.return_value.msearch.return_value = mock_res
+        mock_es.return_value.info.return_value = {'version': {'number': '2.x.x'}}
+        rule = NewTermsRule(rules)
+
+    time_pointer = ts_now()
+
+    # will not cause a match
+    data = {
+        time_pointer : {
+            ('a', 'b', 'c'): ([("key1","key2","key4")],[1]),
+            ('d','e.f'): ([],[])
+        }
+    }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 0
+    rule.matches = []
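+    # The two calls below fall inside a single threshold_window_size (6 hours here),
+    # so their counts (9, then 1) accumulate to the threshold of 10 before a match fires.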
+    # will not cause a match, as the threshold won't be reached
+    time_pointer += datetime.timedelta(hours = 1)
+    data = {
+        time_pointer : {
+            ('a', 'b', 'c'): ([("key1","key2","key5")],[9]),
+            ('d','e.f'): ([],[])
+        }
+    }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 0
+
+
+    # will cause a match, as the threshold will be reached
+    data = {
+        time_pointer : {
+            ('a', 'b', 'c'): ([("key1","key2","key5")],[1]),
+            ('d','e.f'): ([],[])
+        }
+    }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 1
+    assert rule.matches[0]['field'] == ('a', 'b', 'c')
+    assert rule.matches[0]['new_value'] == ("key1","key2","key5")
+    rule.matches = []
+
+    # test composite flatten buckets
+    keys,counts = rule.flatten_aggregation_hierarchy(mock_res['responses'][0]['aggregations']['values']['buckets'])
+    assert keys == [('key1', 'key2', 'key3'), ('key1', 'key2', 'key4')]
+    assert counts == [3, 2]
+
+def test_new_term_threshold():
+    rules = {'fields': ['a'],
+             'timestamp_field': '@timestamp',
+             'kibana_adapter': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash',
+             'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts, 'terms_window_size': {'days': 10 },
+             'window_step_size' : {'hours': 1 }, 'terms_size': 10000, 'threshold': 0 }
+
+    mock_res ={'responses' : [{'aggregations': {'values': {'buckets': [{'key': 'key1', 'doc_count': 1}]}}}] }
+
+    with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es:
+        mock_es.return_value = mock.Mock()
+        mock_es.return_value.msearch.return_value = mock_res
+        rule = NewTermsRule(rules)
+
+
+    # introducing a new value for field a should trigger, as the threshold is 0
+    data = {
+        ts_now() : {
+            ('a'): (["key2"],[1])
+        }
+    }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 1
+
+    # changing threshold to 10 and threshold_window_size to 2 hours
+    rules['threshold'] = 10
+    rules['threshold_window_size'] = {"hours" : 2}
+
+    # used for incrementing time
+    time_pointer = ts_now()
+
+    with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es:
+        mock_es.return_value = mock.Mock()
+        mock_es.return_value.msearch.return_value = mock_res
+        rule = NewTermsRule(rules)
+
+    # new value for field 'a' with count 8 shouldn't create a match
+    data = {
+        time_pointer : {
+            ('a'): (["key2"],[8])
+        }
+    }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 0
+
+
+    # new value for field 'a' with count 8 after 3 hours shouldn't create a match
+
+    time_pointer += datetime.timedelta(**{"hours":3})
+
+    data = {
+        time_pointer : {
+            ('a'): (["key2"],[8])
+        }
+    }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 0
+
+    # new value for field a with count 2 after 10 minutes
+    # should create a match as the total count stored for the last 2 hours would be 10
+    time_pointer += datetime.timedelta(**{"minutes":10})
+
+    data = {
+        time_pointer : {
+            ('a'): (["key1","key2"],[1,2])
+        }
+    }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 1
+
+    # no new matches should be added when the rule crosses the threshold a second time
+
+    time_pointer += datetime.timedelta(**{"minutes":10})
+
+    data = {
+        time_pointer : {
+            ('a'): (["key2"],[20])
+        }
+    }
+    rule.add_terms_data(data)
+    assert len(rule.matches) == 1
+
 def test_new_term_bounds():
     rules = {'fields': ['a'],
              'timestamp_field': '@timestamp',
              'kibana_adapter': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash',
-             'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts, 'terms_window_size': {'days': 10 },
-             'window_step_size' : {'hours': 1 },'refresh_interval' : {'hours': 2 }, 'terms_size': 10000 }
+             'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts, 'terms_window_size': {'days': 10 },
+             'window_step_size' : {'hours': 1 }, 'terms_size': 10000, 'threshold_window_size': {"days": 3} }
 
-    rule = NewTermsRule(rules)
+    mock_res ={'responses' : [{'aggregations': {'values': {'buckets': [{'key': 'key1', 'doc_count': 1},
+                                                            {'key': 'key2', 'doc_count': 5}]}}}] }
+
+    with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es:
+        mock_es.return_value = mock.Mock()
+
mock_es.return_value.msearch.return_value = mock_res + rule = NewTermsRule(rules) assert rule.window_size == datetime.timedelta(**{'days': 7}) - assert rule.step == datetime.timedelta(**{'hours': 6}) - assert rule.refresh_interval == datetime.timedelta(**{'hours': 6}) + assert rule.threshold_window_size == datetime.timedelta(**{'days': 2}) assert rule.terms_size == 1000 +## New implementation will never use with_terms +# def test_new_term_with_terms(): +# rules = {'fields': ['a'], +# 'timestamp_field': '@timestamp', +# 'kibana_adapter_host': 'example.com', 'kibana_adapter_port': 10, 'index': 'logstash', 'query_key': 'a', +# 'window_step_size': {'days': 2}, +# 'ts_to_dt': ts_to_dt, 'dt_to_ts': dt_to_ts} +# mock_res = {'responses' : [{'aggregations': {'values': {'buckets': [{'key': 'key1', 'doc_count': 1}, +# {'key': 'key2', 'doc_count': 5}]}}}]} + +# with mock.patch('elastalert.ruletypes.kibana_adapter_client') as mock_es: +# mock_es.return_value = mock.Mock() +# mock_es.return_value.msearch.return_value = mock_res +# mock_es.return_value.info.return_value = {'version': {'number': '2.x.x'}} +# rule = NewTermsRule(rules) + +# # Only 4 queries because of custom step size +# assert rule.es.msearch.call_count == 4 + +# # Key1 and key2 shouldn't cause a match +# terms = {ts_now(): [{'key': 'key1', 'doc_count': 1}, +# {'key': 'key2', 'doc_count': 1}]} +# rule.add_terms_data(terms) +# assert rule.matches == [] + +# # Key3 causes an alert for field a +# terms = {ts_now(): [{'key': 'key3', 'doc_count': 1}]} +# rule.add_terms_data(terms) +# assert len(rule.matches) == 1 +# assert rule.matches[0]['new_field'] == 'a' +# assert rule.matches[0]['a'] == 'key3' +# rule.matches = [] + +# # Key3 doesn't cause another alert +# terms = {ts_now(): [{'key': 'key3', 'doc_count': 1}]} +# rule.add_terms_data(terms) +# assert rule.matches == [] + + + def test_flatline(): events = hits(40) rules = { From 80730bd2fbcd9b682a334baa4356f667ab88a2b7 Mon Sep 17 00:00:00 2001 From: aravind-musigumpula Date: Thu, 21 Sep 2023 21:09:54 +0530 Subject: [PATCH 1055/1065] add if result is empty dict --- elastalert/elastalert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index d8c6354af..36e7329f8 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -1978,7 +1978,7 @@ def get_top_counts(self, rule, starttime, endtime, keys, number=None, qk=None): index = self.get_index(rule, starttime, endtime) hits_terms = self.get_hits_terms(rule, starttime, endtime, index, key, qk, number) - if hits_terms is None: + if hits_terms is None or not hits_terms: top_events_count = {} else: buckets = list(hits_terms.values())[0] From be8094bdebf1abcc004a423fbe74d625279f069e Mon Sep 17 00:00:00 2001 From: aravind-musigumpula Date: Mon, 25 Sep 2023 17:47:37 +0530 Subject: [PATCH 1056/1065] include using use_count_query --- elastalert/elastalert.py | 17 ++++++++++------ elastalert/ruletypes.py | 43 +++++++++++++++++++++++++++++----------- elastalert/util.py | 2 +- tests/rules_test.py | 38 +++++++++++++++++------------------ 4 files changed, 62 insertions(+), 38 deletions(-) diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index d8c6354af..5d968fdb3 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -453,7 +453,6 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): elastalert_logger.info(status_log) hits = self.process_hits(rule, hits) - return hits @@ -504,8 +503,9 @@ def get_hits_count(self, rule, starttime, 
endtime, index): starttime, endtime, timestamp_field=rule['timestamp_field'], - sort=False, + sort=True, to_ts_func=rule['dt_to_ts'], + desc=True, ) request = get_msearch_query(query,rule) @@ -528,7 +528,14 @@ def get_hits_count(self, rule, starttime, endtime, index): "Queried rule %s from %s to %s: %s hits" % (rule['name'], pretty_ts(starttime, lt, self.pretty_ts_format), pretty_ts(endtime, lt, self.pretty_ts_format), res['hits']['total']) ) - return {endtime: res['hits']['total']} + + if len(res['hits']['hits']) > 0 : + event = self.process_hits(rule, res['hits']['hits']) + else: + event= self.process_hits(rule,[{'_source': {'@timestamp': endtime}}]) + + return {"endtime":endtime,"count": res['hits']['total'],"event": event} + #return {endtime: res['hits']['total']} def get_hits_terms(self, rule, starttime, endtime, index, key, qk=None, size=None): rule_filter = copy.copy(rule['filter']) @@ -727,7 +734,6 @@ def run_query(self, rule, start=None, end=None, scroll=False): elastalert_logger.info("Query start and end time converting UTC to query_timezone : {}".format(rule.get('query_timezone'))) start = ts_utc_to_tz(start, rule.get('query_timezone')) end = ts_utc_to_tz(end, rule.get('query_timezone')) - # Reset hit counter and query rule_inst = rule['type'] rule['scrolling_cycle'] = rule.get('scrolling_cycle', 0) + 1 @@ -988,7 +994,6 @@ def run_rule(self, rule, endtime, starttime=None): rule['original_starttime'] = rule['starttime'] rule['scrolling_cycle'] = 0 - self.thread_data.num_hits = 0 self.thread_data.num_dupes = 0 self.thread_data.cumulative_hits = 0 @@ -1978,7 +1983,7 @@ def get_top_counts(self, rule, starttime, endtime, keys, number=None, qk=None): index = self.get_index(rule, starttime, endtime) hits_terms = self.get_hits_terms(rule, starttime, endtime, index, key, qk, number) - if hits_terms is None: + if hits_terms is None or not hits_terms: top_events_count = {} else: buckets = list(hits_terms.values())[0] diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index e34ce6704..9c4a72dc2 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -5,6 +5,7 @@ import time import itertools + from sortedcontainers import SortedKeyList as sortedlist from elastalert.util import (add_raw_postfix, dt_to_ts, EAException, elastalert_logger, elasticsearch_client, @@ -211,15 +212,30 @@ def __init__(self, *args): self.ts_field = self.rules.get('timestamp_field', '@timestamp') self.get_ts = new_get_event_ts(self.ts_field) self.attach_related = self.rules.get('attach_related', False) + + # def add_count_data(self, data): + # """ Add count data to the rule. Data should be of the form {ts: count}. """ + # if len(data) > 1: + # raise EAException('add_count_data can only accept one count at a time') - def add_count_data(self, data): - """ Add count data to the rule. Data should be of the form {ts: count}. 
""" - if len(data) > 1: - raise EAException('add_count_data can only accept one count at a time') + # (ts, count), = list(data.items()) - (ts, count), = list(data.items()) + # event = ({self.ts_field: ts}, count) + # self.occurrences.setdefault('all', EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event) + # self.check_for_match('all') - event = ({self.ts_field: ts}, count) + def add_count_data(self, data): + if not 'endtime' in data or not 'count' in data: + raise EAException('add_count_data should have endtime and count') + ts = data['endtime'] + count = data['count'] + doc = {} + if 'event' in data and data['event'][0]: + doc = data['event'][0] + else: + doc = {self.ts_field: ts} + event = (doc, count) + elastalert_logger.info("event %s",event) self.occurrences.setdefault('all', EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event) self.check_for_match('all') @@ -336,7 +352,7 @@ def append(self, event): self.data.add(event) if event and event[1]: self.running_count += event[1] - + while self.duration() >= self.timeframe: oldest = self.data[0] self.data.remove(oldest) @@ -531,11 +547,14 @@ def __init__(self, *args): self.ref_window_filled_once = False def add_count_data(self, data): - """ Add count data to the rule. Data should be of the form {ts: count}. """ - if len(data) > 1: - raise EAException('add_count_data can only accept one count at a time') - for ts, count in data.items(): - self.handle_event({self.ts_field: ts}, count, 'all') + #""" Add count data to the rule. Data should be of the form {ts: count}. """ + # if len(data) > 1: + # raise EAException('add_count_data can only accept one count at a time') + # for ts, count in data.items(): + # self.handle_event({self.ts_field: ts}, count, 'all') + ts = data['endtime'] + count = data['count'] + self.handle_event({self.ts_field: ts}, count, 'all') def add_terms_data(self, terms): for timestamp, buckets in terms.items(): diff --git a/elastalert/util.py b/elastalert/util.py index 86c5c3145..10cf76519 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -26,7 +26,7 @@ def get_msearch_query(query, rule): search_arr = [] search_arr.append({'index': [rule['index']]}) if rule.get('use_count_query'): - query['size'] = 0 + query['size'] = 1 if rule.get('include'): query['_source'] = {} query['_source']['includes'] = rule['include'] diff --git a/tests/rules_test.py b/tests/rules_test.py index 1c3e5aaa7..00d87742b 100644 --- a/tests/rules_test.py +++ b/tests/rules_test.py @@ -115,26 +115,26 @@ def test_freq_count(): 'use_count_query': True} # Normal match rule = FrequencyRule(rules) - rule.add_count_data({ts_to_dt('2014-10-10T00:00:00'): 75}) + rule.add_count_data({'event':[{'@timestamp': ts_to_dt('2014-10-10T00:00:00')}],'endtime':ts_to_dt('2014-10-10T00:00:00'),'count': 75}) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-10-10T00:15:00'): 10}) + rule.add_count_data({'event':[{'@timestamp': ts_to_dt('2014-10-10T00:15:00')}],'endtime':ts_to_dt('2014-10-10T00:15:00'),'count': 10}) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-10-10T00:25:00'): 10}) + rule.add_count_data({'event':[{'@timestamp': ts_to_dt('2014-10-10T00:25:00')}],'endtime':ts_to_dt('2014-10-10T00:25:00'),'count': 10}) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-10-10T00:45:00'): 6}) + rule.add_count_data({'event':[{'@timestamp': ts_to_dt('2014-10-10T00:45:00')}],'endtime':ts_to_dt('2014-10-10T00:45:00'),'count': 6}) assert len(rule.matches) == 1 # First data 
goes out of timeframe first rule = FrequencyRule(rules) - rule.add_count_data({ts_to_dt('2014-10-10T00:00:00'): 75}) + rule.add_count_data({'event':[{'@timestamp': ts_to_dt('2014-10-10T00:00:00')}],'endtime':ts_to_dt('2014-10-10T00:00:00'),'count': 75}) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-10-10T00:45:00'): 10}) + rule.add_count_data({'event':[{'@timestamp': ts_to_dt('2014-10-10T00:45:00')}],'endtime':ts_to_dt('2014-10-10T00:45:00'),'count': 10}) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-10-10T00:55:00'): 10}) + rule.add_count_data({'event':[{'@timestamp': ts_to_dt('2014-10-10T00:55:00')}],'endtime':ts_to_dt('2014-10-10T00:55:00'),'count': 10}) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-10-10T01:05:00'): 6}) + rule.add_count_data({'event':[{'@timestamp': ts_to_dt('2014-10-10T01:05:00')}],'endtime':ts_to_dt('2014-10-10T01:05:00'),'count': 6}) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-10-10T01:00:00'): 75}) + rule.add_count_data({'event':[{'@timestamp': ts_to_dt('2014-10-10T01:00:00')}],'endtime':ts_to_dt('2014-10-10T01:00:00'),'count': 75}) assert len(rule.matches) == 1 # except EAException @@ -142,7 +142,7 @@ def test_freq_count(): rule = FrequencyRule(rules) rule.add_count_data('aaaa') except EAException as ea: - assert 'add_count_data can only accept one count at a time' in str(ea) + assert 'add_count_data should have endtime and count' in str(ea) def test_freq_out_of_order(): @@ -226,20 +226,20 @@ def test_spike_count(): rule = SpikeRule(rules) # Double rate of events at 20 seconds - rule.add_count_data({ts_to_dt('2014-09-26T00:00:00'): 10}) + rule.add_count_data({'endtime':ts_to_dt('2014-09-26T00:00:00'),'count': 10}) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-09-26T00:00:10'): 10}) + rule.add_count_data({'endtime':ts_to_dt('2014-09-26T00:00:10'),'count': 10}) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-09-26T00:00:20'): 20}) + rule.add_count_data({'endtime':ts_to_dt('2014-09-26T00:00:20'),'count': 20}) assert len(rule.matches) == 1 # Downward spike rule = SpikeRule(rules) - rule.add_count_data({ts_to_dt('2014-09-26T00:00:00'): 10}) + rule.add_count_data({'endtime':ts_to_dt('2014-09-26T00:00:00'),'count': 10}) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-09-26T00:00:10'): 10}) + rule.add_count_data({'endtime':ts_to_dt('2014-09-26T00:00:10'),'count': 10}) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-09-26T00:00:20'): 0}) + rule.add_count_data({'endtime':ts_to_dt('2014-09-26T00:00:20'),'count': 0}) assert len(rule.matches) == 1 @@ -1106,13 +1106,13 @@ def test_flatline_count(): 'threshold': 1, 'timestamp_field': '@timestamp'} rule = FlatlineRule(rules) - rule.add_count_data({ts_to_dt('2014-10-11T00:00:00'): 1}) + rule.add_count_data({'endtime':ts_to_dt('2014-10-11T00:00:00'),'count': 1}) rule.garbage_collect(ts_to_dt('2014-10-11T00:00:10')) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-10-11T00:00:15'): 0}) + rule.add_count_data({'endtime':ts_to_dt('2014-10-11T00:00:15'),'count': 0}) rule.garbage_collect(ts_to_dt('2014-10-11T00:00:20')) assert len(rule.matches) == 0 - rule.add_count_data({ts_to_dt('2014-10-11T00:00:35'): 0}) + rule.add_count_data({'endtime':ts_to_dt('2014-10-11T00:00:35'),'count': 0}) assert len(rule.matches) == 1 From bdb737e1dc369a412aed5765448b05eb88958b03 Mon Sep 17 00:00:00 2001 From: aravind-musigumpula Date: Tue, 26 Sep 2023 11:02:10 +0530 
Subject: [PATCH 1057/1065] add comments

---
 elastalert/ruletypes.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py
index 9c4a72dc2..3c2734e4c 100644
--- a/elastalert/ruletypes.py
+++ b/elastalert/ruletypes.py
@@ -225,6 +225,8 @@ def __init__(self, *args):
     #     self.check_for_match('all')
 
     def add_count_data(self, data):
+        # data structure should be -> data: {endtime:, count:, event:[{}]}
+        # if data doesn't have endtime and count as in the example above, raise an exception
         if not 'endtime' in data or not 'count' in data:
             raise EAException('add_count_data should have endtime and count')
         ts = data['endtime']
@@ -552,6 +554,9 @@ def add_count_data(self, data):
     #     raise EAException('add_count_data can only accept one count at a time')
     # for ts, count in data.items():
     #     self.handle_event({self.ts_field: ts}, count, 'all')
+
+        # data structure should be -> data: {endtime:, count:, event:[{}]}
+        # if data doesn't have endtime and count as in the example above, raise an exception
         ts = data['endtime']
         count = data['count']
         self.handle_event({self.ts_field: ts}, count, 'all')

From deed439c0e894ee0b4e74c5bd8d91f9da862e4dc Mon Sep 17 00:00:00 2001
From: aravind-musigumpula
Date: Tue, 26 Sep 2023 12:14:12 +0530
Subject: [PATCH 1058/1065] remove debug statement

---
 elastalert/ruletypes.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py
index 3c2734e4c..61657bc8f 100644
--- a/elastalert/ruletypes.py
+++ b/elastalert/ruletypes.py
@@ -237,7 +237,6 @@ def add_count_data(self, data):
         else:
             doc = {self.ts_field: ts}
         event = (doc, count)
-        elastalert_logger.info("event %s",event)
         self.occurrences.setdefault('all', EventWindow(self.rules['timeframe'], getTimestamp=self.get_ts)).append(event)
         self.check_for_match('all')

From 603cfd93758567efdb28f3d20b8f7235f5e15d85 Mon Sep 17 00:00:00 2001
From: aravind-musigumpula
Date: Wed, 17 Jan 2024 17:52:30 +0530
Subject: [PATCH 1059/1065] add alert_time as label in alert

---
 elastalert/alerters/alertmanager.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/elastalert/alerters/alertmanager.py b/elastalert/alerters/alertmanager.py
index c5f2784f4..ed3c41d7b 100644
--- a/elastalert/alerters/alertmanager.py
+++ b/elastalert/alerters/alertmanager.py
@@ -59,6 +59,9 @@ def alert(self, matches):
             'labels': self.labels
         }
 
+        if self.rule.get('timestamp_field') in matches[0]:
+            payload['labels']['alert_time']=matches[0][self.rule.get('timestamp_field')]
+
         for host in self.hosts:
             try:
                 url = host

From 7738a0719f3bf2c50a342233351df575fe33b8bc Mon Sep 17 00:00:00 2001
From: aravind-musigumpula
Date: Thu, 18 Jan 2024 13:23:36 +0530
Subject: [PATCH 1060/1065] change the label name

---
 elastalert/alerters/alertmanager.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/elastalert/alerters/alertmanager.py b/elastalert/alerters/alertmanager.py
index ed3c41d7b..d714e49ab 100644
--- a/elastalert/alerters/alertmanager.py
+++ b/elastalert/alerters/alertmanager.py
@@ -60,7 +60,7 @@ def alert(self, matches):
         }
 
         if self.rule.get('timestamp_field') in matches[0]:
-            payload['labels']['alert_time']=matches[0][self.rule.get('timestamp_field')]
+            payload['labels']['alert_match_time']=matches[0][self.rule.get('timestamp_field')]
 
         for host in self.hosts:
             try:
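Taken together, the two patches above attach the timestamp of the first match as an alert_match_time label on the Alertmanager payload. A minimal sketch of that behaviour, assuming a rule whose timestamp_field is '@timestamp' (build_labels is an illustrative helper, not a function in the codebase, and the values are hypothetical):

    def build_labels(rule, matches, base_labels):
        # Mirrors the diff above: copy the first match's timestamp, keyed by the
        # rule's timestamp_field, into an 'alert_match_time' label when present.
        labels = dict(base_labels)
        ts_field = rule.get('timestamp_field')
        if ts_field in matches[0]:
            labels['alert_match_time'] = matches[0][ts_field]
        return labels

    # Hypothetical usage:
    # build_labels({'timestamp_field': '@timestamp'},
    #              [{'@timestamp': '2024-01-18T13:23:36Z'}],
    #              {'alertname': 'demo', 'elastalert_rule': 'demo-rule'})
    # -> {'alertname': 'demo', 'elastalert_rule': 'demo-rule',
    #     'alert_match_time': '2024-01-18T13:23:36Z'}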
From 0961faffb55d2b4d41a862225ab382110b3089d9 Mon Sep 17 00:00:00 2001
From: sivatarunp
Date: Mon, 15 Jul 2024 11:25:05 +0530
Subject: [PATCH 1061/1065] converting alert info to dictionary based on a
 flag, add adv query support, update docker file (#31)

* changing alert data to stringified dict
* checking query_key exists condition before adding
* changing condition to json_payload key
* removing extra function
* removing log file
* removing print statement
* changing count addition condition
* adding adv query support and changing count value of freq
* adding advance query rule
* removing print statements
* adding missed count print statements
* changing condition check
* adding changes to support for OS
* sticking to old version of image
* syntax change
* making all key values as strings
* hardcoding python base image as well
* reverting the change
* changing package version
* removing update
* installing fixed versions
* installing fixed versions
* removing distroless
* changing docker file
---
 Dockerfile                          | 22 +--------
 elastalert/alerters/alertmanager.py | 10 +++++
 elastalert/alerts.py                | 42 +++++++++++------
 elastalert/elastalert.py            | 46 ++++++++++++++++---
 elastalert/loaders.py               |  3 +-
 elastalert/ruletypes.py             | 70 +++++++++++++++++++++++++++++
 elastalert/schema.yaml              |  6 +++
 requirements.txt                    | 38 ++++++++--------
 8 files changed, 176 insertions(+), 61 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index f23040582..7a49357cc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,11 +1,10 @@
-FROM python:3.9-alpine as build
+FROM public.ecr.aws/i1i0w6p5/python:3.9.2 as build

 ENV ELASTALERT_HOME /opt/elastalert

 ADD . /opt/elastalert/

 WORKDIR /opt

-RUN apk add --update --no-cache jq curl gcc openssl-dev libffi-dev ca-certificates musl-dev
 RUN pip install "setuptools==65.5.0" "elasticsearch==6.3.1"

 WORKDIR "${ELASTALERT_HOME}"
@@ -15,27 +14,10 @@ RUN python setup.py install

 RUN pip show elastalert2

-
-FROM gcr.io/distroless/python3:debug as runtime
-
-COPY --from=build /opt/elastalert /opt/elastalert
-COPY --from=build /usr/local/bin/elastalert* /usr/local/bin/
-
-COPY --from=build /opt/elastalert /opt/elastalert
-COPY --from=build /usr/local/lib/python3.9 /usr/local/lib/python3.9
-COPY --from=build /usr/local/bin/elastalert* /usr/local/bin/
-COPY --from=build /usr/local/lib/libpython3.9.so.1.0 /usr/local/lib/
-COPY --from=build /lib/libc.musl-x86_64.so.1 /lib/
-
-#COPY --from=build /data/elastalert /data/elastalert
-
-ENV PYTHONPATH=/usr/local/lib/python3.9/site-packages
-ENV PATH=/usr/local/lib:/usr/lib:$PATH
-
 RUN python --version

 WORKDIR /opt/elastalert

 COPY commands.sh /opt/elastalert/commands.sh
 RUN ["chmod", "+x", "/opt/elastalert/commands.sh"]

-ENTRYPOINT ["sh","/opt/elastalert/commands.sh"]
\ No newline at end of file
+ENTRYPOINT ["sh","/opt/elastalert/commands.sh"]
diff --git a/elastalert/alerters/alertmanager.py b/elastalert/alerters/alertmanager.py
index d714e49ab..ea7bc5528 100644
--- a/elastalert/alerters/alertmanager.py
+++ b/elastalert/alerters/alertmanager.py
@@ -51,6 +51,16 @@ def alert(self, matches):
         self.labels.update(
             alertname=self.alertname,
             elastalert_rule=self.rule.get('name'))
+        if 'json_payload' in self.rule and self.rule['json_payload'] == True:
+            self.labels.update(query_key_fields=self.rule.get('query_key'))
+            if self.rule.get('query_key') in matches[0].keys():
+                self.labels.update(query_key=matches[0][self.rule.get('query_key')])
+            if self.rule.get('alert_field'):
+                if 'value' in matches[0]:
+                    self.labels.update(query_key_fields=matches[0]['key'])
+                    self.labels.update(query_key=matches[0]['value'])
+                else:
+                    self.labels.update(query_key_fields=self.rule.get('alert_field'))
         self.annotations.update({
             self.title_labelname: self.create_title(matches),
             self.body_labelname: self.create_alert_body(matches)})

diff
--git a/elastalert/alerts.py b/elastalert/alerts.py index 1d0dd60d4..bd5a196a9 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -103,7 +103,10 @@ def _add_match_items(self): except TypeError: # Non serializable object, fallback to str pass - self.text += '%s: %s\n' % (key, value_str) + if (isinstance(self.text,dict)): + self.text[key] = value_str + else: + self.text += '%s: %s\n' % (key, value_str) def _pretty_print_as_json(self, blob): try: @@ -113,20 +116,31 @@ def _pretty_print_as_json(self, blob): return json.dumps(blob, cls=DateTimeEncoder, sort_keys=True, indent=4, encoding='Latin-1', ensure_ascii=False) def __str__(self): - self.text = '' - if 'alert_text' not in self.rule: - self.text += self.rule['name'] + '\n\n' - - self._add_custom_alert_text() - self._ensure_new_line() - if self.rule.get('alert_text_type') != 'alert_text_only' and self.rule.get('alert_text_type') != 'alert_text_jinja': - self._add_rule_text() + if 'json_payload' in self.rule and self.rule['json_payload'] == True: + self.text= {} + if 'alert_text' not in self.rule: + self.text['elastalert_rule'] = self.rule['name'] + if self.rule.get('alert_text_type') != 'alert_text_only' and self.rule.get('alert_text_type') != 'alert_text_jinja': + self.text['alert_criteria'] = self.rule['type'].get_match_str(self.match) + if self.rule.get('top_count_keys'): + self._add_top_counts() + if self.rule.get('alert_text_type') != 'exclude_fields': + self._add_match_items() + return str(self.text) + else: + self.text = '' + if 'alert_text' not in self.rule: + self.text += self.rule['name'] + '\n\n' + self._add_custom_alert_text() self._ensure_new_line() - if self.rule.get('top_count_keys'): - self._add_top_counts() - if self.rule.get('alert_text_type') != 'exclude_fields': - self._add_match_items() - return self.text + if self.rule.get('alert_text_type') != 'alert_text_only' and self.rule.get('alert_text_type') != 'alert_text_jinja': + self._add_rule_text() + self._ensure_new_line() + if self.rule.get('top_count_keys'): + self._add_top_counts() + if self.rule.get('alert_text_type') != 'exclude_fields': + self._add_match_items() + return self.text class Alerter(object): diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index 5d968fdb3..1207cc198 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -29,6 +29,7 @@ from elasticsearch.exceptions import ElasticsearchException from elasticsearch.exceptions import NotFoundError from elasticsearch.exceptions import TransportError +from elastalert.ruletypes import AdvancedQueryRule from elastalert.ruletypes import ErrorRateRule, NewTermsRule from elastalert.ruletypes import PercentageMatchRule @@ -401,7 +402,7 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): #using backwards compatibile msearch res = self.thread_data.current_es.msearch(body=request) res = res['responses'][0] - self.thread_data.total_hits = int(res['hits']['total']) + self.thread_data.total_hits = int(res['hits']['total']['value'] if isinstance(res['hits']['total'], dict) else res['hits']['total']) #removed scroll as it aint supported # if scroll: @@ -522,11 +523,11 @@ def get_hits_count(self, rule, starttime, endtime, index): self.handle_error('Error running count query: %s' % (e), {'rule': rule['name'], 'query': query}) return None - self.thread_data.num_hits += res['hits']['total'] + self.thread_data.num_hits += (res['hits']['total']['value'] if isinstance(res['hits']['total'], dict) else res['hits']['total']) lt = rule.get('use_local_time') 
@@ -522,11 +523,11 @@ def get_hits_count(self, rule, starttime, endtime, index):
             self.handle_error('Error running count query: %s' % (e), {'rule': rule['name'], 'query': query})
             return None

-        self.thread_data.num_hits += res['hits']['total']
+        self.thread_data.num_hits += (res['hits']['total']['value'] if isinstance(res['hits']['total'], dict) else res['hits']['total'])
         lt = rule.get('use_local_time')
         elastalert_logger.info(
             "Queried rule %s from %s to %s: %s hits" % (rule['name'], pretty_ts(starttime, lt, self.pretty_ts_format),
-                                                        pretty_ts(endtime, lt, self.pretty_ts_format), res['hits']['total'])
+                                                        pretty_ts(endtime, lt, self.pretty_ts_format), (res['hits']['total']['value'] if isinstance(res['hits']['total'], dict) else res['hits']['total']))
         )

         if len(res['hits']['hits']) > 0 :
@@ -534,7 +535,7 @@
         else:
             event= self.process_hits(rule,[{'_source': {'@timestamp': endtime}}])

-        return {"endtime":endtime,"count": res['hits']['total'],"event": event}
+        return {"endtime":endtime,"count": (res['hits']['total']['value'] if isinstance(res['hits']['total'], dict) else res['hits']['total']),"event": event}
         #return {endtime: res['hits']['total']}

     def get_hits_terms(self, rule, starttime, endtime, index, key, qk=None, size=None):
@@ -622,7 +623,33 @@ def get_hits_aggregation(self, rule, starttime, endtime, index, query_key, term_
             return {}
         payload = res['aggregations']

-        self.thread_data.num_hits += res['hits']['total']
+        self.thread_data.num_hits += (res['hits']['total']['value'] if isinstance(res['hits']['total'], dict) else res['hits']['total'])
+        return {endtime: payload}
+
+    def get_adv_query_aggregation(self, rule, starttime, endtime, index, term_size=None):
+        rule_filter = copy.copy(rule['filter'])
+        base_query = self.get_query(
+            rule_filter,
+            starttime,
+            endtime,
+            timestamp_field=rule['timestamp_field'],
+            sort=False,
+            to_ts_func=rule['dt_to_ts'],
+        )
+        request = get_msearch_query(base_query, rule)
+        try:
+            # using backwards compatible msearch
+            res = self.thread_data.current_es.msearch(body=request)
+            res = res['responses'][0]
+        except ElasticsearchException as e:
+            if len(str(e)) > 1024:
+                e = str(e)[:1024] + '... (%d characters removed)' % (len(str(e)) - 1024)
+            self.handle_error('Error running query: %s' % (e), {'rule': rule['name']})
+            return None
+        if 'aggregations' not in res:
+            return {}
+        payload = res['aggregations']
+        self.thread_data.num_hits += int(res['hits']['total']['value'] if isinstance(res['hits']['total'], dict) else res['hits']['total'])
         return {endtime: payload}

@@ -689,7 +716,7 @@ def get_ch_data(self, rule, starttime, endtime, agg_key, freshquery,aggregation)
         elastalert_logger.info("request data is %s" % json.dumps(data))
         # res = requests.post(self.query_endpoint, json=data)
         # return None, None
-
+
     def remove_duplicate_events(self, data, rule):
         new_events = []
         for event in data:
@@ -747,7 +774,12 @@ def run_query(self, rule, start=None, end=None, scroll=False):
         elif isinstance(rule_inst, ErrorRateRule):
             data = self.get_error_rate(rule, start, end)
         elif rule.get('aggregation_query_element'):
-            data = self.get_hits_aggregation(rule, start, end, index, rule.get('query_key', None))
+            elastalert_logger.info("in agg query element")
+            if isinstance(rule_inst, AdvancedQueryRule):
+                data = self.get_adv_query_aggregation(rule, start, end, index)
+            else:
+                data = self.get_hits_aggregation(rule, start, end, index, rule.get('query_key', None))
+
         else:
             data = self.get_hits(rule, start, end, index, scroll)
         if data:

diff --git a/elastalert/loaders.py b/elastalert/loaders.py
index a1a07837a..18ea2479d 100644
--- a/elastalert/loaders.py
+++ b/elastalert/loaders.py
@@ -93,7 +93,8 @@ class RulesLoader(object):
         'metric_aggregation': ruletypes.MetricAggregationRule,
         'percentage_match': ruletypes.PercentageMatchRule,
         'spike_aggregation': ruletypes.SpikeMetricAggregationRule,
-        'error_rate': ruletypes.ErrorRateRule #Adding Error Rate Rule type
+        'error_rate': ruletypes.ErrorRateRule, #Adding Error Rate Rule type
+        'advanced_query': ruletypes.AdvancedQueryRule
     }

     # Used to map names of alerts to their classes

diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py
index 61657bc8f..048c6c9b9 100644
--- a/elastalert/ruletypes.py
+++ b/elastalert/ruletypes.py
@@ -301,6 +301,7 @@ def check_for_match(self, key, end=False):
             event = self.occurrences[key].data[-1][0]
             if self.attach_related:
                 event['related_events'] = [data[0] for data in self.occurrences[key].data[:-1]]
+            event['count'] = self.occurrences[key].count()
             self.add_match(event)
             self.occurrences.pop(key)

@@ -720,6 +721,74 @@ def garbage_collect(self, ts):
                 placeholder.update({self.rules['query_key']: qk})
                 self.handle_event(placeholder, 0, qk)

+class AdvancedQueryRule(RuleType):
+    """ A rule that uses a query_string query to perform an advanced search, e.g. parsing, evaluating conditions, calculating aggregations. """
+    required_options = frozenset(['alert_field'])
+
+    def __init__(self, *args):
+        super(AdvancedQueryRule, self).__init__(*args)
+        if 'max_threshold' not in self.rules and 'min_threshold' not in self.rules:
+            raise EAException("AdvancedQueryRule must have one of either max_threshold or min_threshold")
+        #self.query_string = self.rules.get('query_string')
+        self.rules['aggregation_query_element'] = {"query": ""}
+
+    def add_aggregation_data(self, payload):
+        for timestamp, payload_data in payload.items():
+            self.check_matches(payload_data, timestamp)
+
+    def check_matches(self, data, timestamp):
+        results = []
+        for key, value in data.items():
+            if 'buckets' in value:
+                if len(value['buckets']) > 0:
+                    results = self.flatten_results(key, value['buckets'], self.rules['alert_field'], {}, results)
+            else:
+                if self.crossed_thresholds(value['value']):
+                    match = {"key": self.rules['alert_field'], "count": value['value'], self.rules['timestamp_field']: timestamp}
+                    self.add_match(match)
+        if len(results) > 0:
+            for event in results:
+                if self.crossed_thresholds(event[self.rules['alert_field']]):
+                    # walk the flattened event to build the grouped keys/values in the required format
+                    group_by_keys = []
+                    group_by_values = []
+                    for k, v in event.items():
+                        if self.rules['alert_field'] not in k:
+                            group_by_keys.append(str(k))
+                            group_by_values.append(str(v))
+                        else:
+                            count = v
+                    group_by_key = ','.join(group_by_keys)
+                    group_by_value = ','.join(group_by_values)
+                    match = {"key": group_by_key, "value": group_by_value, "count": count, self.rules['timestamp_field']: timestamp}
+                    self.add_match(match)
+
+    # Flattens the aggregated data into an array of dictionaries, one per leaf bucket,
+    # each holding the group-by keys/values plus the alert_field metric.
+    # group starts empty and accumulates bucket keys as the recursion descends.
+    def flatten_results(self, key, value, alert_field, group, results=None):
+        if results is None:  # avoid sharing a mutable default list between calls
+            results = []
+        for item in value:
+            temp_group = {}  # copy of group without the metric, restored after a result is appended
+            group[key] = item['key']
+            for k, v in item.items():
+                if isinstance(v, dict):
+                    if "buckets" in v:
+                        self.flatten_results(k, v['buckets'], alert_field, group, results)
+                    elif alert_field in k:
+                        temp_group.update(group)
+                        group[alert_field] = v['value']
+                        results.append(group)
+                        group = temp_group
+        return results
+
+    def crossed_thresholds(self, metric_value):
+        if metric_value is None:
+            return False
+        if 'max_threshold' in self.rules and float(metric_value) > self.rules['max_threshold']:
+            return True
+        if 'min_threshold' in self.rules and float(metric_value) < self.rules['min_threshold']:
+            return True
+        return False
+
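To make the flattening concrete: for a hypothetical two-level terms aggregation whose metric is named error_rate (the rule's alert_field), flatten_results walks the nested buckets and emits one flat dict per leaf. Illustrative input and expected output only, not taken from a real query:

buckets = [
    {'key': 'web-1', 'service': {'buckets': [
        {'key': 'checkout', 'error_rate': {'value': 7.5}},
        {'key': 'search', 'error_rate': {'value': 0.2}},
    ]}},
]

# flatten_results('host', buckets, 'error_rate', {}, []) returns:
# [{'host': 'web-1', 'service': 'checkout', 'error_rate': 7.5},
#  {'host': 'web-1', 'service': 'search', 'error_rate': 0.2}]
# check_matches then joins the non-metric keys/values into the match's
# "key" and "value" strings and compares error_rate against the thresholds.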
""" @@ -752,6 +821,7 @@ def check_for_match(self, key, end=True): # Do a deep-copy, otherwise we lose the datetime type in the timestamp field of the last event event = copy.deepcopy(self.occurrences[key].data[-1][0]) event.update(key=key, count=count) + event[self.rules['query_key']]=key self.add_match(event) if not self.rules.get('forget_keys'): diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 46217cc79..7893778d1 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -220,6 +220,12 @@ oneOf: error_condition: {type: string} unique_column: {type: string} + - title: Advanced Query + required: [alert_field] + properties: + type: {enum: [advanced_query]} + + properties: # Common Settings diff --git a/requirements.txt b/requirements.txt index 570b9c945..f1f4ec190 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,26 +1,26 @@ -apscheduler>=3.8.1,<4.0 -aws-requests-auth>=0.4.3 -sortedcontainers>=2.4.0 -boto3>=1.20.53 -cffi>=1.15.0 -croniter>=1.2.0 +apscheduler==3.10.4 +aws-requests-auth==0.4.3 +sortedcontainers==2.4.0 +boto3==1.34.129 +cffi==1.16.0 +croniter==2.0.5 elasticsearch==6.3.1 -envparse>=0.2.0 +envparse==0.2.0 exotel==0.1.5 Jinja2==3.1.2 -jira>=3.1.1 +jira==3.8.0 jsonschema==4.17.3 -mock>=2.0.0 -prison>=0.2.1 -prometheus_client>=0.13.1 -py-zabbix>=1.1.7 -python-dateutil>=2.8.2 -PyYAML>=6.0 -requests>=2.27.1 -stomp.py>=8.0.1 -texttable>=1.6.4 +mock>=5.1.0 +prison==0.2.1 +prometheus_client==0.13.1 +py-zabbix==1.1.7 +python-dateutil==2.9.0 +PyYAML==6.0.1 +requests==2.32.3 +stomp.py==8.1.2 +texttable>=1.7.0 statsd-tags==3.2.1.post1 twilio==6.57.0 -tencentcloud-sdk-python>=3.0.577 -jsonpointer>=2.2 +tencentcloud-sdk-python==3.0.1171 +jsonpointer==3.0.0 tzlocal==2.1 From ddf3e7717729156cdf71cf135d57e17a799b773c Mon Sep 17 00:00:00 2001 From: ajaywk7 Date: Tue, 30 Jul 2024 09:54:39 +0530 Subject: [PATCH 1062/1065] X env staging header addition (#33) * Adding local datastore setup files * Added header X-Env to router requests if configured * Added header X-Env to router requests if configured * Changed X_ENV env variable name to X-ENV * Revert "Changed X_ENV env variable name to X-ENV" This reverts commit be627e792d68d426d58a1b7f8ee4b8886a7ba14c. 
---
 elastalert/config.py     | 3 ++-
 elastalert/elastalert.py | 5 ++++-
 elastalert/util.py       | 7 ++++++-
 3 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/elastalert/config.py b/elastalert/config.py
index 7e841799a..9cc2618bc 100644
--- a/elastalert/config.py
+++ b/elastalert/config.py
@@ -25,7 +25,8 @@
               'ES_PORT': 'es_port',
               'ES_URL_PREFIX': 'es_url_prefix',
               'STATSD_INSTANCE_TAG': 'statsd_instance_tag',
-              'STATSD_HOST': 'statsd_host'}
+              'STATSD_HOST': 'statsd_host',
+              'X_ENV': 'X_ENV'}

 env = Env(ES_USE_SSL=bool)

diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py
index 1207cc198..5cf6c6210 100755
--- a/elastalert/elastalert.py
+++ b/elastalert/elastalert.py
@@ -704,7 +704,10 @@ def get_ch_data(self, rule, starttime, endtime, agg_key, freshquery,aggregation)
             "aggregations":[aggregation]
         }
         try:
-            res = requests.post(self.query_endpoint, json=data)
+            headers = {}
+            if 'X_ENV' in rule:
+                headers['X-ENV'] = rule['X_ENV']
+            res = requests.post(self.query_endpoint, json=data, headers=headers)
             res.raise_for_status()
         except requests.exceptions.RequestException as e:
             if len(str(e)) > 1024:

diff --git a/elastalert/util.py b/elastalert/util.py
index 10cf76519..dedb8b42a 100644
--- a/elastalert/util.py
+++ b/elastalert/util.py
@@ -395,7 +395,7 @@ def build_adapter_conn_config(conf):
     parsed_conf['es_bearer'] = None
     parsed_conf['aws_region'] = None
     parsed_conf['profile'] = None
-    parsed_conf['headers'] = None
+    parsed_conf['headers'] = {}
     parsed_conf['es_host'] = conf['kibana_adapter']
     parsed_conf['es_port'] = conf['kibana_adapter_port']
     parsed_conf['es_url_prefix'] = ''
@@ -420,6 +420,11 @@
     elif 'es_bearer' in conf:
         parsed_conf['es_bearer'] = conf['es_bearer']

+    if os.environ.get('X_ENV'):
+        parsed_conf['headers']['X-ENV'] = os.environ.get('X_ENV')
+    elif 'X_ENV' in conf:
+        parsed_conf['headers']['X-ENV'] = conf['X_ENV']
+
     if 'aws_region' in conf:
         parsed_conf['aws_region'] = conf['aws_region']

From ed6cf658531405aeb88a17641a9641a161d48e9b Mon Sep 17 00:00:00 2001
From: sivatarunp
Date: Wed, 31 Jul 2024 15:01:47 +0530
Subject: [PATCH 1063/1065] Upgrading to OS (#34)

* changing es package version
* changing elastalert to work for OS clusters
---
 elastalert/elastalert.py | 16 ++++++----------
 requirements.txt         |  2 +-
 2 files changed, 7 insertions(+), 11 deletions(-)

diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py
index 5cf6c6210..7d17d2859 100755
--- a/elastalert/elastalert.py
+++ b/elastalert/elastalert.py
@@ -318,7 +318,7 @@ def get_index_start(self, index, timestamp_field='@timestamp'):
         query = {'sort': {timestamp_field: {'order': 'asc'}}}
         try:
             res = self.thread_data.current_es.search(index=index, size=1, body=query,
-                                                     _source_includes=[timestamp_field], ignore_unavailable=True)
+                                                    _source_includes=[timestamp_field], ignore_unavailable=True)
         except ElasticsearchException as e:
             self.handle_error("Elasticsearch query error: %s" % (e), {'index': index, 'query': query})
             return '1969-12-30T00:00:00Z'
@@ -841,8 +841,8 @@ def get_starttime(self, rule):
             doc_type = 'elastalert_status'
             index = self.writeback_es.resolve_writeback_index(self.writeback_index, doc_type)
             #modded for elasticsearch ver 6 library compatibility
-            res = self.writeback_es.search(index=index, doc_type='elastalert_status',
-                                           size=1, body=query, _source_include=['endtime', 'rule_name'])
+            res = self.writeback_es.search(index=index, size=1, body=query,
+                                           _source_includes=['endtime', 'rule_name'])
             if res['hits']['hits']:
                 endtime =
ts_to_dt(res['hits']['hits'][0]['_source']['endtime'])
@@ -1650,7 +1650,7 @@ def writeback(self, doc_type, body, rule=None, match_body=None):
         try:
             index = self.writeback_es.resolve_writeback_index(self.writeback_index, doc_type)
-            res = self.writeback_es.index(index=index,doc_type=doc_type, body=body)
+            res = self.writeback_es.index(index=index, body=body)
             return res
         except ElasticsearchException as e:
             elastalert_logger.exception("Error writing alert info to Elasticsearch: %s" % (e))
@@ -1672,7 +1672,6 @@ def find_recent_pending_alerts(self, time_limit):
         try:
             #modded for elasticsearch ver 6 library compatibility
             res = self.writeback_es.search(index=self.writeback_index,
-                                           doc_type='elastalert',
                                            body=query,
                                            size=1000)
             if res['hits']['hits']:
@@ -1727,7 +1726,6 @@ def send_pending_alerts(self):
             # Delete it from the index
             try:
                 self.writeback_es.delete(index=self.writeback_index,
-                                         doc_type='elastalert',
                                          id=_id)
             except ElasticsearchException:  # TODO: Give this a more relevant exception, try:except: is evil.
                 self.handle_error("Failed to delete alert %s at %s" % (_id, alert_time))
@@ -1760,13 +1758,11 @@ def get_aggregated_matches(self, _id):
         try:
             #modded for elasticsearch ver 6 library compatibility
             res = self.writeback_es.search(index=self.writeback_index,
-                                           doc_type='elastalert',
                                            body=query,
                                            size=self.max_aggregation)
             for match in res['hits']['hits']:
                 matches.append(match['_source'])
                 self.writeback_es.delete(index=self.writeback_index,
-                                         doc_type='elastalert',
                                          id=match['_id'])
         except (KeyError, ElasticsearchException) as e:
             self.handle_error("Error fetching aggregated matches: %s" % (e), {'id': _id})
@@ -1931,8 +1927,8 @@ def is_silenced(self, rule_name):
             doc_type = 'silence'
             index = self.writeback_es.resolve_writeback_index(self.writeback_index, doc_type)
             #modded for elasticsearch ver 6 library compatibility
-            res = self.writeback_es.search(index=index, doc_type='silence',
-                                           size=1, body=query, _source_include=['until', 'exponent'])
+            res = self.writeback_es.search(index=index, size=1, body=query,
+                                           _source_includes=['until', 'exponent'])
         except ElasticsearchException as e:
             self.handle_error("Error while querying for alert silence status: %s" % (e), {'rule': rule_name})

diff --git a/requirements.txt b/requirements.txt
index f1f4ec190..149d162b9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,7 @@ sortedcontainers==2.4.0
 boto3==1.34.129
 cffi==1.16.0
 croniter==2.0.5
-elasticsearch==6.3.1
+elasticsearch==7.10.1
 envparse==0.2.0
 exotel==0.1.5
 Jinja2==3.1.2

From b66348d99f448034f280a8b68889b7f962065ba8 Mon Sep 17 00:00:00 2001
From: aravind-musigumpula
Date: Mon, 5 Aug 2024 16:21:05 +0530
Subject: [PATCH 1064/1065] fix timestamp issue

---
 elastalert/elastalert.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py
index 7d17d2859..6a7356dd9 100755
--- a/elastalert/elastalert.py
+++ b/elastalert/elastalert.py
@@ -533,7 +533,7 @@ def get_hits_count(self, rule, starttime, endtime, index):
         if len(res['hits']['hits']) > 0 :
             event = self.process_hits(rule, res['hits']['hits'])
         else:
-            event= self.process_hits(rule,[{'_source': {'@timestamp': endtime}}])
+            event= self.process_hits(rule,[{'_source': {rule['timestamp_field']: endtime}}])

         return {"endtime":endtime,"count": (res['hits']['total']['value'] if isinstance(res['hits']['total'], dict) else res['hits']['total']),"event": event}
         #return {endtime: res['hits']['total']}

From 471f96c667758c7b7fe6f64289450c8fa146f158 Mon Sep 17 00:00:00 2001
From: sivatarunp
Date: Wed, 7 Aug 2024 11:05:45 +0530
Subject: [PATCH 1065/1065] fixing query_key error

---
 elastalert/ruletypes.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py
index 048c6c9b9..ddefc58e9 100644
--- a/elastalert/ruletypes.py
+++ b/elastalert/ruletypes.py
@@ -821,7 +821,8 @@ def check_for_match(self, key, end=True):
         # Do a deep-copy, otherwise we lose the datetime type in the timestamp field of the last event
         event = copy.deepcopy(self.occurrences[key].data[-1][0])
         event.update(key=key, count=count)
-        event[self.rules['query_key']] = key
+        if self.rules.get('query_key'):
+            event[self.rules['query_key']] = key
         self.add_match(event)

         if not self.rules.get('forget_keys'):
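For reference, a small illustration (hypothetical rules dicts, illustrative only) of why the guard uses rules.get('query_key') rather than rules['query_key']: a flatline rule may have no query_key at all, in which case subscripting raises KeyError before the guard can help, while .get() returns None and the assignment is skipped safely.

event = {'count': 0}

rules = {'query_key': 'host'}   # query_key configured
qk = rules.get('query_key')
if qk:
    event[qk] = 'web-1'         # field is attached to the match

rules = {}                      # no query_key configured
qk = rules.get('query_key')     # None; rules['query_key'] would raise KeyError
if qk:
    event[qk] = 'web-1'         # safely skipped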