diff --git a/Dockerfile-test b/Dockerfile-test index 3c153e644..9e7165720 100644 --- a/Dockerfile-test +++ b/Dockerfile-test @@ -1,7 +1,9 @@ -FROM ubuntu:latest +FROM ubuntu:21.10 -RUN apt-get update && apt-get upgrade -y -RUN apt-get -y install build-essential python3.6 python3.6-dev python3-pip libssl-dev git +RUN apt update && apt upgrade -y +RUN apt install software-properties-common -y +RUN add-apt-repository ppa:deadsnakes/ppa +RUN apt -y install build-essential python3.10 python3.10-dev python3-pip libssl-dev git WORKDIR /home/elastalert diff --git a/Makefile b/Makefile index 470062ce8..608c0bb8b 100644 --- a/Makefile +++ b/Makefile @@ -21,7 +21,7 @@ test-elasticsearch: test-docker: docker-compose --project-name elastalert build tox - docker-compose --project-name elastalert run tox + docker-compose --project-name elastalert run --rm tox clean: make -C docs clean diff --git a/README.md b/README.md index 99acc02e7..c6e973ea3 100644 --- a/README.md +++ b/README.md @@ -39,23 +39,33 @@ Several rule types with common monitoring paradigms are included with ElastAlert Currently, we have built-in support for the following alert types: +- Alerta +- Alertmanager +- Amazon Simple Notification Service (AWS SNS) +- Command +- Datadog +- Debug +- Discord - Email -- JIRA +- Exotel +- Gitter +- Google Chat +- HTTP POST +- Jira +- Line Notify +- Mattermost +- Microsoft Teams - OpsGenie -- Commands -- HipChat -- MS Teams -- Slack -- Telegram -- GoogleChat -- AWS SNS -- VictorOps +- Rocket.Chat - PagerDuty - PagerTree -- Exotel +- ServiceNow +- Slack +- Splunk On-Call (Formerly VictorOps) +- Stomp +- Telegram +- TheHive - Twilio -- Gitter -- Line Notify - Zabbix Additional rule types and alerts can be easily imported or written. 
@@ -115,13 +125,13 @@ A [Dockerized version](https://github.com/bitsensor/elastalert) of ElastAlert in ```bash git clone https://github.com/bitsensor/elastalert.git; cd elastalert -docker run -d -p 3030:3030 \ +docker run -d -p 3030:3030 -p 3333:3333 \ -v `pwd`/config/elastalert.yaml:/opt/elastalert/config.yaml \ -v `pwd`/config/config.json:/opt/elastalert-server/config/config.json \ -v `pwd`/rules:/opt/elastalert/rules \ -v `pwd`/rule_templates:/opt/elastalert/rule_templates \ --net="host" \ - --name elastalert bitsensor/elastalert:latest + --name elastalert bitsensor/elastalert:3.0.0-beta.1 ``` ## Documentation @@ -150,7 +160,7 @@ Examples of different types of rules can be found in example_rules/. increases by a given factor. This example will send an email alert when there are 3 times more events matching a filter occurring within the last 2 hours than the number of events in the previous 2 hours. -- ``example_frequency.yaml`` is an example of the "frequency" rule type, which will alert when there are a given number of events occuring +- ``example_frequency.yaml`` is an example of the "frequency" rule type, which will alert when there are a given number of events occurring within a time period. This example will send an email when 50 documents matching a given filter occur within a 4 hour timeframe. - ``example_change.yaml`` is an example of the "change" rule type, which will alert when a certain field in two documents changes. In this example, @@ -267,7 +277,7 @@ status: ### How can I make the alert come at a certain time? -The ``aggregation`` feature will take every alert that has occured over a period of time and send them together in one alert. You can use cron style syntax to send all alerts that have occured since the last once by using +The ``aggregation`` feature will take every alert that has occurred over a period of time and send them together in one alert. 
You can use cron style syntax to send all alerts that have occurred since the last one by using
diff --git a/config.yaml.example b/config.yaml.example index 9d9176382..89db954be 100644 --- a/config.yaml.example +++ b/config.yaml.example @@ -48,7 +48,6 @@ es_port: 9200 # Use SSL authentication with client certificates client_cert must be # a pem file containing both cert and key for client -#verify_certs: True #ca_certs: /path/to/cacert.pem #client_cert: /path/to/client_cert.pem #client_key: /path/to/client_key.key @@ -78,38 +77,38 @@ alert_time_limit: # logline: # format: '%(asctime)s %(levelname)+8s %(name)+20s %(message)s' # -# handlers: -# console: -# class: logging.StreamHandler -# formatter: logline -# level: DEBUG -# stream: ext://sys.stderr +# handlers: +# console: +# class: logging.StreamHandler +# formatter: logline +# level: DEBUG +# stream: ext://sys.stderr # -# file: -# class : logging.FileHandler -# formatter: logline -# level: DEBUG -# filename: elastalert.log +# file: +# class : logging.FileHandler +# formatter: logline +# level: DEBUG +# filename: elastalert.log # -# loggers: -# elastalert: -# level: WARN -# handlers: [] -# propagate: true +# loggers: +# elastalert: +# level: WARN +# handlers: [] +# propagate: true # -# elasticsearch: -# level: WARN -# handlers: [] -# propagate: true +# elasticsearch: +# level: WARN +# handlers: [] +# propagate: true # -# elasticsearch.trace: -# level: WARN -# handlers: [] -# propagate: true +# elasticsearch.trace: +# level: WARN +# handlers: [] +# propagate: true # -# '': # root logger -# level: WARN -# handlers: -# - console -# - file -# propagate: false +# '': # root logger +# level: WARN +# handlers: +# - console +# - file +# propagate: false diff --git a/docs/source/elastalert.rst b/docs/source/elastalert.rst index b1008c3c4..cbe469fc7 100755 --- a/docs/source/elastalert.rst +++ b/docs/source/elastalert.rst @@ -31,18 +31,34 @@ Several rule types with common monitoring paradigms are included with ElastAlert Currently, we have support built in for these alert types: +- Alerta +- Alertmanager +- Amazon 
Simple Notification Service (AWS SNS) - Command +- Datadog +- Debug +- Discord - Email -- JIRA +- Exotel +- Gitter +- Google Chat +- HTTP POST +- Jira +- Line Notify +- Mattermost +- Microsoft Teams - OpsGenie -- SNS -- HipChat +- PagerDuty +- PagerTree +- Rocket.Chat +- ServiceNow - Slack -- Telegram -- GoogleChat -- Debug +- Splunk On-Call (Formerly VictorOps) - Stomp +- Telegram - TheHive +- Twilio +- Zabbix Additional rule types and alerts can be easily imported or written. (See :ref:`Writing rule types ` and :ref:`Writing alerts `) diff --git a/docs/source/ruletypes.rst b/docs/source/ruletypes.rst index ff3763712..208fe4cf4 100644 --- a/docs/source/ruletypes.rst +++ b/docs/source/ruletypes.rst @@ -140,7 +140,7 @@ Rule Configuration Cheat Sheet +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ | ``ignore_null`` (boolean, no default) | | | Req | Req | | | | | | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ -| ``query_key`` (string, no default) | Opt | | | Req | Opt | Opt | Opt | Req | Opt | +| ``query_key`` (string or list, no default) | Opt | | | Req | Opt | Opt | Opt | Req | Opt | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ | ``aggregation_key`` (string, no default) | Opt | | | | | | | | | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ @@ -160,7 +160,7 @@ Rule Configuration Cheat Sheet | | | | | | | | | | | |``doc_type`` (string, no default) | | | | | | | | | | | | | | | | | | | | | -|``query_key`` (string, no default) | | | | | | | | | | +|``query_key`` (string or list, no default) | | | | | | | | | | | | | | | | | | | | | |``terms_size`` (int, default 50) | 
| | | | | | | | | +----------------------------------------------------+--------+-----------+-----------+--------+-----------+-------+----------+--------+-----------+ @@ -553,7 +553,7 @@ The currently supported versions of Kibana Discover are: - `5.6` - `6.0`, `6.1`, `6.2`, `6.3`, `6.4`, `6.5`, `6.6`, `6.7`, `6.8` -- `7.0`, `7.1`, `7.2`, `7.3` +- `7.0`, `7.1`, `7.2`, `7.3`, `7.4`, `7.5`, `7.6`, `7.7`, `7.8`, `7.9`, `7.10`, `7.11`, `7.12`, `7.13`, `7.14`, `7.15`, `7.16`, `7.17` ``kibana_discover_version: '7.3'`` @@ -938,7 +938,7 @@ Optional: ``field_value``: When set, uses the value of the field in the document and not the number of matching documents. This is useful to monitor for example a temperature sensor and raise an alarm if the temperature grows too fast. Note that the means of the field on the reference and current windows are used to determine if the ``spike_height`` value is reached. -Note also that the threshold parameters are ignored in this smode. +Note also that the threshold parameters are ignored in this mode. ``threshold_ref``: The minimum number of events that must exist in the reference window for an alert to trigger. For example, if @@ -1407,6 +1407,55 @@ come from an individual event, usually the one which triggers the alert. When using ``alert_text_args``, you can access nested fields and index into arrays. For example, if your match was ``{"data": {"ips": ["127.0.0.1", "12.34.56.78"]}}``, then by using ``"data.ips[1]"`` in ``alert_text_args``, it would replace value with ``"12.34.56.78"``. This can go arbitrarily deep into fields and will still work on keys that contain dots themselves. +Alertmanager +~~~~~~~~~~~~ + +This alert type will send alerts to Alertmanager postAlerts. ``alert_subject`` and ``alert_text`` are passed as the annotations labeled ``summary`` and ``description`` accordingly. The labels can be changed. +See https://prometheus.io/docs/alerting/clients/ for more details about the Alertmanager alert format. 
+ +Required: + +``alertmanager_hosts``: The list of hosts pointing to the Alertmanager. + +Optional: + +``alertmanager_api_version``: Defaults to `v1`. Set to `v2` to enable the Alertmanager V2 API postAlerts. + +``alertmanager_alertname``: ``alertname`` is the only required label. Defaults to using the rule name of the alert. + +``alertmanager_labels``: Key:value pairs of arbitrary labels to be attached to every alert. Keys should match the regular expression ``^[a-zA-Z_][a-zA-Z0-9_]*$``. + +``alertmanager_annotations``: Key:value pairs of arbitrary annotations to be attached to every alert. Keys should match the regular expression ``^[a-zA-Z_][a-zA-Z0-9_]*$``. + +``alertmanager_fields``: Key:value pairs of labels and corresponding match fields. When using ``alertmanager_fields`` you can access nested fields and index into arrays the same way as with ``alert_text_args``. Keys should match the regular expression ``^[a-zA-Z_][a-zA-Z0-9_]*$``. This dictionary will be merged with the ``alertmanager_labels``. + +``alertmanager_alert_subject_labelname``: Rename the annotations' label name for ``alert_subject``. Default is ``summary``. + +``alertmanager_alert_text_labelname``: Rename the annotations' label name for ``alert_text``. Default is ``description``. + +``alertmanager_proxy``: By default ElastAlert 2 will not use a network proxy to send notifications to Alertmanager. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. + +``alertmanager_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. + +``alertmanager_ignore_ssl_errors``: By default ElastAlert 2 will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. + +``alertmanager_timeout``: You can specify a timeout value, in seconds, for making communicating with Alertmanager. The default is 10. If a timeout occurs, the alert will be retried next time ElastAlert 2 cycles. 
+ +Example usage:: + + alert: + - "alertmanager" + alertmanager_hosts: + - "http://alertmanager:9093" + alertmanager_alertname: "Title" + alertmanager_annotations: + severity: "error" + alertmanager_labels: + source: "elastalert" + alertmanager_fields: + msg: "message" + log: "@log_name" + Command ~~~~~~~ @@ -1450,6 +1499,63 @@ Example usage using new-style format:: - command command: ["/bin/send_alert", "--username", "{match[username]}"] +Datadog +~~~~~~~ + +This alert will create a `Datadog Event`_. Events are limited to 4000 characters. If an event is sent that contains +a message that is longer than 4000 characters, only his first 4000 characters will be displayed. + +This alert requires two additional options: + +``datadog_api_key``: `Datadog API key`_ + +``datadog_app_key``: `Datadog application key`_ + +Example usage:: + + alert: + - "datadog" + datadog_api_key: "Datadog API Key" + datadog_app_key: "Datadog APP Key" + +.. _`Datadog Event`: https://docs.datadoghq.com/events/ +.. _`Datadog API key`: https://docs.datadoghq.com/account_management/api-app-keys/#api-keys +.. _`Datadog application key`: https://docs.datadoghq.com/account_management/api-app-keys/#application-keys + +Discord +~~~~~~~ + +Discord will send notification to a Discord application. The body of the notification is formatted the same as with other alerters. + +Required: + +``discord_webhook_url``: The webhook URL. + +Optional: + +``discord_emoji_title``: By default ElastAlert will use the ``:warning:`` emoji when posting to the channel. You can use a different emoji per ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . If slack_icon_url_override parameter is provided, emoji is ignored. + +``discord_proxy``: By default ElastAlert will not use a network proxy to send notifications to Discord. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. + +``discord_proxy_login``: The Discord proxy auth username. 
``discord_proxy_password``: The Discord proxy auth password.
``opsgenie_addr``: The OpsGenie URL to connect against, default is ``https://api.opsgenie.com/v2/alerts``. If using the EU instance of Opsgenie, the URL needs to be ``https://api.eu.opsgenie.com/v2/alerts`` for requests to be successful.
+ Example usage:: opsgenie_details: @@ -1657,8 +1775,8 @@ Example usage:: Environment: '$VAR' # environment variable Message: { field: message } # field in the first match -SNS -~~~ +Amazon Simple Notification Service (AWS SNS) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The SNS alerter will send an SNS notification. The body of the notification is formatted the same as with other alerters. The SNS alerter uses boto3 and can use credentials in the rule yaml, in a standard AWS credential and config files, or @@ -1670,75 +1788,116 @@ SNS requires one option: Optional: -``aws_access_key``: An access key to connect to SNS with. +``aws_access_key_id``: An access key to connect to SNS with. -``aws_secret_key``: The secret key associated with the access key. +``aws_secret_access_key``: The secret key associated with the access key. ``aws_region``: The AWS region in which the SNS resource is located. Default is us-east-1 -``profile``: The AWS profile to use. If none specified, the default will be used. +``aws_profile``: The AWS profile to use. If none specified, the default will be used. -HipChat -~~~~~~~ +Example usage:: -HipChat alerter will send a notification to a predefined HipChat room. The body of the notification is formatted the same as with other alerters. + alert: + - sns: + aws_region: 'us-east-1' # You must nest aws_region within your alert configuration so it is not used to sign AWS requests. + sns_topic_arn: 'arn:aws:sns:us-east-1:123456789:somesnstopic' + aws_access_key_id: 'XXXXXXXXXXXXXXXXXX'' + aws_secret_access_key: 'YYYYYYYYYYYYYYYYYYYY' -The alerter requires the following two options: +Rocket.Chat +~~~~~~~~~~~ -``hipchat_auth_token``: The randomly generated notification token created by HipChat. Go to https://XXXXX.hipchat.com/account/api and use -'Create new token' section, choosing 'Send notification' in Scopes list. +Rocket.Chat alerter will send a notification to a predefined channel. 
The body of the notification is formatted the same as with other alerters. +https://developer.rocket.chat/api/rest-api/methods/chat/postmessage -``hipchat_room_id``: The id associated with the HipChat room you want to send the alert to. Go to https://XXXXX.hipchat.com/rooms and choose -the room you want to post to. The room ID will be the numeric part of the URL. +The alerter requires the following option: -``hipchat_msg_color``: The color of the message background that is sent to HipChat. May be set to green, yellow or red. Default is red. +``rocket_chat_webhook_url``: The webhook URL that includes your auth data and the ID of the channel (room) you want to post to. You can use a list of URLs to send to multiple channels. -``hipchat_domain``: The custom domain in case you have HipChat own server deployment. Default is api.hipchat.com. +Optional: -``hipchat_ignore_ssl_errors``: Ignore TLS errors (self-signed certificates, etc.). Default is false. +``rocket_chat_username_override``: By default Rocket.Chat will use username defined in Integration when posting to the channel. Use this option to change it (free text). -``hipchat_proxy``: By default ElastAlert will not use a network proxy to send notifications to HipChat. Set this option using ``hostname:port`` if you need to use a proxy. +``rocket_chat_channel_override``: Incoming webhooks have a default channel, but it can be overridden. A public channel can be specified “#other-channel”, and a Direct Message with “@username”. -``hipchat_notify``: When set to true, triggers a hipchat bell as if it were a user. Default is true. +``rocket_chat_emoji_override``: By default ElastAlert will use the :ghost: emoji when posting to the channel. You can use a different emoji per +ElastAlert rule. Any Apple emoji can be used, see http://emojipedia.org/apple/ . -``hipchat_from``: When humans report to hipchat, a timestamp appears next to their name. For bots, the name is the name of the token. 
The from, instead of a timestamp, defaults to empty unless set, which you can do here. This is optional. +``rocket_chat_msg_color``: By default the alert will be posted with the ‘danger’ color. You can also use ‘good’ or ‘warning’ colors. -``hipchat_message_format``: Determines how the message is treated by HipChat and rendered inside HipChat applications -html - Message is rendered as HTML and receives no special treatment. Must be valid HTML and entities must be escaped (e.g.: '&' instead of '&'). May contain basic tags: a, b, i, strong, em, br, img, pre, code, lists, tables. -text - Message is treated just like a message sent by a user. Can include @mentions, emoticons, pastes, and auto-detected URLs (Twitter, YouTube, images, etc). -Valid values: html, text. -Defaults to 'html'. +``rocket_chat_text_string``: Notification message you want to add. -``hipchat_mentions``: When using a ``html`` message format, it's not possible to mentions specific users using the ``@user`` syntax. -In that case, you can set ``hipchat_mentions`` to a list of users which will be first mentioned using a single text message, then the normal ElastAlert message will be sent to Hipchat. -If set, it will mention the users, no matter if the original message format is set to HTML or text. -Valid values: list of strings. -Defaults to ``[]``. +``rocket_chat_proxy``: By default ElastAlert will not use a network proxy to send notifications to Rocket.Chat. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. +``rocket_chat_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. -Stride -~~~~~~~ +``rocket_chat_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. -Stride alerter will send a notification to a predefined Stride room. The body of the notification is formatted the same as with other alerters. 
-Simple HTML such as and tags will be parsed into a format that Stride can consume. +``rocket_chat_timeout``: You can specify a timeout value, in seconds, for making communicating with Rocket.Chat. The default is 10. If a timeout occurs, the alert will be retried next time ElastAlert cycles. -The alerter requires the following two options: +``rocket_chat_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the Rocket.Chat notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``. -``stride_access_token``: The randomly generated notification token created by Stride. +``rocket_chat_kibana_discover_color``: The color of the Kibana Discover url attachment. Defaults to ``#ec4b98``. -``stride_cloud_id``: The site_id associated with the Stride site you want to send the alert to. +``rocket_chat_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. -``stride_conversation_id``: The conversation_id associated with the Stride conversation you want to send the alert to. +Example rocket_chat_attach_kibana_discover_url, rocket_chat_kibana_discover_color, rocket_chat_kibana_discover_title:: -``stride_ignore_ssl_errors``: Ignore TLS errors (self-signed certificates, etc.). Default is false. + # (Required) + generate_kibana_discover_url: True + kibana_discover_app_url: "http://localhost:5601/app/discover#/" + kibana_discover_index_pattern_id: "4babf380-c3b1-11eb-b616-1b59c2feec54" + kibana_discover_version: "7.15" -``stride_proxy``: By default ElastAlert will not use a network proxy to send notifications to Stride. Set this option using ``hostname:port`` if you need to use a proxy. 
+ # (Optional) + kibana_discover_from_timedelta: + minutes: 10 + kibana_discover_to_timedelta: + minutes: 10 + # (Required) + rocket_chat_attach_kibana_discover_url: True -MS Teams -~~~~~~~~ + # (Optional) + rocket_chat_kibana_discover_color: "#ec4b98" + rocket_chat_kibana_discover_title: "Discover in Kibana" + +``rocket_chat_alert_fields``: You can add additional fields to your Rocket.Chat alerts using this field. Specify the title using `title` and a value for the field using `value`. Additionally you can specify whether or not this field should be a `short` field using `short: true`. + +Example rocket_chat_alert_fields:: + + rocket_chat_alert_fields: + - title: Host + value: monitor.host + short: true + - title: Status + value: monitor.status + short: true + - title: Zone + value: beat.name + short: true + +Squadcast +~~~~~~~~~ + +Alerts can be sent to Squadcast using the `http post` method described above and Squadcast will process it and send Phone, SMS, Email and Push notifications to the relevant person(s) and let them take actions. -MS Teams alerter will send a notification to a predefined Microsoft Teams channel. +Configuration variables in rules YAML file:: + + alert: post + http_post_url: + http_post_static_payload: + Title: + http_post_all_values: true + +For more details, you can refer the `Squadcast documentation `_. + + +Microsoft Teams +~~~~~~~~~~~~~~~ + +Microsoft Teams alerter will send a notification to a predefined Microsoft Teams channel. The alerter requires the following options: @@ -1788,7 +1947,11 @@ Provide absolute address of the pciture, for example: http://some.address.com/im ``slack_title_link``: You can add a link in your Slack notification by setting this to a valid URL. Requires slack_title to be set. -``slack_timeout``: You can specify a timeout value, in seconds, for making communicating with Slac. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. 
+``slack_footer``: Add a static footer text for alert. Defaults to "". + +``slack_footer_icon``: A Public Url for a footer icon. Defaults to "". + +``slack_timeout``: You can specify a timeout value, in seconds, for making communicating with Slack. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. ``slack_attach_kibana_discover_url``: Enables the attachment of the ``kibana_discover_url`` to the slack notification. The config ``generate_kibana_discover_url`` must also be ``True`` in order to generate the url. Defaults to ``False``. @@ -1796,6 +1959,10 @@ Provide absolute address of the pciture, for example: http://some.address.com/im ``slack_kibana_discover_title``: The title of the Kibana Discover url attachment. Defaults to ``Discover in Kibana``. +``slack_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. + +``slack_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. + Mattermost ~~~~~~~~~~ @@ -1832,7 +1999,7 @@ Telegram alerter will send a notification to a predefined Telegram username or c The alerter requires the following two options: -``telegram_bot_token``: The token is a string along the lines of ``110201543:AAHdqTcvCH1vGWJxfSeofSAs0K5PALDsaw`` that will be required to authorize the bot and send requests to the Bot API. You can learn about obtaining tokens and generating new ones in this document https://core.telegram.org/bots#botfather +``telegram_bot_token``: The token is a string along the lines of ``110201543:AAHdqTcvCH1vGWJxfSeofSAs0K5PALDsaw`` that will be required to authorize the bot and send requests to the Bot API. 
You can learn about obtaining tokens and generating new ones in this document https://core.telegram.org/bots#6-botfather ``telegram_room_id``: Unique identifier for the target chat or username of the target channel using telegram chat_id (in the format "-xxxxxxxx") @@ -1842,6 +2009,10 @@ Optional: ``telegram_proxy``: By default ElastAlert will not use a network proxy to send notifications to Telegram. Set this option using ``hostname:port`` if you need to use a proxy. +``telegram_proxy_login``: The Telegram proxy auth username. + +``telegram_proxy_pass``: The Telegram proxy auth password. + GoogleChat ~~~~~~~~~~ GoogleChat alerter will send a notification to a predefined GoogleChat channel. The body of the notification is formatted the same as with other alerters. @@ -1893,7 +2064,7 @@ V2 API Options (Optional): These options are specific to the PagerDuty V2 API -See https://v2.developer.pagerduty.com/docs/send-an-event-events-api-v2 +See https://developer.pagerduty.com/docs/events-api-v2/trigger-events/ ``pagerduty_api_version``: Defaults to `v1`. Set to `v2` to enable the PagerDuty V2 Event API. @@ -1915,6 +2086,11 @@ See https://v2.developer.pagerduty.com/docs/send-an-event-events-api-v2 ``pagerduty_v2_payload_source_args``: If set, and ``pagerduty_v2_payload_source`` is a formattable string, Elastalert will format the source based on the provided array of fields from the rule or match. +``pagerduty_v2_payload_custom_details``: List of keys:values to use as the content of the custom_details payload. Example - ip:clientip will map the value from the clientip index of Elasticsearch to JSON key named ip. + +``pagerduty_v2_payload_include_all_info``: If True, this will include the entire Elasticsearch document as a custom detail field called "information" in the PagerDuty alert. + + PagerTree ~~~~~~~~~ @@ -1924,6 +2100,8 @@ The alerter requires the following options: ``pagertree_integration_url``: URL generated by PagerTree for the integration. 
+``pagertree_proxy``: By default ElastAlert will not use a network proxy to send notifications to PagerTree. Set this option using ``hostname:port`` if you need to use a proxy. only supports https. + Exotel ~~~~~~ @@ -1935,7 +2113,7 @@ The alerter requires the following option: ``exotel_auth_token``: Auth token assosiated with your Exotel account. -If you don't know how to find your accound sid and auth token, refer - http://support.exotel.in/support/solutions/articles/3000023019-how-to-find-my-exotel-token-and-exotel-sid- +If you don't know how to find your accound sid and auth token, refer - https://support.exotel.com/support/solutions/articles/3000023019-how-to-find-my-exotel-token-and-exotel-sid ``exotel_to_number``: The phone number where you would like send the notification. @@ -1949,39 +2127,70 @@ The alerter has one optional argument: Twilio ~~~~~~ -Twilio alerter will trigger an incident to a mobile phone as sms from your twilio phone number. Alert name will arrive as sms once this option is chosen. +The Twilio alerter will send an alert to a mobile phone as an SMS from your Twilio +phone number. The SMS will contain the alert name. You may use either Twilio SMS +or Twilio Copilot to send the message, controlled by the ``twilio_use_copilot`` +option. -The alerter requires the following option: +Note that when Twilio Copilot *is* used the ``twilio_message_service_sid`` +option is required. Likewise, when *not* using Twilio Copilot, the +``twilio_from_number`` option is required. + +The alerter requires the following options: -``twilio_account_sid``: This is sid of your twilio account. +``twilio_account_sid``: The SID of your Twilio account. -``twilio_auth_token``: Auth token assosiated with your twilio account. +``twilio_auth_token``: Auth token associated with your Twilio account. -``twilio_to_number``: The phone number where you would like send the notification. +``twilio_to_number``: The phone number where you would like to send the alert. 
-``twilio_from_number``: Your twilio phone number from which message will be sent. +Either one of + * ``twilio_from_number``: The Twilio phone number from which the alert will be sent. + * ``twilio_message_service_sid``: The SID of your Twilio message service. +Optional: -VictorOps -~~~~~~~~~ +``twilio_use_copilot``: Whether or not to use Twilio Copilot, False by default. -VictorOps alerter will trigger an incident to a predefined VictorOps routing key. The body of the notification is formatted the same as with other alerters. +Example with Copilot usage:: + + alert: + - "twilio" + twilio_use_copilot: True + twilio_to_number: "0123456789" + twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345" + twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" + twilio_message_service_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" + +Example with SMS usage:: + + alert: + - "twilio" + twilio_to_number: "0123456789" + twilio_from_number: "9876543210" + twilio_auth_token: "abcdefghijklmnopqrstuvwxyz012345" + twilio_account_sid: "ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567" + +Splunk On-Call (Formerly VictorOps) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Splunk On-Call (Formerly VictorOps) alerter will trigger an incident to a predefined Splunk On-Call (Formerly VictorOps) routing key. The body of the notification is formatted the same as with other alerters. The alerter requires the following options: ``victorops_api_key``: API key generated under the 'REST Endpoint' in the Integrations settings. -``victorops_routing_key``: VictorOps routing key to route the alert to. +``victorops_routing_key``: Splunk On-Call (Formerly VictorOps) routing key to route the alert to. -``victorops_message_type``: VictorOps field to specify severity level. Must be one of the following: INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY +``victorops_message_type``: Splunk On-Call (Formerly VictorOps) field to specify severity level. 
Must be one of the following: INFO, WARNING, ACKNOWLEDGEMENT, CRITICAL, RECOVERY Optional: -``victorops_entity_id``: The identity of the incident used by VictorOps to correlate incidents throughout the alert lifecycle. If not defined, VictorOps will assign a random string to each alert. +``victorops_entity_id``: The identity of the incident used by Splunk On-Call (Formerly VictorOps) to correlate incidents throughout the alert lifecycle. If not defined, Splunk On-Call (Formerly VictorOps) will assign a random string to each alert. ``victorops_entity_display_name``: Human-readable name of alerting entity to summarize incidents without affecting the life-cycle workflow. -``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to VictorOps. Set this option using ``hostname:port`` if you need to use a proxy. +``victorops_proxy``: By default ElastAlert will not use a network proxy to send notifications to Splunk On-Call (Formerly VictorOps). Set this option using ``hostname:port`` if you need to use a proxy. Gitter ~~~~~~ @@ -2006,7 +2215,7 @@ The ServiceNow alerter will create a ne Incident in ServiceNow. The body of the The alerter requires the following options: -``servicenow_rest_url``: The ServiceNow RestApi url, this will look like https://instancename.service-now.com/api/now/v1/table/incident +``servicenow_rest_url``: The ServiceNow RestApi url, this will look like https://developer.servicenow.com/dev.do#!/reference/api/orlando/rest/c_TableAPI#r_TableAPI-POST ``username``: The ServiceNow Username to access the api. @@ -2043,12 +2252,20 @@ Stomp This alert type will use the STOMP protocol in order to push a message to a broker like ActiveMQ or RabbitMQ. The message body is a JSON string containing the alert details. The default values will work with a pristine ActiveMQ installation. -Optional: +The alerter requires the following option: ``stomp_hostname``: The STOMP host to use, defaults to localhost. 
+ ``stomp_hostport``: The STOMP port to use, defaults to 61613. + ``stomp_login``: The STOMP login to use, defaults to admin. + ``stomp_password``: The STOMP password to use, defaults to admin. + +Optional: + +``stomp_ssl``: Connect the STOMP host using TLS, defaults to ``False``. + ``stomp_destination``: The STOMP destination to use, defaults to /queue/ALERT The stomp_destination field depends on the broker, the /queue/ALERT example is the nomenclature used by ActiveMQ. Each broker has its own logic. @@ -2057,7 +2274,7 @@ Alerta ~~~~~~ Alerta alerter will post an alert in the Alerta server instance through the alert API endpoint. -See http://alerta.readthedocs.io/en/latest/api/alert.html for more details on the Alerta JSON format. +See https://docs.alerta.io/en/latest/api/alert.html for more details on the Alerta JSON format. For Alerta 5.0 @@ -2073,6 +2290,8 @@ Optional: ``alerta_use_match_timestamp``: If true, it will use the timestamp of the first match as the ``createTime`` of the alert. otherwise, the current server time is used. +``alerta_api_skip_ssl``: Defaults to False. + ``alert_missing_value``: Text to replace any match field not found when formating strings. Defaults to ````. The following options dictate the values of the API JSON payload: @@ -2153,6 +2372,10 @@ Optional: ``http_post_timeout``: The timeout value, in seconds, for making the post. The default is 10. If a timeout occurs, the alert will be retried next time elastalert cycles. +``http_post_ca_certs``: Set this option to ``True`` if you want to validate the SSL certificate. + +``http_post_ignore_ssl_errors``: By default ElastAlert will verify SSL certificate. Set this option to ``False`` if you want to ignore SSL errors. + Example usage:: alert: post @@ -2201,6 +2424,8 @@ Optional: ``hive_proxies``: Proxy configuration. +``hive_verify``: Whether or not to enable SSL certificate validation. Defaults to False. 
+ ``hive_observable_data_mapping``: If needed, matched data fields can be mapped to TheHive observable types using python string formatting. Example usage:: @@ -2240,6 +2465,9 @@ Zabbix will send notification to a Zabbix server. The item in the host specified Required: ``zbx_sender_host``: The address where zabbix server is running. + ``zbx_sender_port``: The port where zabbix server is listenning. + ``zbx_host``: This field setup the host in zabbix that receives the value sent by Elastalert. -``zbx_item``: This field setup the item in the host that receives the value sent by Elastalert. + +``zbx_key``: This field setup the key in the host that receives the value sent by Elastalert. diff --git a/elastalert/alerts.py b/elastalert/alerts.py index f2f31853f..62cbdd007 100644 --- a/elastalert/alerts.py +++ b/elastalert/alerts.py @@ -2,7 +2,6 @@ import copy import datetime import json -import logging import os import re import subprocess @@ -12,7 +11,6 @@ import warnings from email.mime.text import MIMEText from email.utils import formatdate -from html.parser import HTMLParser from smtplib import SMTP from smtplib import SMTP_SSL from smtplib import SMTPAuthenticationError @@ -371,7 +369,6 @@ def alert(self, matches): conn = stomp.Connection([(self.stomp_hostname, self.stomp_hostport)], use_ssl=self.stomp_ssl) - conn.start() conn.connect(self.stomp_login, self.stomp_password) # Ensures that the CONNECTED frame is received otherwise, the disconnect call will fail. time.sleep(1) @@ -586,7 +583,7 @@ def __init__(self, rule): msg = '%s Both have common statuses of (%s). As such, no tickets will ever be found.' % ( msg, ','.join(intersection)) msg += ' This should be simplified to use only one or the other.' 
- logging.warning(msg) + elastalert_logger.warning(msg) self.reset_jira_args() @@ -606,7 +603,7 @@ def set_priority(self): if self.priority is not None and self.client is not None: self.jira_args['priority'] = {'id': self.priority_ids[self.priority]} except KeyError: - logging.error("Priority %s not found. Valid priorities are %s" % (self.priority, list(self.priority_ids.keys()))) + elastalert_logger.error("Priority %s not found. Valid priorities are %s" % (self.priority, list(self.priority_ids.keys()))) def reset_jira_args(self): self.jira_args = {'project': {'key': self.project}, @@ -749,7 +746,7 @@ def find_existing_ticket(self, matches): try: issues = self.client.search_issues(jql) except JIRAError as e: - logging.exception("Error while searching for JIRA ticket using jql '%s': %s" % (jql, e)) + elastalert_logger.exception("Error while searching for JIRA ticket using jql '%s': %s" % (jql, e)) return None if len(issues): @@ -792,19 +789,19 @@ def alert(self, matches): try: self.comment_on_ticket(ticket, match) except JIRAError as e: - logging.exception("Error while commenting on ticket %s: %s" % (ticket, e)) + elastalert_logger.exception("Error while commenting on ticket %s: %s" % (ticket, e)) if self.labels: for label in self.labels: try: ticket.fields.labels.append(label) except JIRAError as e: - logging.exception("Error while appending labels to ticket %s: %s" % (ticket, e)) + elastalert_logger.exception("Error while appending labels to ticket %s: %s" % (ticket, e)) if self.transition: elastalert_logger.info('Transitioning existing ticket %s' % (ticket.key)) try: self.transition_ticket(ticket) except JIRAError as e: - logging.exception("Error while transitioning ticket %s: %s" % (ticket, e)) + elastalert_logger.exception("Error while transitioning ticket %s: %s" % (ticket, e)) if self.pipeline is not None: self.pipeline['jira_ticket'] = ticket @@ -895,13 +892,9 @@ def __init__(self, *args): if isinstance(self.rule['command'], str): self.shell = True if '%' in 
self.rule['command']: - logging.warning('Warning! You could be vulnerable to shell injection!') + elastalert_logger.warning('Warning! You could be vulnerable to shell injection!') self.rule['command'] = [self.rule['command']] - self.new_style_string_format = False - if 'new_style_string_format' in self.rule and self.rule['new_style_string_format']: - self.new_style_string_format = True - def alert(self, matches): # Format the command and arguments try: @@ -965,92 +958,6 @@ def alert(self, matches): elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn)) -class HipChatAlerter(Alerter): - """ Creates a HipChat room notification for each alert """ - required_options = frozenset(['hipchat_auth_token', 'hipchat_room_id']) - - def __init__(self, rule): - super(HipChatAlerter, self).__init__(rule) - self.hipchat_msg_color = self.rule.get('hipchat_msg_color', 'red') - self.hipchat_message_format = self.rule.get('hipchat_message_format', 'html') - self.hipchat_auth_token = self.rule['hipchat_auth_token'] - self.hipchat_room_id = self.rule['hipchat_room_id'] - self.hipchat_domain = self.rule.get('hipchat_domain', 'api.hipchat.com') - self.hipchat_ignore_ssl_errors = self.rule.get('hipchat_ignore_ssl_errors', False) - self.hipchat_notify = self.rule.get('hipchat_notify', True) - self.hipchat_from = self.rule.get('hipchat_from', '') - self.url = 'https://%s/v2/room/%s/notification?auth_token=%s' % ( - self.hipchat_domain, self.hipchat_room_id, self.hipchat_auth_token) - self.hipchat_proxy = self.rule.get('hipchat_proxy', None) - - def create_alert_body(self, matches): - body = super(HipChatAlerter, self).create_alert_body(matches) - - # HipChat sends 400 bad request on messages longer than 10000 characters - if self.hipchat_message_format == 'html': - # Use appropriate line ending for text/html - br = '
' - body = body.replace('\n', br) - - truncated_message = '
...(truncated)' - truncate_to = 10000 - len(truncated_message) - else: - truncated_message = '..(truncated)' - truncate_to = 10000 - len(truncated_message) - - if (len(body) > 9999): - body = body[:truncate_to] + truncated_message - - return body - - def alert(self, matches): - body = self.create_alert_body(matches) - - # Post to HipChat - headers = {'content-type': 'application/json'} - # set https proxy, if it was provided - proxies = {'https': self.hipchat_proxy} if self.hipchat_proxy else None - payload = { - 'color': self.hipchat_msg_color, - 'message': body, - 'message_format': self.hipchat_message_format, - 'notify': self.hipchat_notify, - 'from': self.hipchat_from - } - - try: - if self.hipchat_ignore_ssl_errors: - requests.packages.urllib3.disable_warnings() - - if self.rule.get('hipchat_mentions', []): - ping_users = self.rule.get('hipchat_mentions', []) - ping_msg = payload.copy() - ping_msg['message'] = "ping {}".format( - ", ".join("@{}".format(user) for user in ping_users) - ) - ping_msg['message_format'] = "text" - - response = requests.post( - self.url, - data=json.dumps(ping_msg, cls=DateTimeEncoder), - headers=headers, - verify=not self.hipchat_ignore_ssl_errors, - proxies=proxies) - - response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, - verify=not self.hipchat_ignore_ssl_errors, - proxies=proxies) - warnings.resetwarnings() - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to HipChat: %s" % e) - elastalert_logger.info("Alert sent to HipChat room %s" % self.hipchat_room_id) - - def get_info(self): - return {'type': 'hipchat', - 'hipchat_room_id': self.hipchat_room_id} - - class MsTeamsAlerter(Alerter): """ Creates a Microsoft Teams Conversation Message for each alert """ required_options = frozenset(['ms_teams_webhook_url', 'ms_teams_alert_summary']) @@ -1130,6 +1037,8 @@ def __init__(self, rule): self.slack_attach_kibana_discover_url = 
self.rule.get('slack_attach_kibana_discover_url', False) self.slack_kibana_discover_color = self.rule.get('slack_kibana_discover_color', '#ec4b98') self.slack_kibana_discover_title = self.rule.get('slack_kibana_discover_title', 'Discover in Kibana') + self.slack_footer = self.rule.get('slack_footer', '') + self.slack_footer_icon = self.rule.get('slack_footer_icon', '') def format_body(self, body): # https://api.slack.com/docs/formatting @@ -1192,6 +1101,12 @@ def alert(self, matches): if self.slack_title_link != '': payload['attachments'][0]['title_link'] = self.slack_title_link + if self.slack_footer != '': + payload['attachments'][0]['footer'] = self.slack_footer + + if self.slack_footer_icon != '': + payload['attachments'][0]['footer_icon'] = self.slack_footer_icon + if self.slack_attach_kibana_discover_url: kibana_discover_url = lookup_es_key(matches[0], 'kibana_discover_url') if kibana_discover_url: @@ -1207,7 +1122,7 @@ def alert(self, matches): if self.slack_ca_certs: verify = self.slack_ca_certs else: - verify = self.slack_ignore_ssl_errors + verify = not self.slack_ignore_ssl_errors if self.slack_ignore_ssl_errors: requests.packages.urllib3.disable_warnings() payload['channel'] = channel_override @@ -1359,6 +1274,8 @@ def __init__(self, rule): self.pagerduty_v2_payload_severity = self.rule.get('pagerduty_v2_payload_severity', 'critical') self.pagerduty_v2_payload_source = self.rule.get('pagerduty_v2_payload_source', 'ElastAlert') self.pagerduty_v2_payload_source_args = self.rule.get('pagerduty_v2_payload_source_args', None) + self.pagerduty_v2_payload_custom_details = self.rule.get('pagerduty_v2_payload_custom_details', {}) + self.pagerduty_v2_payload_include_all_info = self.rule.get('pagerduty_v2_payload_include_all_info', True) if self.pagerduty_api_version == 'v2': self.url = 'https://events.pagerduty.com/v2/enqueue' @@ -1371,6 +1288,13 @@ def alert(self, matches): # post to pagerduty headers = {'content-type': 'application/json'} if 
self.pagerduty_api_version == 'v2': + + custom_details_payload = {'information': body} if self.pagerduty_v2_payload_include_all_info else {} + if self.pagerduty_v2_payload_custom_details: + for match in matches: + for custom_details_key, es_key in list(self.pagerduty_v2_payload_custom_details.items()): + custom_details_payload[custom_details_key] = lookup_es_key(match, es_key) + payload = { 'routing_key': self.pagerduty_service_key, 'event_action': self.pagerduty_event_type, @@ -1391,9 +1315,7 @@ def alert(self, matches): self.pagerduty_v2_payload_source_args, matches), 'summary': self.create_title(matches), - 'custom_details': { - 'information': body, - }, + 'custom_details': custom_details_payload, }, } match_timestamp = lookup_es_key(matches[0], self.rule.get('timestamp_field', '@timestamp')) @@ -1416,7 +1338,7 @@ def alert(self, matches): try: response = requests.post( self.url, - data=json.dumps(payload, cls=DateTimeEncoder, ensure_ascii=False), + data=json.dumps(payload, cls=DateTimeEncoder, ensure_ascii=False).encode("utf-8"), headers=headers, proxies=proxies ) @@ -1531,23 +1453,35 @@ def get_info(self): class TwilioAlerter(Alerter): - required_options = frozenset(['twilio_account_sid', 'twilio_auth_token', 'twilio_to_number', 'twilio_from_number']) + required_options = frozenset(['twilio_account_sid', 'twilio_auth_token', 'twilio_to_number']) def __init__(self, rule): super(TwilioAlerter, self).__init__(rule) - self.twilio_account_sid = self.rule['twilio_account_sid'] - self.twilio_auth_token = self.rule['twilio_auth_token'] - self.twilio_to_number = self.rule['twilio_to_number'] - self.twilio_from_number = self.rule['twilio_from_number'] + self.twilio_account_sid = self.rule.get('twilio_account_sid', None) + self.twilio_auth_token = self.rule.get('twilio_auth_token', None) + self.twilio_to_number = self.rule.get('twilio_to_number', None) + self.twilio_from_number = self.rule.get('twilio_from_number', None) + self.twilio_message_service_sid = 
self.rule.get('twilio_message_service_sid', None) + self.twilio_use_copilot = self.rule.get('twilio_use_copilot', False) def alert(self, matches): client = TwilioClient(self.twilio_account_sid, self.twilio_auth_token) try: - client.messages.create(body=self.rule['name'], - to=self.twilio_to_number, - from_=self.twilio_from_number) + if self.twilio_use_copilot: + if self.twilio_message_service_sid is None: + raise EAException("Twilio Copilot requires the 'twilio_message_service_sid' option") + + client.messages.create(body=self.rule['name'], + to=self.twilio_to_number, + messaging_service_sid=self.twilio_message_service_sid) + else: + if self.twilio_from_number is None: + raise EAException("Twilio SMS requires the 'twilio_from_number' option") + client.messages.create(body=self.rule['name'], + to=self.twilio_to_number, + from_=self.twilio_from_number) except TwilioRestException as e: raise EAException("Error posting to twilio: %s" % e) @@ -1959,6 +1893,8 @@ def __init__(self, rule): self.post_all_values = self.rule.get('http_post_all_values', not self.post_payload) self.post_http_headers = self.rule.get('http_post_headers', {}) self.timeout = self.rule.get('http_post_timeout', 10) + self.post_ca_certs = self.rule.get('http_post_ca_certs') + self.post_ignore_ssl_errors = self.rule.get('http_post_ignore_ssl_errors', False) def alert(self, matches): """ Each match will trigger a POST to the specified endpoint(s). 
""" @@ -1971,12 +1907,20 @@ def alert(self, matches): "Content-Type": "application/json", "Accept": "application/json;charset=utf-8" } + if self.post_ca_certs: + verify = self.post_ca_certs + else: + verify = not self.post_ignore_ssl_errors + if self.post_ignore_ssl_errors: + requests.packages.urllib3.disable_warnings() + headers.update(self.post_http_headers) proxies = {'https': self.post_proxy} if self.post_proxy else None for url in self.post_url: try: response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), - headers=headers, proxies=proxies, timeout=self.timeout) + headers=headers, proxies=proxies, timeout=self.timeout, + verify=verify) response.raise_for_status() except RequestException as e: raise EAException("Error posting HTTP Post alert: %s" % e) @@ -1987,99 +1931,6 @@ def get_info(self): 'http_post_webhook_url': self.post_url} -class StrideHTMLParser(HTMLParser): - """Parse html into stride's fabric structure""" - - def __init__(self): - """ - Define a couple markup place holders. 
- """ - self.content = [] - self.mark = None - HTMLParser.__init__(self) - - def handle_starttag(self, tag, attrs): - """Identify and verify starting tag is fabric compatible.""" - if tag == 'b' or tag == 'strong': - self.mark = dict(type='strong') - if tag == 'u': - self.mark = dict(type='underline') - if tag == 'a': - self.mark = dict(type='link', attrs=dict(attrs)) - - def handle_endtag(self, tag): - """Clear mark on endtag.""" - self.mark = None - - def handle_data(self, data): - """Construct data node for our data.""" - node = dict(type='text', text=data) - if self.mark: - node['marks'] = [self.mark] - self.content.append(node) - - -class StrideAlerter(Alerter): - """ Creates a Stride conversation message for each alert """ - - required_options = frozenset( - ['stride_access_token', 'stride_cloud_id', 'stride_conversation_id']) - - def __init__(self, rule): - super(StrideAlerter, self).__init__(rule) - - self.stride_access_token = self.rule['stride_access_token'] - self.stride_cloud_id = self.rule['stride_cloud_id'] - self.stride_conversation_id = self.rule['stride_conversation_id'] - self.stride_ignore_ssl_errors = self.rule.get('stride_ignore_ssl_errors', False) - self.stride_proxy = self.rule.get('stride_proxy', None) - self.url = 'https://api.atlassian.com/site/%s/conversation/%s/message' % ( - self.stride_cloud_id, self.stride_conversation_id) - - def alert(self, matches): - body = self.create_alert_body(matches).strip() - - # parse body with StrideHTMLParser - parser = StrideHTMLParser() - parser.feed(body) - - # Post to Stride - headers = { - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(self.stride_access_token) - } - - # set https proxy, if it was provided - proxies = {'https': self.stride_proxy} if self.stride_proxy else None - - # build stride json payload - # https://developer.atlassian.com/cloud/stride/apis/document/structure/ - payload = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': 
{'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': parser.content} - ]} - ]}} - - try: - if self.stride_ignore_ssl_errors: - requests.packages.urllib3.disable_warnings() - response = requests.post( - self.url, data=json.dumps(payload, cls=DateTimeEncoder), - headers=headers, verify=not self.stride_ignore_ssl_errors, - proxies=proxies) - warnings.resetwarnings() - response.raise_for_status() - except RequestException as e: - raise EAException("Error posting to Stride: %s" % e) - elastalert_logger.info( - "Alert sent to Stride conversation %s" % self.stride_conversation_id) - - def get_info(self): - return {'type': 'stride', - 'stride_cloud_id': self.stride_cloud_id, - 'stride_converstation_id': self.stride_converstation_id} - - class LineNotifyAlerter(Alerter): """ Created a Line Notify for each alert """ required_option = frozenset(["linenotify_access_token"]) @@ -2156,7 +2007,10 @@ def alert(self, matches): n += 1 custom_fields[cf_key] = cf elif isinstance(alert_config_value, str): - alert_config[alert_config_field] = alert_config_value.format(**context) + alert_value = alert_config_value.format(**context) + if alert_config_field in ['severity', 'tlp']: + alert_value = int(alert_value) + alert_config[alert_config_field] = alert_value elif isinstance(alert_config_value, (list, tuple)): formatted_list = [] for element in alert_config_value: @@ -2184,3 +2038,280 @@ def get_info(self): 'type': 'hivealerter', 'hive_host': self.rule.get('hive_connection', {}).get('hive_host', '') } + + +class DiscordAlerter(Alerter): + """ Created a Discord for each alert """ + required_options = frozenset(['discord_webhook_url']) + + def __init__(self, rule): + super(DiscordAlerter, self).__init__(rule) + self.discord_webhook_url = self.rule.get('discord_webhook_url', None) + self.discord_emoji_title = self.rule.get('discord_emoji_title', ':warning:') + self.discord_proxy = self.rule.get('discord_proxy', None) + self.discord_proxy_login = 
self.rule.get('discord_proxy_login', None) + self.discord_proxy_password = self.rule.get('discord_proxy_password', None) + self.discord_embed_color = self.rule.get('discord_embed_color', 0xffffff) + self.discord_embed_footer = self.rule.get('discord_embed_footer', None) + self.discord_embed_icon_url = self.rule.get('discord_embed_icon_url', None) + + def alert(self, matches): + body = '' + title = u'%s' % (self.create_title(matches)) + for match in matches: + body += str(BasicMatchString(self.rule, match)) + if len(matches) > 1: + body += '\n----------------------------------------\n' + if len(body) > 2047: + body = body[0:1950] + '\n *message was cropped according to discord embed description limits!*' + + proxies = {'https': self.discord_proxy} if self.discord_proxy else None + auth = HTTPProxyAuth(self.discord_proxy_login, self.discord_proxy_password) if self.discord_proxy_login else None + headers = {"Content-Type": "application/json"} + + data = {} + data["content"] = "%s %s %s" % (self.discord_emoji_title, title, self.discord_emoji_title) + data["embeds"] = [] + embed = {} + embed["description"] = "%s" % (body) + embed["color"] = (self.discord_embed_color) + + if self.discord_embed_footer: + embed["footer"] = {} + embed["footer"]["text"] = (self.discord_embed_footer) if self.discord_embed_footer else None + embed["footer"]["icon_url"] = (self.discord_embed_icon_url) if self.discord_embed_icon_url else None + else: + None + + data["embeds"].append(embed) + + try: + response = requests.post(self.discord_webhook_url, data=json.dumps(data), headers=headers, proxies=proxies, auth=auth) + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Discord: %s. 
Details: %s" % (e, "" if e.response is None else e.response.text)) + + elastalert_logger.info( + "Alert sent to the webhook %s" % self.discord_webhook_url) + + def get_info(self): + return {'type': 'discord', + 'discord_webhook_url': self.discord_webhook_url} + + +class RocketChatAlerter(Alerter): + """ Creates a RocketChat notification for each alert """ + required_options = set(['rocket_chat_webhook_url']) + + def __init__(self, rule): + super(RocketChatAlerter, self).__init__(rule) + self.rocket_chat_webhook_url = self.rule.get('rocket_chat_webhook_url', None) + if isinstance(self.rocket_chat_webhook_url, str): + self.rocket_chat_webhook_url = [self.rocket_chat_webhook_url] + self.rocket_chat_proxy = self.rule.get('rocket_chat_proxy', None) + + self.rocket_chat_username_override = self.rule.get('rocket_chat_username_override', 'elastalert') + self.rocket_chat_channel_override = self.rule.get('rocket_chat_channel_override', '') + if isinstance(self.rocket_chat_channel_override, str): + self.rocket_chat_channel_override = [self.rocket_chat_channel_override] + self.rocket_chat_emoji_override = self.rule.get('rocket_chat_emoji_override', ':ghost:') + self.rocket_chat_msg_color = self.rule.get('rocket_chat_msg_color', 'danger') + self.rocket_chat_text_string = self.rule.get('rocket_chat_text_string', '') + self.rocket_chat_alert_fields = self.rule.get('rocket_chat_alert_fields', '') + self.rocket_chat_attach_kibana_discover_url = self.rule.get('rocket_chat_attach_kibana_discover_url', False) + self.rocket_chat_kibana_discover_color = self.rule.get('rocket_chat_kibana_discover_color', '#ec4b98') + self.rocket_chat_kibana_discover_title = self.rule.get('rocket_chat_kibana_discover_title', 'Discover in Kibana') + self.rocket_chat_ignore_ssl_errors = self.rule.get('rocket_chat_ignore_ssl_errors', False) + self.rocket_chat_timeout = self.rule.get('rocket_chat_timeout', 10) + self.rocket_chat_ca_certs = self.rule.get('rocket_chat_ca_certs') + + def format_body(self, body): 
+ return body + + def get_aggregation_summary_text__maximum_width(self): + width = super(RocketChatAlerter, self).get_aggregation_summary_text__maximum_width() + + # Reduced maximum width for prettier Slack display. + return min(width, 75) + + def get_aggregation_summary_text(self, matches): + text = super(RocketChatAlerter, self).get_aggregation_summary_text(matches) + if text: + text = '```\n{0}```\n'.format(text) + return text + + def populate_fields(self, matches): + alert_fields = [] + for arg in self.rocket_chat_alert_fields: + arg = copy.copy(arg) + arg['value'] = lookup_es_key(matches[0], arg['value']) + alert_fields.append(arg) + return alert_fields + + def alert(self, matches): + body = self.create_alert_body(matches) + body = self.format_body(body) + headers = {'content-type': 'application/json'} + proxies = {'https': self.rocket_chat_proxy} if self.rocket_chat_proxy else None + payload = { + 'username': self.rocket_chat_username_override, + 'text': self.rocket_chat_text_string, + 'attachments': [ + { + 'color': self.rocket_chat_msg_color, + 'title': self.create_title(matches), + 'text': body, + 'fields': [] + } + ] + } + + # if we have defined fields, populate noteable fields for the alert + if self.rocket_chat_alert_fields != '': + payload['attachments'][0]['fields'] = self.populate_fields(matches) + + if self.rocket_chat_emoji_override != '': + payload['emoji'] = self.rocket_chat_emoji_override + + if self.rocket_chat_attach_kibana_discover_url: + kibana_discover_url = lookup_es_key(matches[0], 'kibana_discover_url') + if kibana_discover_url: + payload['attachments'].append({ + 'color': self.rocket_chat_kibana_discover_color, + 'title': self.rocket_chat_kibana_discover_title, + 'title_link': kibana_discover_url + }) + + for url in self.rocket_chat_webhook_url: + for channel_override in self.rocket_chat_channel_override: + try: + if self.rocket_chat_ca_certs: + verify = self.rocket_chat_ca_certs + else: + verify = not self.rocket_chat_ignore_ssl_errors 
+ if self.rocket_chat_ignore_ssl_errors: + requests.packages.urllib3.disable_warnings() + payload['channel'] = channel_override + response = requests.post( + url, data=json.dumps(payload, cls=DateTimeEncoder), + headers=headers, + verify=verify, + proxies=proxies, + timeout=self.rocket_chat_timeout) + warnings.resetwarnings() + response.raise_for_status() + except RequestException as e: + raise EAException("Error posting to Rocket.Chat: %s" % e) + elastalert_logger.info("Alert sent to Rocket.Chat") + + def get_info(self): + return {'type': 'rocketchat', + 'rocket_chat_username_override': self.rocket_chat_username_override, + 'rocket_chat_webhook_url': self.rocket_chat_webhook_url} + + +class DatadogAlerter(Alerter): + """ Creates a Datadog Event for each alert """ + required_options = frozenset(['datadog_api_key', 'datadog_app_key']) + + def __init__(self, rule): + super(DatadogAlerter, self).__init__(rule) + self.dd_api_key = self.rule.get('datadog_api_key', None) + self.dd_app_key = self.rule.get('datadog_app_key', None) + + def alert(self, matches): + url = 'https://api.datadoghq.com/api/v1/events' + headers = { + 'Content-Type': 'application/json', + 'DD-API-KEY': self.dd_api_key, + 'DD-APPLICATION-KEY': self.dd_app_key + } + payload = { + 'title': self.create_title(matches), + 'text': self.create_alert_body(matches) + } + try: + response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers) + response.raise_for_status() + except RequestException as e: + raise EAException('Error posting event to Datadog: %s' % e) + elastalert_logger.info('Alert sent to Datadog') + + def get_info(self): + return {'type': 'datadog'} + + +class AlertmanagerAlerter(Alerter): + """ Sends an alert to Alertmanager """ + + required_options = frozenset({'alertmanager_hosts'}) + + def __init__(self, rule): + super(AlertmanagerAlerter, self).__init__(rule) + self.api_version = self.rule.get('alertmanager_api_version', 'v1') + self.hosts = 
self.rule.get('alertmanager_hosts') + self.alertname = self.rule.get('alertmanager_alertname', self.rule.get('name')) + self.labels = self.rule.get('alertmanager_labels', dict()) + self.annotations = self.rule.get('alertmanager_annotations', dict()) + self.fields = self.rule.get('alertmanager_fields', dict()) + self.title_labelname = self.rule.get('alertmanager_alert_subject_labelname', 'summary') + self.body_labelname = self.rule.get('alertmanager_alert_text_labelname', 'description') + self.proxies = self.rule.get('alertmanager_proxy', None) + self.ca_certs = self.rule.get('alertmanager_ca_certs') + self.ignore_ssl_errors = self.rule.get('alertmanager_ignore_ssl_errors', False) + self.timeout = self.rule.get('alertmanager_timeout', 10) + + @staticmethod + def _json_or_string(obj): + """helper to encode non-string objects to JSON""" + if isinstance(obj, str): + return obj + return json.dumps(obj, cls=DateTimeEncoder) + + def alert(self, matches): + headers = {'content-type': 'application/json'} + proxies = {'https': self.proxies} if self.proxies else None + + self.labels.update({ + label: self._json_or_string(lookup_es_key(matches[0], term)) + for label, term in self.fields.items()}) + self.labels.update( + alertname=self.alertname, + elastalert_rule=self.rule.get('name')) + self.annotations.update({ + self.title_labelname: self.create_title(matches), + self.body_labelname: self.create_alert_body(matches)}) + payload = { + 'annotations': self.annotations, + 'labels': self.labels + } + + for host in self.hosts: + try: + url = '{}/api/{}/alerts'.format(host, self.api_version) + + if self.ca_certs: + verify = self.ca_certs + else: + verify = not self.ignore_ssl_errors + if self.ignore_ssl_errors: + requests.packages.urllib3.disable_warnings() + + response = requests.post( + url, + data=json.dumps([payload], cls=DateTimeEncoder), + headers=headers, + verify=verify, + proxies=proxies, + timeout=self.timeout + ) + + warnings.resetwarnings() + response.raise_for_status() 
+ except RequestException as e: + raise EAException("Error posting to Alertmanager: %s" % e) + elastalert_logger.info("Alert sent to Alertmanager") + + def get_info(self): + return {'type': 'alertmanager'} diff --git a/elastalert/elastalert.py b/elastalert/elastalert.py index b078c86db..f383bd76f 100755 --- a/elastalert/elastalert.py +++ b/elastalert/elastalert.py @@ -159,6 +159,7 @@ def __init__(self, args): self.starttime = self.args.start self.disabled_rules = [] self.replace_dots_in_field_names = self.conf.get('replace_dots_in_field_names', False) + self.thread_data.alerts_sent = 0 self.thread_data.num_hits = 0 self.thread_data.num_dupes = 0 self.scheduler = BackgroundScheduler() @@ -170,7 +171,10 @@ def __init__(self, args): remove = [] for rule in self.rules: - if not self.init_rule(rule): + if 'is_enabled' in rule and not rule['is_enabled']: + self.disabled_rules.append(rule) + remove.append(rule) + elif not self.init_rule(rule): remove.append(rule) list(map(self.rules.remove, remove)) @@ -401,7 +405,7 @@ def get_hits(self, rule, starttime, endtime, index, scroll=False): # Different versions of ES have this formatted in different ways. Fallback to str-ing the whole thing raise ElasticsearchException(str(res['_shards']['failures'])) - logging.debug(str(res)) + elastalert_logger.debug(str(res)) except ElasticsearchException as e: # Elasticsearch sometimes gives us GIGANTIC error messages # (so big that they will fill the entire terminal buffer) @@ -844,7 +848,7 @@ def enhance_filter(self, rule): filters.append(query_str_filter) else: filters.append({'query': query_str_filter}) - logging.debug("Enhanced filter with {} terms: {}".format(listname, str(query_str_filter))) + elastalert_logger.debug("Enhanced filter with {} terms: {}".format(listname, str(query_str_filter))) def run_rule(self, rule, endtime, starttime=None): """ Run a rule for a given time period, including querying and alerting on results. 
@@ -871,15 +875,16 @@ def run_rule(self, rule, endtime, starttime=None): rule['original_starttime'] = rule['starttime'] rule['scrolling_cycle'] = 0 + self.thread_data.num_hits = 0 + self.thread_data.num_dupes = 0 + self.thread_data.cumulative_hits = 0 + # Don't run if starttime was set to the future if ts_now() <= rule['starttime']: - logging.warning("Attempted to use query start time in the future (%s), sleeping instead" % (starttime)) + elastalert_logger.warning("Attempted to use query start time in the future (%s), sleeping instead" % (starttime)) return 0 # Run the rule. If querying over a large time period, split it up into segments - self.thread_data.num_hits = 0 - self.thread_data.num_dupes = 0 - self.thread_data.cumulative_hits = 0 segment_size = self.get_segment_size(rule) tmp_endtime = rule['starttime'] @@ -968,7 +973,7 @@ def run_rule(self, rule, endtime, starttime=None): def init_rule(self, new_rule, new=True): ''' Copies some necessary non-config state from an exiting rule to a new rule. ''' - if not new: + if not new and self.scheduler.get_job(job_id=new_rule['name']): self.scheduler.remove_job(job_id=new_rule['name']) try: @@ -1082,12 +1087,21 @@ def load_rule_changes(self): try: new_rule = self.rules_loader.load_configuration(rule_file, self.conf) if not new_rule: - logging.error('Invalid rule file skipped: %s' % rule_file) + elastalert_logger.error('Invalid rule file skipped: %s' % rule_file) continue if 'is_enabled' in new_rule and not new_rule['is_enabled']: elastalert_logger.info('Rule file %s is now disabled.' 
% (rule_file)) # Remove this rule if it's been disabled self.rules = [rule for rule in self.rules if rule['rule_file'] != rule_file] + # Stop job if is running + if self.scheduler.get_job(job_id=new_rule['name']): + self.scheduler.remove_job(job_id=new_rule['name']) + # Append to disabled_rule + for disabled_rule in self.disabled_rules: + if disabled_rule['name'] == new_rule['name']: + break + else: + self.disabled_rules.append(new_rule) continue except EAException as e: message = 'Could not load rule %s: %s' % (rule_file, e) @@ -1106,7 +1120,6 @@ def load_rule_changes(self): # Re-enable if rule had been disabled for disabled_rule in self.disabled_rules: if disabled_rule['name'] == new_rule['name']: - self.rules.append(disabled_rule) self.disabled_rules.remove(disabled_rule) break @@ -1122,7 +1135,7 @@ def load_rule_changes(self): try: new_rule = self.rules_loader.load_configuration(rule_file, self.conf) if not new_rule: - logging.error('Invalid rule file skipped: %s' % rule_file) + elastalert_logger.error('Invalid rule file skipped: %s' % rule_file) continue if 'is_enabled' in new_rule and not new_rule['is_enabled']: continue @@ -1205,12 +1218,12 @@ def wait_until_responsive(self, timeout, clock=timeit.default_timer): time.sleep(1.0) if self.writeback_es.ping(): - logging.error( + elastalert_logger.error( 'Writeback alias "%s" does not exist, did you run `elastalert-create-index`?', self.writeback_alias, ) else: - logging.error( + elastalert_logger.error( 'Could not reach ElasticSearch at "%s:%d".', self.conf['es_host'], self.conf['es_port'], @@ -1285,7 +1298,7 @@ def handle_rule_execution(self, rule): # We were processing for longer than our refresh interval # This can happen if --start was specified with a large time period # or if we are running too slow to process events in real time. - logging.warning( + elastalert_logger.warning( "Querying from %s to %s took longer than %s!" 
% ( old_starttime, pretty_ts(endtime, rule.get('use_local_time')), @@ -1618,7 +1631,7 @@ def writeback(self, doc_type, body, rule=None, match_body=None): res = self.writeback_es.index(index=index, doc_type=doc_type, body=body) return res except ElasticsearchException as e: - logging.exception("Error writing alert info to Elasticsearch: %s" % (e)) + elastalert_logger.exception("Error writing alert info to Elasticsearch: %s" % (e)) def find_recent_pending_alerts(self, time_limit): """ Queries writeback_es to find alerts that did not send @@ -1646,7 +1659,7 @@ def find_recent_pending_alerts(self, time_limit): if res['hits']['hits']: return res['hits']['hits'] except ElasticsearchException as e: - logging.exception("Error finding recent pending alerts: %s %s" % (e, query)) + elastalert_logger.exception("Error finding recent pending alerts: %s %s" % (e, query)) return [] def send_pending_alerts(self): @@ -1724,7 +1737,7 @@ def get_aggregated_matches(self, _id): """ Removes and returns all matches from writeback_es that have aggregate_id == _id """ # XXX if there are more than self.max_aggregation matches, you have big alerts and we will leave entries in ES. - query = {'query': {'query_string': {'query': 'aggregate_id:%s' % (_id)}}, 'sort': {'@timestamp': 'asc'}} + query = {'query': {'query_string': {'query': 'aggregate_id:"%s"' % (_id)}}, 'sort': {'@timestamp': 'asc'}} matches = [] try: if self.writeback_es.is_atleastsixtwo(): @@ -1846,11 +1859,11 @@ def add_aggregated_alert(self, match, rule): def silence(self, silence_cache_key=None): """ Silence an alert for a period of time. --silence and --rule must be passed as args. 
""" if self.debug: - logging.error('--silence not compatible with --debug') + elastalert_logger.error('--silence not compatible with --debug') exit(1) if not self.args.rule: - logging.error('--silence must be used with --rule') + elastalert_logger.error('--silence must be used with --rule') exit(1) # With --rule, self.rules will only contain that specific rule @@ -1860,11 +1873,11 @@ def silence(self, silence_cache_key=None): try: silence_ts = parse_deadline(self.args.silence) except (ValueError, TypeError): - logging.error('%s is not a valid time period' % (self.args.silence)) + elastalert_logger.error('%s is not a valid time period' % (self.args.silence)) exit(1) if not self.set_realert(silence_cache_key, silence_ts, 0): - logging.error('Failed to save silence command to Elasticsearch') + elastalert_logger.error('Failed to save silence command to Elasticsearch') exit(1) elastalert_logger.info('Success. %s will be silenced until %s' % (silence_cache_key, silence_ts)) @@ -1925,7 +1938,7 @@ def is_silenced(self, rule_name): def handle_error(self, message, data=None): ''' Logs message at error level and writes message, data and traceback to Elasticsearch. ''' - logging.error(message) + elastalert_logger.error(message) body = {'message': message} tb = traceback.format_exc() body['traceback'] = tb.strip().split('\n') @@ -1935,7 +1948,7 @@ def handle_error(self, message, data=None): def handle_uncaught_exception(self, exception, rule): """ Disables a rule and sends a notification. 
""" - logging.error(traceback.format_exc()) + elastalert_logger.error(traceback.format_exc()) self.handle_error('Uncaught exception running rule %s: %s' % (rule['name'], exception), {'rule': rule['name']}) if self.disable_rules_on_error: self.rules = [running_rule for running_rule in self.rules if running_rule['name'] != rule['name']] diff --git a/elastalert/es_mappings/6/elastalert.json b/elastalert/es_mappings/6/elastalert.json index 645a67762..2cc97bcfb 100644 --- a/elastalert/es_mappings/6/elastalert.json +++ b/elastalert/es_mappings/6/elastalert.json @@ -29,6 +29,7 @@ "format": "dateOptionalTime" }, "match_body": { + "enabled": "false", "type": "object" }, "aggregate_id": { diff --git a/elastalert/kibana_discover.py b/elastalert/kibana_discover.py index 7e4dbb5d1..78db3118b 100644 --- a/elastalert/kibana_discover.py +++ b/elastalert/kibana_discover.py @@ -8,20 +8,21 @@ import urllib.parse from .util import EAException +from .util import elastalert_logger from .util import lookup_es_key from .util import ts_add kibana_default_timedelta = datetime.timedelta(minutes=10) kibana5_kibana6_versions = frozenset(['5.6', '6.0', '6.1', '6.2', '6.3', '6.4', '6.5', '6.6', '6.7', '6.8']) -kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3']) +kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3', '7.4', '7.5', '7.6', '7.7', '7.8', '7.9', '7.10', '7.11', '7.12', '7.13', '7.14', '7.15', '7.16', '7.17']) def generate_kibana_discover_url(rule, match): ''' Creates a link for a kibana discover app. 
''' discover_app_url = rule.get('kibana_discover_app_url') if not discover_app_url: - logging.warning( + elastalert_logger.warning( 'Missing kibana_discover_app_url for rule %s' % ( rule.get('name', '') ) @@ -30,7 +31,7 @@ def generate_kibana_discover_url(rule, match): kibana_version = rule.get('kibana_discover_version') if not kibana_version: - logging.warning( + elastalert_logger.warning( 'Missing kibana_discover_version for rule %s' % ( rule.get('name', '') ) @@ -39,7 +40,7 @@ def generate_kibana_discover_url(rule, match): index = rule.get('kibana_discover_index_pattern_id') if not index: - logging.warning( + elastalert_logger.warning( 'Missing kibana_discover_index_pattern_id for rule %s' % ( rule.get('name', '') ) @@ -70,7 +71,7 @@ def generate_kibana_discover_url(rule, match): appState = kibana_discover_app_state(index, columns, filters, query_keys, match) else: - logging.warning( + elastalert_logger.warning( 'Unknown kibana discover application version %s for rule %s' % ( kibana_version, rule.get('name', '') diff --git a/elastalert/loaders.py b/elastalert/loaders.py index 771194768..b092f9f1f 100644 --- a/elastalert/loaders.py +++ b/elastalert/loaders.py @@ -2,7 +2,6 @@ import copy import datetime import hashlib -import logging import os import sys @@ -15,11 +14,13 @@ from . import enhancements from . 
import ruletypes from .opsgenie import OpsGenieAlerter +from .zabbix import ZabbixAlerter from .util import dt_to_ts from .util import dt_to_ts_with_format from .util import dt_to_unix from .util import dt_to_unixms from .util import EAException +from .util import elastalert_logger from .util import get_module from .util import ts_to_dt from .util import ts_to_dt_with_format @@ -60,14 +61,15 @@ class RulesLoader(object): 'opsgenie': OpsGenieAlerter, 'stomp': alerts.StompAlerter, 'debug': alerts.DebugAlerter, + 'datadog': alerts.DatadogAlerter, + 'discord': alerts.DiscordAlerter, 'command': alerts.CommandAlerter, 'sns': alerts.SnsAlerter, - 'hipchat': alerts.HipChatAlerter, - 'stride': alerts.StrideAlerter, 'ms_teams': alerts.MsTeamsAlerter, 'slack': alerts.SlackAlerter, 'mattermost': alerts.MattermostAlerter, 'pagerduty': alerts.PagerDutyAlerter, + 'pagertree': alerts.PagerTreeAlerter, 'exotel': alerts.ExotelAlerter, 'twilio': alerts.TwilioAlerter, 'victorops': alerts.VictorOpsAlerter, @@ -76,8 +78,12 @@ class RulesLoader(object): 'gitter': alerts.GitterAlerter, 'servicenow': alerts.ServiceNowAlerter, 'alerta': alerts.AlertaAlerter, + 'alertmanager': alerts.AlertmanagerAlerter, 'post': alerts.HTTPPostAlerter, - 'hivealerter': alerts.HiveAlerter + 'linenotify': alerts.LineNotifyAlerter, + 'hivealerter': alerts.HiveAlerter, + 'rocketchat': alerts.RocketChatAlerter, + 'zabbix': ZabbixAlerter } # A partial ordering of alert types. 
Relative order will be preserved in the resulting alerts list @@ -115,10 +121,7 @@ def load(self, conf, args=None): rule = self.load_configuration(rule_file, conf, args) # A rule failed to load, don't try to process it if not rule: - logging.error('Invalid rule file skipped: %s' % rule_file) - continue - # By setting "is_enabled: False" in rule file, a rule is easily disabled - if 'is_enabled' in rule and not rule['is_enabled']: + elastalert_logger.error('Invalid rule file skipped: %s' % rule_file) continue if rule['name'] in names: raise EAException('Duplicate rule named %s' % (rule['name'])) @@ -315,13 +318,6 @@ def _dt_to_ts_with_format(dt): rule.setdefault('client_cert', conf.get('client_cert')) rule.setdefault('client_key', conf.get('client_key')) - # Set HipChat options from global config - rule.setdefault('hipchat_msg_color', 'red') - rule.setdefault('hipchat_domain', 'api.hipchat.com') - rule.setdefault('hipchat_notify', True) - rule.setdefault('hipchat_from', '') - rule.setdefault('hipchat_ignore_ssl_errors', False) - # Make sure we have required options if self.required_locals - frozenset(list(rule.keys())): raise EAException('Missing required option(s): %s' % (', '.join(self.required_locals - frozenset(list(rule.keys()))))) @@ -393,10 +389,10 @@ def _dt_to_ts_with_format(dt): if rule.get('use_strftime_index'): for token in ['%y', '%M', '%D']: if token in rule.get('index'): - logging.warning('Did you mean to use %s in the index? ' - 'The index will be formatted like %s' % (token, - datetime.datetime.now().strftime( - rule.get('index')))) + elastalert_logger.warning('Did you mean to use %s in the index? 
' + 'The index will be formatted like %s' % (token, datetime.datetime.now().strftime( rule.get('index')))) if rule.get('scan_entire_timeframe') and not rule.get('timeframe'): raise EAException('scan_entire_timeframe can only be used if there is a timeframe specified') @@ -485,7 +481,7 @@ def adjust_deprecated_values(rule): rule['http_post_proxy'] = rule['simple_proxy'] if 'simple_webhook_url' in rule: rule['http_post_url'] = rule['simple_webhook_url'] - logging.warning( + elastalert_logger.warning( '"simple" alerter has been renamed "post" and comptability may be removed in a future release.') diff --git a/elastalert/opsgenie.py b/elastalert/opsgenie.py index bcdaf2d05..8db52d89f 100644 --- a/elastalert/opsgenie.py +++ b/elastalert/opsgenie.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import json -import logging import os.path import requests @@ -46,11 +45,11 @@ def _parse_responders(self, responders, responder_args, matches, default_respond try: formated_responders.append(responder.format(**responders_values)) except KeyError as error: - logging.warn("OpsGenieAlerter: Cannot create responder for OpsGenie Alert. Key not foud: %s. " % (error)) + elastalert_logger.warning("OpsGenieAlerter: Cannot create responder for OpsGenie Alert. Key not found: %s. " % (error)) if not formated_responders: - logging.warn("OpsGenieAlerter: no responders can be formed. Trying the default responder ") + elastalert_logger.warning("OpsGenieAlerter: no responders can be formed. Trying the default responder ") if not default_responders: - logging.warn("OpsGenieAlerter: default responder not set. Falling back") + elastalert_logger.warning("OpsGenieAlerter: default responder not set. Falling back") formated_responders = responders else: formated_responders = default_responders @@ -90,7 +89,7 @@ def alert(self, matches): post['tags'] = self.tags if self.priority and self.priority not in ('P1', 'P2', 'P3', 'P4', 'P5'): - logging.warn("Priority level does not appear to be specified correctly. 
\ + elastalert_logger.warning("Priority level does not appear to be specified correctly. \ Please make sure to set it to a value between P1 and P5") else: post['priority'] = self.priority @@ -102,7 +101,7 @@ def alert(self, matches): if details: post['details'] = details - logging.debug(json.dumps(post)) + elastalert_logger.debug(json.dumps(post)) headers = { 'Content-Type': 'application/json', @@ -114,12 +113,12 @@ def alert(self, matches): try: r = requests.post(self.to_addr, json=post, headers=headers, proxies=proxies) - logging.debug('request response: {0}'.format(r)) + elastalert_logger.debug('request response: {0}'.format(r)) if r.status_code != 202: elastalert_logger.info("Error response from {0} \n " "API Response: {1}".format(self.to_addr, r)) r.raise_for_status() - logging.info("Alert sent to OpsGenie") + elastalert_logger.info("Alert sent to OpsGenie") except Exception as err: raise EAException("Error sending alert: {0}".format(err)) diff --git a/elastalert/ruletypes.py b/elastalert/ruletypes.py index 2f1d2f82c..e000b71b6 100644 --- a/elastalert/ruletypes.py +++ b/elastalert/ruletypes.py @@ -3,7 +3,7 @@ import datetime import sys -from blist import sortedlist +from sortedcontainers import SortedKeyList as sortedlist from .util import add_raw_postfix from .util import dt_to_ts @@ -318,12 +318,14 @@ def append(self, event): This will also pop the oldest events and call onRemoved on them until the window size is less than timeframe. 
""" self.data.add(event) - self.running_count += event[1] + if event and event[1]: + self.running_count += event[1] while self.duration() >= self.timeframe: oldest = self.data[0] self.data.remove(oldest) - self.running_count -= oldest[1] + if oldest and oldest[1]: + self.running_count -= oldest[1] self.onRemoved and self.onRemoved(oldest) def duration(self): @@ -363,7 +365,8 @@ def append_middle(self, event): # Append left if ts is earlier than first event if self.get_ts(self.data[0]) > ts: self.data.appendleft(event) - self.running_count += event[1] + if event and event[1]: + self.running_count += event[1] return # Rotate window until we can insert event @@ -374,7 +377,8 @@ def append_middle(self, event): # This should never happen return self.data.append(event) - self.running_count += event[1] + if event and event[1]: + self.running_count += event[1] self.data.rotate(-rotation) @@ -674,7 +678,7 @@ def get_all_terms(self, args): time_filter = {self.rules['timestamp_field']: {'lt': self.rules['dt_to_ts'](tmp_end), 'gte': self.rules['dt_to_ts'](tmp_start)}} query_template['filter'] = {'bool': {'must': [{'range': time_filter}]}} - query = {'aggs': {'filtered': query_template}} + query = {'aggs': {'filtered': query_template}, 'size': 0} if 'filter' in self.rules: for item in self.rules['filter']: @@ -1026,6 +1030,7 @@ class MetricAggregationRule(BaseAggregationRule): """ A rule that matches when there is a low number of events given a timeframe. 
""" required_options = frozenset(['metric_agg_key', 'metric_agg_type']) allowed_aggregations = frozenset(['min', 'max', 'avg', 'sum', 'cardinality', 'value_count']) + allowed_percent_aggregations = frozenset(['percentiles']) def __init__(self, *args): super(MetricAggregationRule, self).__init__(*args) @@ -1035,8 +1040,10 @@ def __init__(self, *args): self.metric_key = 'metric_' + self.rules['metric_agg_key'] + '_' + self.rules['metric_agg_type'] - if not self.rules['metric_agg_type'] in self.allowed_aggregations: + if not self.rules['metric_agg_type'] in self.allowed_aggregations.union(self.allowed_percent_aggregations): raise EAException("metric_agg_type must be one of %s" % (str(self.allowed_aggregations))) + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations and self.rules['percentile_range'] is None: + raise EAException("percentile_range must be specified for percentiles aggregation") self.rules['aggregation_query_element'] = self.generate_aggregation_query() @@ -1051,14 +1058,20 @@ def get_match_str(self, match): return message def generate_aggregation_query(self): - return {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + query = {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + query[self.metric_key][self.rules['metric_agg_type']]['percents'] = [self.rules['percentile_range']] + return query def check_matches(self, timestamp, query_key, aggregation_data): if "compound_query_key" in self.rules: self.check_matches_recursive(timestamp, query_key, aggregation_data, self.rules['compound_query_key'], dict()) else: - metric_val = aggregation_data[self.metric_key]['value'] + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + metric_val = list(aggregation_data[self.metric_key]['values'].values())[0] + else: + metric_val = aggregation_data[self.metric_key]['value'] if 
self.crossed_thresholds(metric_val): match = {self.rules['timestamp_field']: timestamp, self.metric_key: metric_val} @@ -1106,6 +1119,7 @@ class SpikeMetricAggregationRule(BaseAggregationRule, SpikeRule): """ A rule that matches when there is a spike in an aggregated event compared to its reference point """ required_options = frozenset(['metric_agg_key', 'metric_agg_type', 'spike_height', 'spike_type']) allowed_aggregations = frozenset(['min', 'max', 'avg', 'sum', 'cardinality', 'value_count']) + allowed_percent_aggregations = frozenset(['percentiles']) def __init__(self, *args): # We inherit everything from BaseAggregation and Spike, overwrite only what we need in functions below @@ -1113,8 +1127,11 @@ def __init__(self, *args): # MetricAgg alert things self.metric_key = 'metric_' + self.rules['metric_agg_key'] + '_' + self.rules['metric_agg_type'] - if not self.rules['metric_agg_type'] in self.allowed_aggregations: + + if not self.rules['metric_agg_type'] in self.allowed_aggregations.union(self.allowed_percent_aggregations): raise EAException("metric_agg_type must be one of %s" % (str(self.allowed_aggregations))) + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations and self.rules['percentile_range'] is None: + raise EAException("percentile_range must be specified for percentiles aggregation") # Disabling bucket intervals (doesn't make sense in context of spike to split up your time period) if self.rules.get('bucket_interval'): @@ -1126,7 +1143,10 @@ def generate_aggregation_query(self): """Lifted from MetricAggregationRule, added support for scripted fields""" if self.rules.get('metric_agg_script'): return {self.metric_key: {self.rules['metric_agg_type']: self.rules['metric_agg_script']}} - return {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + query = {self.metric_key: {self.rules['metric_agg_type']: {'field': self.rules['metric_agg_key']}}} + if self.rules['metric_agg_type'] in 
self.allowed_percent_aggregations: + query[self.metric_key][self.rules['metric_agg_type']]['percents'] = [self.rules['percentile_range']] + return query def add_aggregation_data(self, payload): """ @@ -1140,7 +1160,10 @@ def add_aggregation_data(self, payload): else: # no time / term split, just focus on the agg event = {self.ts_field: timestamp} - agg_value = payload_data[self.metric_key]['value'] + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + agg_value = list(payload_data[self.metric_key]['values'].values())[0] + else: + agg_value = payload_data[self.metric_key]['value'] self.handle_event(event, agg_value, 'all') return @@ -1160,7 +1183,10 @@ def unwrap_term_buckets(self, timestamp, term_buckets, qk=[]): continue qk_str = ','.join(qk) - agg_value = term_data[self.metric_key]['value'] + if self.rules['metric_agg_type'] in self.allowed_percent_aggregations: + agg_value = list(term_data[self.metric_key]['values'].values())[0] + else: + agg_value = term_data[self.metric_key]['value'] event = {self.ts_field: timestamp, self.rules['query_key']: qk_str} # pass to SpikeRule's tracker diff --git a/elastalert/schema.yaml b/elastalert/schema.yaml index 1241315dc..5d5ab5f01 100644 --- a/elastalert/schema.yaml +++ b/elastalert/schema.yaml @@ -110,7 +110,7 @@ oneOf: type: {enum: [spike_aggregation]} spike_height: {type: number} spike_type: {enum: ["up", "down", "both"]} - metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count"]} + metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count", "percentiles"]} timeframe: *timeframe use_count_query: {type: boolean} doc_type: {type: string} @@ -120,6 +120,7 @@ oneOf: threshold_ref: {type: number} threshold_cur: {type: number} min_doc_count: {type: integer} + percentile_range: {type: integer} - title: Flatline required: [threshold, timeframe] @@ -153,8 +154,9 @@ oneOf: required: [metric_agg_key,metric_agg_type] properties: type: {enum: [metric_aggregation]} - 
metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count"]} + metric_agg_type: {enum: ["min", "max", "avg", "sum", "cardinality", "value_count", "percentiles"]} #timeframe: *timeframe + percentile_range: {type: integer} - title: Percentage Match required: [match_bucket_filter] @@ -217,7 +219,7 @@ properties: ### Kibana Discover App Link generate_kibana_discover_url: {type: boolean} kibana_discover_app_url: {type: string, format: uri} - kibana_discover_version: {type: string, enum: ['7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} + kibana_discover_version: {type: string, enum: ['7.17', '7.16', '7.15', '7.14', '7.13', '7.12', '7.11', '7.10', '7.9', '7.8', '7.7', '7.6', '7.5', '7.4', '7.3', '7.2', '7.1', '7.0', '6.8', '6.7', '6.6', '6.5', '6.4', '6.3', '6.2', '6.1', '6.0', '5.6']} kibana_discover_index_pattern_id: {type: string, minLength: 1} kibana_discover_columns: {type: array, items: {type: string, minLength: 1}, minItems: 1} kibana_discover_from_timedelta: *timedelta @@ -237,6 +239,20 @@ properties: pipe_match_json: {type: boolean} fail_on_non_zero_exit: {type: boolean} + ### Datadog + datadog_api_key: {type: string} + datadog_app_key: {type: string} + + ## Discord + discord_webhook_url: {type: string} + discord_emoji_title: {type: string} + discord_proxy: {type: string} + discord_proxy_login: {type: string} + discord_proxy_password: {type: string} + discord_embed_color: {type: integer} + discord_embed_footer: {type: string} + discord_embed_icon_url: {type: string} + ### Email email: *arrayOfString email_reply_to: {type: string} @@ -261,21 +277,6 @@ properties: jira_max_age: {type: number} jira_watchers: *arrayOfString - ### HipChat - hipchat_auth_token: {type: string} - hipchat_room_id: {type: [string, integer]} - hipchat_domain: {type: string} - hipchat_ignore_ssl_errors: {type: boolean} - hipchat_notify: {type: boolean} - hipchat_from: {type: string} - hipchat_mentions: {type: array, 
items: {type: string}} - - ### Stride - stride_access_token: {type: string} - stride_cloud_id: {type: string} - stride_conversation_id: {type: string} - stride_ignore_ssl_errors: {type: boolean} - ### Slack slack_webhook_url: *arrayOfString slack_username_override: {type: string} @@ -320,15 +321,30 @@ properties: pagerduty_client_name: {type: string} pagerduty_event_type: {enum: [none, trigger, resolve, acknowledge]} -### PagerTree + ### PagerTree pagertree_integration_url: {type: string} + ### RocketChat + rocket_chat_webhook_url: *arrayOfString + rocket_chat_username_override: {type: string} + rocket_chat_channel_override: *arrayOfString + rocket_chat_emoji_override: {type: string} + rocket_chat_msg_color: {enum: [good, warning, danger]} + rocket_chat_text_string: {type: string} + rocket_chat_proxy: {type: string} + rocket_chat_attach_kibana_discover_url: {type: boolean} + rocket_chat_kibana_discover_color: {type: string} + rocket_chat_kibana_discover_title: {type: string} + rocket_chat_ca_certs: {type: boolean} + rocket_chat_ignore_ssl_errors: {type: boolean} + rocket_chat_timeout: {type: integer} ### Exotel exotel_account_sid: {type: string} exotel_auth_token: {type: string} exotel_to_number: {type: string} exotel_from_number: {type: string} + exotel_message_body: {type: string} ### Twilio twilio_account_sid: {type: string} @@ -362,7 +378,6 @@ properties: alerta_origin: {type: string} # Python format string alerta_group: {type: string} # Python format string alerta_service: {type: array, items: {type: string}} # Python format string - alerta_service: {type: array, items: {type: string}} # Python format string alerta_correlate: {type: array, items: {type: string}} # Python format string alerta_tags: {type: array, items: {type: string}} # Python format string alerta_event: {type: string} # Python format string @@ -372,8 +387,52 @@ properties: alerta_value: {type: string} # Python format string alerta_attributes_keys: {type: array, items: {type: string}}
alerta_attributes_values: {type: array, items: {type: string}} # Python format string - alerta_new_style_string_format: {type: boolean} + ### Alertmanager + alertmanager_hosts: {type: array, items: {type: string}} + alertmanager_api_version: {type: string, enum: ['v1', 'v2']} + alertmanager_alert_subject_labelname: {type: string} + alertmanager_alert_text_labelname: {type: string} + alertmanager_proxy: {type: string} + alertmanager_ca_certs: {type: boolean} + alertmanager_ignore_ssl_errors: {type: boolean} + alertmanager_timeout: {type: integer} + alertmanager_labels: + type: object + minProperties: 1 + patternProperties: + "^.+$": + oneOf: + - type: string + - type: object + additionalProperties: false + required: [field] + properties: + field: {type: string, minLength: 1} + alertmanager_annotations: + type: object + minProperties: 1 + patternProperties: + "^.+$": + oneOf: + - type: string + - type: object + additionalProperties: false + required: [field] + properties: + field: {type: string, minLength: 1} + alertmanager_fields: + type: object + minProperties: 1 + patternProperties: + "^.+$": + oneOf: + - type: string + - type: object + additionalProperties: false + required: [field] + properties: + field: {type: string, minLength: 1} ### Simple simple_webhook_url: *arrayOfString @@ -386,4 +445,4 @@ properties: zbx_sender_host: {type: string} zbx_sender_port: {type: integer} zbx_host: {type: string} - zbx_item: {type: string} + zbx_key: {type: string} diff --git a/elastalert/test_rule.py b/elastalert/test_rule.py index 06100aa0f..965e4972c 100644 --- a/elastalert/test_rule.py +++ b/elastalert/test_rule.py @@ -10,7 +10,7 @@ import string import sys -import mock +from unittest import mock from elastalert.config import load_conf from elastalert.elastalert import ElastAlerter @@ -83,7 +83,7 @@ def test_file(self, conf, args): # Get one document for schema try: - res = es_client.search(index, size=1, body=query, ignore_unavailable=True) + res = 
es_client.search(index=index, size=1, body=query, ignore_unavailable=True) except Exception as e: print("Error running your filter:", file=sys.stderr) print(repr(e)[:2048], file=sys.stderr) @@ -109,7 +109,7 @@ def test_file(self, conf, args): five=conf['five'] ) try: - res = es_client.count(index, doc_type=doc_type, body=count_query, ignore_unavailable=True) + res = es_client.count(index=index, doc_type=doc_type, body=count_query, ignore_unavailable=True) except Exception as e: print("Error querying Elasticsearch:", file=sys.stderr) print(repr(e)[:2048], file=sys.stderr) @@ -153,7 +153,7 @@ def test_file(self, conf, args): # Download up to max_query_size (defaults to 10,000) documents to save if (args.save or args.formatted_output) and not args.count: try: - res = es_client.search(index, size=args.max_query_size, body=query, ignore_unavailable=True) + res = es_client.search(index=index, size=args.max_query_size, body=query, ignore_unavailable=True) except Exception as e: print("Error running your filter:", file=sys.stderr) print(repr(e)[:2048], file=sys.stderr) diff --git a/elastalert/util.py b/elastalert/util.py index bbb0600ff..3e9c9f664 100644 --- a/elastalert/util.py +++ b/elastalert/util.py @@ -152,7 +152,7 @@ def ts_to_dt(timestamp): def dt_to_ts(dt): if not isinstance(dt, datetime.datetime): - logging.warning('Expected datetime, got %s' % (type(dt))) + elastalert_logger.warning('Expected datetime, got %s' % (type(dt))) return dt ts = dt.isoformat() # Round microseconds to milliseconds @@ -176,7 +176,7 @@ def ts_to_dt_with_format(timestamp, ts_format): def dt_to_ts_with_format(dt, ts_format): if not isinstance(dt, datetime.datetime): - logging.warning('Expected datetime, got %s' % (type(dt))) + elastalert_logger.warning('Expected datetime, got %s' % (type(dt))) return dt ts = dt.strftime(ts_format) return ts @@ -361,7 +361,7 @@ def build_es_conn_config(conf): # Deprecated if 'boto_profile' in conf: - logging.warning('Found deprecated "boto_profile", use 
"profile" instead!') + elastalert_logger.warning('Found deprecated "boto_profile", use "profile" instead!') parsed_conf['profile'] = conf['boto_profile'] if 'profile' in conf: diff --git a/elastalert/zabbix.py b/elastalert/zabbix.py index e3f13aa03..acced28d7 100644 --- a/elastalert/zabbix.py +++ b/elastalert/zabbix.py @@ -1,42 +1,44 @@ -from alerts import Alerter # , BasicMatchString -import logging -from pyzabbix.api import ZabbixAPI -from pyzabbix import ZabbixSender, ZabbixMetric from datetime import datetime +from pyzabbix import ZabbixSender, ZabbixMetric, ZabbixAPI + +from .alerts import Alerter +from .util import elastalert_logger, EAException + class ZabbixClient(ZabbixAPI): - def __init__(self, url='http://localhost', use_authenticate=False, user='Admin', password='zabbix', sender_host='localhost', - sender_port=10051): + def __init__(self, url='http://localhost', use_authenticate=False, user='Admin', password='zabbix', + sender_host='localhost', sender_port=10051): self.url = url self.use_authenticate = use_authenticate self.sender_host = sender_host self.sender_port = sender_port self.metrics_chunk_size = 200 self.aggregated_metrics = [] - self.logger = logging.getLogger(self.__class__.__name__) - super(ZabbixClient, self).__init__(url=self.url, use_authenticate=self.use_authenticate, user=user, password=password) + + super(ZabbixClient, self).__init__(url=self.url, + use_authenticate=self.use_authenticate, + user=user, + password=password) def send_metric(self, hostname, key, data): zm = ZabbixMetric(hostname, key, data) if self.send_aggregated_metrics: - self.aggregated_metrics.append(zm) if len(self.aggregated_metrics) > self.metrics_chunk_size: - self.logger.info("Sending: %s metrics" % (len(self.aggregated_metrics))) + elastalert_logger.info("Sending: %s metrics" % (len(self.aggregated_metrics))) try: - ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port).send(self.aggregated_metrics) + 
ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port) \ + .send(self.aggregated_metrics) self.aggregated_metrics = [] except Exception as e: - self.logger.exception(e) - pass + elastalert_logger.exception(e) else: try: - ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port).send(zm) + ZabbixSender(zabbix_server=self.sender_host, zabbix_port=self.sender_port).send([zm]) except Exception as e: - self.logger.exception(e) - pass + elastalert_logger.exception(e) class ZabbixAlerter(Alerter): @@ -54,6 +56,9 @@ def __init__(self, *args): self.zbx_sender_port = self.rule.get('zbx_sender_port', 10051) self.zbx_host = self.rule.get('zbx_host') self.zbx_key = self.rule.get('zbx_key') + self.timestamp_field = self.rule.get('timestamp_field', '@timestamp') + self.timestamp_type = self.rule.get('timestamp_type', 'iso') + self.timestamp_strptime = self.rule.get('timestamp_strptime', '%Y-%m-%dT%H:%M:%S.%f%z') # Alert is called def alert(self, matches): @@ -63,10 +68,26 @@ def alert(self, matches): # the aggregation option set zm = [] for match in matches: - ts_epoch = int(datetime.strptime(match['@timestamp'], "%Y-%m-%dT%H:%M:%S.%fZ").strftime('%s')) - zm.append(ZabbixMetric(host=self.zbx_host, key=self.zbx_key, value=1, clock=ts_epoch)) + if ':' not in match[self.timestamp_field] or '-' not in match[self.timestamp_field]: + ts_epoch = int(match[self.timestamp_field]) + else: + try: + ts_epoch = int(datetime.strptime(match[self.timestamp_field], self.timestamp_strptime) + .timestamp()) + except ValueError: + ts_epoch = int(datetime.strptime(match[self.timestamp_field], '%Y-%m-%dT%H:%M:%S%z') + .timestamp()) + zm.append(ZabbixMetric(host=self.zbx_host, key=self.zbx_key, value='1', clock=ts_epoch)) - ZabbixSender(zabbix_server=self.zbx_sender_host, zabbix_port=self.zbx_sender_port).send(zm) + try: + response = ZabbixSender(zabbix_server=self.zbx_sender_host, zabbix_port=self.zbx_sender_port).send(zm) + if response.failed: + 
elastalert_logger.warning("Missing zabbix host '%s' or host's item '%s', alert will be discarded" + % (self.zbx_host, self.zbx_key)) + else: + elastalert_logger.info("Alert sent to Zabbix") + except Exception as e: + raise EAException("Error sending alert to Zabbix: %s" % e) # get_info is called after an alert is sent to get data that is written back # to Elasticsearch in the field "alert_info" diff --git a/example_rules/ssh.yaml b/example_rules/ssh.yaml index 7af890784..a7147217b 100644 --- a/example_rules/ssh.yaml +++ b/example_rules/ssh.yaml @@ -1,5 +1,5 @@ # Rule name, must be unique - name: SSH abuse (ElastAlert 3.0.1) - 2 +name: SSH abuse (ElastAlert 3.0.1) - 2 # Alert on x events in y seconds type: frequency diff --git a/pytest.ini b/pytest.ini index 0ad3341d9..259ba35a2 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,3 +1,5 @@ [pytest] markers = elasticsearch: mark a test as using elasticsearch. +filterwarnings = + ignore::pytest.PytestUnhandledThreadExceptionWarning \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index 558761d9e..2adb92368 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,9 +1,9 @@ -r requirements.txt -coverage==4.5.4 +coverage==6.2 flake8 pre-commit -pylint<1.4 -pytest<3.3.0 +pylint==2.12.2 +pytest==7.0.0 setuptools sphinx_rtd_theme -tox<2.0 +tox==3.24.5 diff --git a/requirements.txt b/requirements.txt index 9c32052d0..b5db239db 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,22 +1,21 @@ -apscheduler>=3.3.0 -aws-requests-auth>=0.3.0 -blist>=1.3.6 -boto3>=1.4.4 -cffi>=1.11.5 -configparser>=3.5.0 -croniter>=0.3.16 -elasticsearch>=7.0.0 +apscheduler>=3.3.0,<4.0 +aws-requests-auth>=0.4.3 +sortedcontainers>=2.4.0 +boto3>=1.19.7 +cffi>=1.15.0 +croniter>=1.0.15 +elasticsearch==7.0.0 envparse>=0.2.0 -exotel>=0.1.3 -jira>=1.0.10,<1.0.15 -jsonschema>=3.0.2 -mock>=2.0.0 -prison>=0.1.2 -py-zabbix==1.1.3 -PyStaticConfiguration>=0.10.3 -python-dateutil>=2.6.0,<2.7.0 -PyYAML>=5.1 
-requests>=2.0.0 -stomp.py>=4.1.17 -texttable>=0.8.8 -twilio==6.0.0 +exotel>=0.1.5 +jira>=3.0.1 +jsonschema>=4.1.2 +prison>=0.2.1 +py-zabbix==1.1.7 +PyStaticConfiguration>=0.10.5 +python-dateutil>=2.8.2 +PyYAML>=6.0 +requests>=2.26.0 +stomp.py>=7.0.0 +texttable>=1.6.4 +twilio>=6.0.0,<6.58 +tzlocal==2.1 \ No newline at end of file diff --git a/setup.py b/setup.py index 2845836a7..7b15575b0 100644 --- a/setup.py +++ b/setup.py @@ -16,6 +16,10 @@ license='Copyright 2014 Yelp', classifiers=[ 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', ], @@ -27,26 +31,26 @@ packages=find_packages(), package_data={'elastalert': ['schema.yaml', 'es_mappings/**/*.json']}, install_requires=[ - 'apscheduler>=3.3.0', - 'aws-requests-auth>=0.3.0', - 'blist>=1.3.6', - 'boto3>=1.4.4', - 'configparser>=3.5.0', - 'croniter>=0.3.16', + 'apscheduler>=3.3.0,<4.0', + 'aws-requests-auth>=0.4.3', + 'sortedcontainers>=2.4.0', + 'boto3>=1.19.7', + 'croniter>=1.0.15', 'elasticsearch==7.0.0', 'envparse>=0.2.0', - 'exotel>=0.1.3', - 'jira>=2.0.0', - 'jsonschema>=3.0.2', - 'mock>=2.0.0', - 'prison>=0.1.2', - 'PyStaticConfiguration>=0.10.3', - 'python-dateutil>=2.6.0,<2.7.0', - 'PyYAML>=3.12', - 'requests>=2.10.0', - 'stomp.py>=4.1.17', - 'texttable>=0.8.8', - 'twilio>=6.0.0,<6.1', - 'cffi>=1.11.5' + 'exotel>=0.1.5', + 'jira>=3.0.1', + 'jsonschema>=4.1.2', + 'prison>=0.2.1', + 'PyStaticConfiguration>=0.10.5', + 'python-dateutil>=2.8.2', + 'PyYAML>=6.0', + 'py-zabbix==1.1.7', + 'requests>=2.26.0', + 'stomp.py>=7.0.0', + 'texttable>=1.6.4', + 'twilio>=6.0.0,<6.58', + 'cffi>=1.11.5', + 'tzlocal==2.1' ] ) diff --git a/tests/alerts_test.py b/tests/alerts_test.py index 5cd61ae75..01ac79701 100644 --- a/tests/alerts_test.py +++ b/tests/alerts_test.py @@ -2,29 
+2,37 @@ import base64 import datetime import json +import logging import subprocess -import mock +from requests import RequestException +from requests.auth import HTTPProxyAuth + +from unittest import mock import pytest from jira.exceptions import JIRAError from elastalert.alerts import AlertaAlerter +from elastalert.alerts import AlertmanagerAlerter from elastalert.alerts import Alerter from elastalert.alerts import BasicMatchString from elastalert.alerts import CommandAlerter +from elastalert.alerts import DatadogAlerter +from elastalert.alerts import DiscordAlerter from elastalert.alerts import EmailAlerter -from elastalert.alerts import HipChatAlerter from elastalert.alerts import HTTPPostAlerter from elastalert.alerts import JiraAlerter from elastalert.alerts import JiraFormattedMatchString from elastalert.alerts import MsTeamsAlerter from elastalert.alerts import PagerDutyAlerter +from elastalert.alerts import RocketChatAlerter from elastalert.alerts import SlackAlerter -from elastalert.alerts import StrideAlerter +from elastalert.alerts import TwilioAlerter from elastalert.loaders import FileRulesLoader from elastalert.opsgenie import OpsGenieAlerter from elastalert.util import ts_add from elastalert.util import ts_now +from elastalert.util import EAException class mock_rule: @@ -1242,7 +1250,7 @@ def test_slack_uses_custom_title(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1288,7 +1296,7 @@ def test_slack_uses_custom_timeout(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=20 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1332,7 +1340,7 @@ def test_slack_uses_rule_name_when_custom_title_is_not_provided(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - 
verify=False, + verify=True, timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1377,7 +1385,7 @@ def test_slack_uses_custom_slack_channel(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1438,7 +1446,7 @@ def test_slack_uses_list_of_custom_slack_channel(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1489,7 +1497,7 @@ def test_slack_attach_kibana_discover_url_when_generated(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1534,7 +1542,7 @@ def test_slack_attach_kibana_discover_url_when_not_generated(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1586,7 +1594,7 @@ def test_slack_kibana_discover_title(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1638,13 +1646,58 @@ def test_slack_kibana_discover_color(): data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None, - verify=False, + verify=True, timeout=10 ) actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) assert expected_data == actual_data +def test_slack_ignore_ssl_errors(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'slack_webhook_url': 'http://please.dontgohere.slack', + 'slack_ignore_ssl_errors': True, + 'alert': [] + } + rules_loader = 
FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = SlackAlerter(rule) + match = { + '@timestamp': '2016-01-01T00:00:00' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + mock_post_request.assert_called_once_with( + rule['slack_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=False, + timeout=10 + ) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'icon_emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'mrkdwn_in': ['text', 'pretext'], + 'fields': [] + } + ], + 'text': '', + 'parse': 'none' + } + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + def test_http_alerter_with_payload(): rule = { 'name': 'Test HTTP Post Alerter With Payload', @@ -1672,7 +1725,8 @@ def test_http_alerter_with_payload(): data=mock.ANY, headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - timeout=10 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1707,7 +1761,8 @@ def test_http_alerter_with_payload_all_values(): data=mock.ANY, headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - timeout=10 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -1739,7 +1794,8 @@ def test_http_alerter_without_payload(): data=mock.ANY, headers={'Content-Type': 'application/json', 'Accept': 'application/json;charset=utf-8'}, proxies=None, - timeout=10 + timeout=10, + verify=True ) assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) @@ -2086,528 +2142,2078 @@ def test_resolving_rule_references(ea): assert 'the_owner' == alert.rule['nested_dict']['nested_owner'] -def test_stride_plain_text(): +def 
test_alerta_no_auth(ea): rule = { - 'name': 'Test Rule', + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_api_skip_ssl': True, + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["%(key)s", "%(logdate)s", "%(sender_ip)s"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "Elastalert", + 'alerta_severity': "debug", + 'alerta_text': "Probe %(hostname)s is UP at %(logdate)s GMT", + 'alerta_value': "UP", 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert': [] + 'alerta_use_match_timestamp': True, + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) + match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + # 'key': ---- missing field on purpose, to verify that simply the text is left empty + # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - body = "{0}\n\n@timestamp: {1}\nsomefield: {2}".format( - rule['name'], match['@timestamp'], match['somefield'] - ) - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body} - ]} - ]} - ]}} + expected_data = { + "origin": "Elastalert", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + 
"tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" + } mock_post_request.assert_called_once_with( alert.url, data=mock.ANY, headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None + 'content-type': 'application/json'}, + verify=False ) assert expected_data == json.loads( mock_post_request.call_args_list[0][1]['data']) -def test_stride_underline_text(): +def test_alerta_auth(ea): rule = { - 'name': 'Test Rule', + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'alerta_api_key': '123456789ABCDEF', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_severity': "debug", 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Underline Text', - 'alert_text_type': 'alert_text_only', - 'alert': [] + 'alerta_use_match_timestamp': True, + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) + match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - body = "Underline Text" - expected_data = {'body': {'version': 1, 'type': "doc", 
'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'underline'} - ]} - ]} - ]} - ]}} - mock_post_request.assert_called_once_with( alert.url, data=mock.ANY, + verify=True, headers={ 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) + 'Authorization': 'Key {}'.format(rule['alerta_api_key'])}) -def test_stride_bold_text(): +def test_alerta_new_style(ea): rule = { - 'name': 'Test Rule', + 'name': 'Test Alerta rule!', + 'alerta_api_url': 'http://elastalerthost:8080/api/alert', + 'timeframe': datetime.timedelta(hours=1), + 'timestamp_field': '@timestamp', + 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], + 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], + 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], + 'alerta_event': "ProbeUP", + 'alerta_group': "Health", + 'alerta_origin': "Elastalert", + 'alerta_severity': "debug", + 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", + 'alerta_value': "UP", 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Bold Text', - 'alert_text_type': 'alert_text_only', - 'alert': [] + 'alerta_use_match_timestamp': True, + 'alert': 'alerta' } - rules_loader = FileRulesLoader({}) - rules_loader.load_modules(rule) - alert = StrideAlerter(rule) + match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' + '@timestamp': '2014-10-10T00:00:00', + # 'key': ---- missing field on purpose, to verify that simply the text is left empty + # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty + 'sender_ip': '1.1.1.1', + 'hostname': 
'aProbe' } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertaAlerter(rule) with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - body = "Bold Text" - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'strong'} - ]} - ]} - ]} - ]}} + expected_data = { + "origin": "Elastalert", + "resource": "elastalert", + "severity": "debug", + "service": ["elastalert"], + "tags": [], + "text": "Probe aProbe is UP at GMT", + "value": "UP", + "createTime": "2014-10-10T00:00:00.000000Z", + "environment": "Production", + "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", + "timeout": 86400, + "correlate": ["ProbeUP", "ProbeDOWN"], + "group": "Health", + "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, + "type": "elastalert", + "event": "ProbeUP" + } mock_post_request.assert_called_once_with( alert.url, data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, verify=True, - proxies=None + headers={ + 'content-type': 'application/json'} ) assert expected_data == json.loads( mock_post_request.call_args_list[0][1]['data']) -def test_stride_strong_text(): +def test_alert_subject_size_limit_no_args(ea): + rule = { + 'name': 'test_rule', + 'type': mock_rule(), + 'owner': 'the_owner', + 'priority': 2, + 'alert_subject': 'A very long subject', + 'alert_subject_max_len': 5 + } + alert = Alerter(rule) + alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + assert 5 == len(alertSubject) + + +def test_alert_subject_size_limit_with_args(ea): + rule = { + 'name': 'test_rule', + 'type': mock_rule(), + 'owner': 'the_owner', + 'priority': 2, + 
'alert_subject': 'Test alert for {0} {1}', + 'alert_subject_args': ['test_term', 'test.term'], + 'alert_subject_max_len': 6 + } + alert = Alerter(rule) + alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) + assert 6 == len(alertSubject) + + +def test_twilio_getinfo(): rule = { 'name': 'Test Rule', 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', 'alert_subject': 'Cool subject', - 'alert_text': 'Bold Text', - 'alert_text_type': 'alert_text_only', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'twilio_from_number': 'xxxxx4', 'alert': [] } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = StrideAlerter(rule) - match = { - '@timestamp': '2016-01-01T00:00:00', - 'somefield': 'foobarbaz' + alert = TwilioAlerter(rule) + + expected_data = { + 'type': 'twilio', + 'twilio_client_name': 'xxxxx4' } - with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) + actual_data = alert.get_info() + assert expected_data == actual_data - body = "Bold Text" - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'strong'} - ]} - ]} - ]} - ]}} - mock_post_request.assert_called_once_with( - alert.url, - data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None - ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) +@pytest.mark.parametrize('twilio_account_sid, twilio_auth_token, twilio_to_number, expected_data', [ + ('', '', '', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('xxxx1', '', '', 'Missing 
required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('', 'xxxx2', '', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('', '', 'INFO', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('xxxx1', 'xxxx2', '', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('xxxx1', '', 'INFO', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('', 'xxxx2', 'INFO', 'Missing required option(s): twilio_account_sid, twilio_auth_token, twilio_to_number'), + ('xxxx1', 'xxxx2', 'INFO', + { + 'type': 'twilio', + 'twilio_client_name': 'xxxxx4' + }), +]) +def test_twilio_required_error(twilio_account_sid, twilio_auth_token, twilio_to_number, expected_data): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_from_number': 'xxxxx4', + 'alert': [] + } + + if twilio_account_sid: + rule['twilio_account_sid'] = twilio_account_sid + + if twilio_auth_token: + rule['twilio_auth_token'] = twilio_auth_token + + if twilio_to_number: + rule['twilio_to_number'] = twilio_to_number + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TwilioAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception as ea: + assert expected_data in str(ea) + + +@pytest.mark.parametrize('twilio_use_copilot, twilio_message_service_sid, twilio_from_number, expected_data', [ + (True, None, 'test', True), + (False, 'test', None, True), +]) +def test_twilio_use_copilot(twilio_use_copilot, twilio_message_service_sid, twilio_from_number, expected_data): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'alert': [] + } + + if twilio_use_copilot: + rule['twilio_use_copilot'] = 
twilio_use_copilot + + if twilio_message_service_sid: + rule['twilio_message_service_sid'] = twilio_message_service_sid + + if twilio_from_number: + rule['twilio_from_number'] = twilio_from_number + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TwilioAlerter(rule) + + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + alert.alert([match]) + except EAException: + assert expected_data -def test_stride_hyperlink(): +def test_twilio(caplog): + caplog.set_level(logging.INFO) rule = { 'name': 'Test Rule', 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', 'alert_subject': 'Cool subject', - 'alert_text': '
Link', - 'alert_text_type': 'alert_text_only', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'twilio_from_number': 'xxxxx4', 'alert': [] } + match = { + '@timestamp': '2021-01-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = StrideAlerter(rule) + + with mock.patch('elastalert.alerts.TwilioClient.messages') as mock_twilio: + mock_twilio.messages.create() + mock_twilio.return_value = 200 + alert = TwilioAlerter(rule) + alert.alert([match]) + expected = [ + mock.call.messages.create(), + mock.call.create(body='Test Rule', from_='xxxxx4', to='xxxxx3'), + ] + + assert mock_twilio.mock_calls == expected + assert ('elastalert', logging.INFO, 'Trigger sent to Twilio') == caplog.record_tuples[0] + + +def test_twilio_copilot(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'twilio_message_service_sid': 'xxxxx5', + 'twilio_use_copilot': True, + 'alert': [] + } match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2021-01-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + + with mock.patch('elastalert.alerts.TwilioClient.messages') as mock_twilio: + mock_twilio.messages.create() + mock_twilio.return_value = 200 + alert = TwilioAlerter(rule) + alert.alert([match]) + expected = [ + mock.call.messages.create(), + mock.call.create(body='Test Rule', messaging_service_sid='xxxxx5', to='xxxxx3'), + ] + + assert mock_twilio.mock_calls == expected + assert ('elastalert', logging.INFO, 'Trigger sent to Twilio') == caplog.record_tuples[0] + + +def test_twilio_rest_exception(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test Rule', + 'type': 
'any', + 'alert_subject': 'Cool subject', + 'twilio_account_sid': 'xxxxx1', + 'twilio_auth_token': 'xxxxx2', + 'twilio_to_number': 'xxxxx3', + 'twilio_from_number': 'xxxxx4', + 'alert': [] + } + match = { + '@timestamp': '2021-01-10T00:00:00', + 'sender_ip': '1.1.1.1', + 'hostname': 'aProbe' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = TwilioAlerter(rule) + alert.alert([match]) + + assert 'Error posting to twilio: ' in str(ea) + + +def test_discord(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'discord_embed_footer': 'footer', + 'discord_embed_icon_url': 'http://xxxx/image.png', + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - body = "Link" - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': body, 'marks': [ - {'type': 'link', 'attrs': {'href': 'http://stride.com'}} - ]} - ]} - ]} - ]}} + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'color': 0xffffff, + 'footer': { + 'text': 'footer', + 'icon_url': 'http://xxxx/image.png' + } + }] + } mock_post_request.assert_called_once_with( - alert.url, + rule['discord_webhook_url'], data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None + headers={'Content-Type': 
'application/json'}, + proxies=None, + auth=None ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to the webhook http://xxxxxxx') == caplog.record_tuples[0] -def test_stride_html(): +def test_discord_not_footer(): rule = { - 'name': 'Test Rule', + 'name': 'Test Discord Rule', 'type': 'any', - 'stride_access_token': 'token', - 'stride_cloud_id': 'cloud_id', - 'stride_conversation_id': 'conversation_id', - 'alert_subject': 'Cool subject', - 'alert_text': 'Alert: we found something. Link', - 'alert_text_type': 'alert_text_only', - 'alert': [] + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = StrideAlerter(rule) + alert = DiscordAlerter(rule) match = { - '@timestamp': '2016-01-01T00:00:00', + '@timestamp': '2021-01-01T00:00:00', 'somefield': 'foobarbaz' } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) - expected_data = {'body': {'version': 1, 'type': "doc", 'content': [ - {'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [ - {'type': 'paragraph', 'content': [ - {'type': 'text', 'text': 'Alert', 'marks': [ - {'type': 'strong'} - ]}, - {'type': 'text', 'text': ': we found something. 
'}, - {'type': 'text', 'text': 'Link', 'marks': [ - {'type': 'link', 'attrs': {'href': 'http://stride.com'}} - ]} - ]} - ]} - ]}} + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'color': 0xffffff + }] + } mock_post_request.assert_called_once_with( - alert.url, + rule['discord_webhook_url'], data=mock.ANY, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Bearer {}'.format(rule['stride_access_token'])}, - verify=True, - proxies=None + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data -def test_hipchat_body_size_limit_text(): + +def test_discord_proxy(): rule = { - 'name': 'Test Rule', + 'name': 'Test Discord Rule', 'type': 'any', - 'hipchat_auth_token': 'token', - 'hipchat_room_id': 'room_id', - 'hipchat_message_format': 'text', - 'alert_subject': 'Cool subject', - 'alert_text': 'Alert: we found something.\n\n{message}', - 'alert_text_type': 'alert_text_only', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'discord_proxy': 'http://proxy.url', + 'discord_proxy_login': 'admin', + 'discord_proxy_password': 'password', 'alert': [], - 'alert_text_kw': { - '@timestamp': 'time', - 'message': 'message', - }, + 'alert_subject': 'Test Discord' } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HipChatAlerter(rule) + alert = DiscordAlerter(rule) match = { - '@timestamp': '2018-01-01T00:00:00', - 'message': 'foo bar\n' * 5000, + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' } - body = alert.create_alert_body([match]) + with mock.patch('requests.post') as mock_post_request: + 
alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n\n@timestamp: 2021-01-01T00:00:00\nsomefield: foobarbaz\n', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 'application/json'}, + proxies={'https': 'http://proxy.url'}, + auth=HTTPProxyAuth('admin', 'password') + ) - assert len(body) <= 10000 + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data -def test_hipchat_body_size_limit_html(): +def test_discord_description_maxlength(): rule = { - 'name': 'Test Rule', + 'name': 'Test Discord Rule' + ('a' * 2069), 'type': 'any', - 'hipchat_auth_token': 'token', - 'hipchat_room_id': 'room_id', - 'hipchat_message_format': 'html', - 'alert_subject': 'Cool subject', - 'alert_text': 'Alert: we found something.\n\n{message}', - 'alert_text_type': 'alert_text_only', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, 'alert': [], - 'alert_text_kw': { - '@timestamp': 'time', - 'message': 'message', - }, + 'alert_subject': 'Test Discord' } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = HipChatAlerter(rule) + alert = DiscordAlerter(rule) match = { - '@timestamp': '2018-01-01T00:00:00', - 'message': 'foo bar\n' * 5000, + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule' + ('a' * 1933) + + '\n *message was cropped according to discord embed description limits!*', + 'color': 0xffffff + }] + } + + mock_post_request.assert_called_once_with( + rule['discord_webhook_url'], + data=mock.ANY, + headers={'Content-Type': 
'application/json'}, + proxies=None, + auth=None + ) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data - body = alert.create_alert_body([match]) - assert len(body) <= 10000 +def test_discord_ea_exception(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'alert': [], + 'alert_subject': 'Test Discord' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + assert 'Error posting to Discord: . Details: ' in str(ea) -def test_alerta_no_auth(ea): +def test_discord_getinfo(): rule = { - 'name': 'Test Alerta rule!', - 'alerta_api_url': 'http://elastalerthost:8080/api/alert', - 'timeframe': datetime.timedelta(hours=1), - 'timestamp_field': '@timestamp', - 'alerta_api_skip_ssl': True, - 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], - 'alerta_attributes_values': ["%(key)s", "%(logdate)s", "%(sender_ip)s"], - 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], - 'alerta_event': "ProbeUP", - 'alerta_group': "Health", - 'alerta_origin': "Elastalert", - 'alerta_severity': "debug", - 'alerta_text': "Probe %(hostname)s is UP at %(logdate)s GMT", - 'alerta_value': "UP", + 'name': 'Test Discord Rule' + ('a' * 2069), 'type': 'any', - 'alerta_use_match_timestamp': True, - 'alert': 'alerta' + 'discord_webhook_url': 'http://xxxxxxx', + 'alert': [], + 'alert_subject': 'Test Discord' } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) - match = { - '@timestamp': '2014-10-10T00:00:00', - 
# 'key': ---- missing field on purpose, to verify that simply the text is left empty - # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty - 'sender_ip': '1.1.1.1', - 'hostname': 'aProbe' + expected_data = { + 'type': 'discord', + 'discord_webhook_url': 'http://xxxxxxx' } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('discord_webhook_url, expected_data', [ + ('', 'Missing required option(s): discord_webhook_url'), + ('http://xxxxxxx', + { + 'type': 'discord', + 'discord_webhook_url': 'http://xxxxxxx' + }), +]) +def test_discord_required_error(discord_webhook_url, expected_data): + try: + rule = { + 'name': 'Test Discord Rule' + ('a' * 2069), + 'type': 'any', + 'alert': [], + 'alert_subject': 'Test Discord' + } + + if discord_webhook_url: + rule['discord_webhook_url'] = discord_webhook_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DiscordAlerter(rule) + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception as ea: + assert expected_data in str(ea) + + +def test_discord_matches(): + rule = { + 'name': 'Test Discord Rule', + 'type': 'any', + 'discord_webhook_url': 'http://xxxxxxx', + 'discord_emoji_title': ':warning:', + 'discord_embed_color': 0xffffff, + 'discord_embed_footer': 'footer', + 'discord_embed_icon_url': 'http://xxxx/image.png', + 'alert': [], + 'alert_subject': 'Test Discord' + } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = AlertaAlerter(rule) + alert = DiscordAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } with mock.patch('requests.post') as mock_post_request: - alert.alert([match]) + alert.alert([match, match]) expected_data = { - "origin": "Elastalert", - "resource": "elastalert", - "severity": "debug", - "service": ["elastalert"], - "tags": [], - "text": "Probe aProbe is UP at GMT", - "value": "UP", - 
"createTime": "2014-10-10T00:00:00.000000Z", - "environment": "Production", - "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", - "timeout": 86400, - "correlate": ["ProbeUP", "ProbeDOWN"], - "group": "Health", - "attributes": {"senderIP": "1.1.1.1", "hostname": "", "TimestampEvent": ""}, - "type": "elastalert", - "event": "ProbeUP" + 'content': ':warning: Test Discord :warning:', + 'embeds': + [{ + 'description': 'Test Discord Rule\n' + + '\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'somefield: foobarbaz\n' + + '\n' + + '----------------------------------------\n' + + 'Test Discord Rule\n' + + '\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'somefield: foobarbaz\n' + + '\n' + + '----------------------------------------\n', + 'color': 0xffffff, + 'footer': { + 'text': 'footer', + 'icon_url': 'http://xxxx/image.png' + } + }] } mock_post_request.assert_called_once_with( - alert.url, + rule['discord_webhook_url'], data=mock.ANY, - headers={ - 'content-type': 'application/json'}, - verify=False + headers={'Content-Type': 'application/json'}, + proxies=None, + auth=None ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) + + actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data -def test_alerta_auth(ea): +def test_rocketchat_uses_custom_title(caplog): + caplog.set_level(logging.INFO) rule = { - 'name': 'Test Alerta rule!', - 'alerta_api_url': 'http://elastalerthost:8080/api/alert', - 'alerta_api_key': '123456789ABCDEF', - 'timeframe': datetime.timedelta(hours=1), - 'timestamp_field': '@timestamp', - 'alerta_severity': "debug", + 'name': 'Test Rule', 'type': 'any', - 'alerta_use_match_timestamp': True, - 'alert': 'alerta' - } - - match = { - '@timestamp': '2014-10-10T00:00:00', - 'sender_ip': '1.1.1.1', - 'hostname': 'aProbe' + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'alert_subject': 'Cool 
subject', + 'alert': [] } - rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = AlertaAlerter(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } mock_post_request.assert_called_once_with( - alert.url, + rule['rocket_chat_webhook_url'], data=mock.ANY, - verify=True, - headers={ - 'content-type': 'application/json', - 'Authorization': 'Key {}'.format(rule['alerta_api_key'])}) + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + assert ('elastalert', logging.INFO, 'Alert sent to Rocket.Chat') == caplog.record_tuples[0] -def test_alerta_new_style(ea): +def test_rocketchat_uses_rule_name_when_custom_title_is_not_provided(): rule = { - 'name': 'Test Alerta rule!', - 'alerta_api_url': 'http://elastalerthost:8080/api/alert', - 'timeframe': datetime.timedelta(hours=1), - 'timestamp_field': '@timestamp', - 'alerta_attributes_keys': ["hostname", "TimestampEvent", "senderIP"], - 'alerta_attributes_values': ["{hostname}", "{logdate}", "{sender_ip}"], - 'alerta_correlate': ["ProbeUP", "ProbeDOWN"], - 'alerta_event': "ProbeUP", - 'alerta_group': "Health", - 'alerta_origin': "Elastalert", - 'alerta_severity': "debug", - 'alerta_text': "Probe {hostname} is UP at {logdate} GMT", - 'alerta_value': "UP", - 'alerta_new_style_string_format': True, + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + 
rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_username_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'test elastalert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'test elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_chat_channel(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 
['http://please.dontgohere.rocketchat'], + 'rocket_chat_channel_override': '#test-alert', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '#test-alert', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_uses_list_of_custom_rocket_chat_channel(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'rocket_chat_channel_override': ['#test-alert', '#test-alert2'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data1 = { + 'username': 'elastalert', + 'channel': '#test-alert', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + expected_data2 = { + 'username': 'elastalert', + 'channel': '#test-alert2', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + 
mock_post_request.assert_called_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data1 == json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data2 == json.loads(mock_post_request.call_args_list[1][1]['data']) + + +def test_rocketchat_emoji_override(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'rocket_chat_emoji_override': ':shushing_face:', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':shushing_face:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_emoji_override_blank(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'], + 'rocket_chat_emoji_override': '', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'attachments': [ + { + 'color': 
'danger', + 'title': rule['name'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'][0], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +@pytest.mark.parametrize('msg_color, except_msg_color', [ + ('', 'danger'), + ('danger', 'danger'), + ('good', 'good'), + ('warning', 'warning') +]) +def test_rocketchat_msg_color(msg_color, except_msg_color): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'alert_subject': 'Cool subject', + 'alert': [] + } + + if msg_color: + rule['rocket_chat_msg_color'] = msg_color + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': except_msg_color, + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_text_string(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_text_string': 'text str', + 'alert_subject': 'Cool 
subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': 'text str' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_proxy(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_proxy': 'http://proxy.url', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': rule['rocket_chat_proxy']}, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_alert_fields(): + rule = { + 'name': 'Test Rule', 
+ 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_alert_fields': [ + { + 'title': 'Host', + 'value': 'somefield', + 'short': 'true' + }, + { + 'title': 'Sensors', + 'value': '@timestamp', + 'short': 'true' + } + ], + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': rule['alert_subject'], + 'text': BasicMatchString(rule, match).__str__(), + 'fields': + [ + { + 'short': 'true', + 'title': 'Host', + 'value': 'foobarbaz' + }, + { + 'short': 'true', + 'title': 'Sensors', + 'value': '2021-01-01T00:00:00' + } + ], + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_msg_color_required_error(): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_msg_color': 'abc', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post'): + alert.alert([match]) + except KeyError: + assert True + + +def test_rocketchat_ea_exception(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test Rule', + 'type': 
'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_msg_pretext': 'pretext value', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + assert 'Error posting to Rocket.Chat: ' in str(ea) + + +def test_rocketchat_get_aggregation_summary_text__maximum_width(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_msg_pretext': 'pretext value', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + assert 75 == alert.get_aggregation_summary_text__maximum_width() + + +def test_rocketchat_getinfo(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'alert_subject': 'Cool subject', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + + expected_data = { + 'type': 'rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_webhook_url': ['http://please.dontgohere.rocketchat'] + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('rocket_chat_webhook_url, expected_data', [ + ('', 'Missing required option(s): rocket_chat_webhook_url'), + ('http://please.dontgohere.rocketchat', + { + 'type': 'rocketchat', + 'rocket_chat_username_override': 'elastalert', + 'rocket_chat_webhook_url': 
['http://please.dontgohere.rocketchat'] + }) +]) +def test_rocketchat_required_error(rocket_chat_webhook_url, expected_data): + try: + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [] + } + + if rocket_chat_webhook_url: + rule['rocket_chat_webhook_url'] = rocket_chat_webhook_url + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception as ea: + assert expected_data in str(ea) + + +def test_rocketchat_attach_kibana_discover_url_when_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [], + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_attach_kibana_discover_url': True + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'kibana_discover_url': 'http://localhost:5601/app/discover#/' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + }, + { + 'color': '#ec4b98', + 'title': 'Discover in Kibana', + 'title_link': 'http://localhost:5601/app/discover#/' + } + ], + 'text': '' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_attach_kibana_discover_url_when_not_generated(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool 
subject', + 'alert': [], + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_attach_kibana_discover_url': True + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + 'somefield': 'foobarbaz', + '@timestamp': '2021-01-01T00:00:00' + } + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_kibana_discover_title(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_subject': 'Cool subject', + 'alert': [], + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'rocket_chat_attach_kibana_discover_url': True, + 'rocket_chat_kibana_discover_title': 'Click to discover in Kibana' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + 'somefield': 'foobarbaz', + '@timestamp': '2021-01-01T00:00:00', + 'kibana_discover_url': 'http://localhost:5601/app/discover#/' + } + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + }, + { + 'color': '#ec4b98', + 'title': 'Click to discover in Kibana', + 'title_link': 'http://localhost:5601/app/discover#/' + } + ], + 'text': 
'' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_kibana_discover_color(): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 'alert_text_type': 'alert_text_only', + 'alert': [], + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocket_chat', + 'rocket_chat_attach_kibana_discover_url': True, + 'rocket_chat_kibana_discover_color': 'blue' + } + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + 'somefield': 'foobarbaz', + '@timestamp': '2021-01-01T00:00:00', + 'kibana_discover_url': 'http://localhost:5601/app/discover#/' + } + + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Test Rule', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + }, + { + 'color': 'blue', + 'title': 'Discover in Kibana', + 'title_link': 'http://localhost:5601/app/discover#/' + } + ], + 'text': '' + } + + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + timeout=10, + verify=True + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +@pytest.mark.parametrize('ca_certs, ignore_ssl_errors, excpet_verify', [ + ('', '', True), + ('', True, False), + ('', False, True), + (True, '', True), + (True, True, True), + (True, False, True), + (False, '', True), + (False, True, False), + (False, False, True) +]) +def test_rocketchat_ca_certs(ca_certs, ignore_ssl_errors, excpet_verify): + rule = { + 'name': 'Test Rule', + 'type': 'any', + 
'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'alert_subject': 'Cool subject', + 'alert': [] + } + if ca_certs: + rule['rocket_chat_ca_certs'] = ca_certs + + if ignore_ssl_errors: + rule['rocket_chat_ignore_ssl_errors'] = ignore_ssl_errors + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) + match = { + '@timestamp': '2017-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' + } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=excpet_verify, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_rocketchat_uses_custom_timeout(): + rule = { + 'name': 'Test Rule', 'type': 'any', - 'alerta_use_match_timestamp': True, - 'alert': 'alerta' + 'rocket_chat_webhook_url': 'http://please.dontgohere.rocketchat', + 'alert_subject': 'Cool subject', + 'alert': [], + 'rocket_chat_timeout': 20 } - + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = RocketChatAlerter(rule) match = { - '@timestamp': '2014-10-10T00:00:00', - # 'key': ---- missing field on purpose, to verify that simply the text is left empty - # 'logdate': ---- missing field on purpose, to verify that simply the text is left empty - 'sender_ip': '1.1.1.1', - 'hostname': 'aProbe' + '@timestamp': '2016-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = { + 'username': 'elastalert', + 'channel': '', + 'emoji': ':ghost:', + 
'attachments': [ + { + 'color': 'danger', + 'title': 'Cool subject', + 'text': BasicMatchString(rule, match).__str__(), + 'fields': [] + } + ], + 'text': '' } + mock_post_request.assert_called_once_with( + rule['rocket_chat_webhook_url'], + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=20 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + +def test_datadog_alerter(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'datadog_api_key': 'test-api-key', + 'datadog_app_key': 'test-app-key', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } rules_loader = FileRulesLoader({}) rules_loader.load_modules(rule) - alert = AlertaAlerter(rule) + alert = DatadogAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'name': 'datadog-test-name' + } with mock.patch('requests.post') as mock_post_request: alert.alert([match]) expected_data = { - "origin": "Elastalert", - "resource": "elastalert", - "severity": "debug", - "service": ["elastalert"], - "tags": [], - "text": "Probe aProbe is UP at GMT", - "value": "UP", - "createTime": "2014-10-10T00:00:00.000000Z", - "environment": "Production", - "rawData": "Test Alerta rule!\n\n@timestamp: 2014-10-10T00:00:00\nhostname: aProbe\nsender_ip: 1.1.1.1\n", - "timeout": 86400, - "correlate": ["ProbeUP", "ProbeDOWN"], - "group": "Health", - "attributes": {"senderIP": "1.1.1.1", "hostname": "aProbe", "TimestampEvent": ""}, - "type": "elastalert", - "event": "ProbeUP" + 'title': rule['alert_subject'], + 'text': "Test Datadog Event Alerter\n\n@timestamp: 2021-01-01T00:00:00\nname: datadog-test-name\n" + } + mock_post_request.assert_called_once_with( + "https://api.datadoghq.com/api/v1/events", + data=mock.ANY, + headers={ + 'Content-Type': 'application/json', + 'DD-API-KEY': rule['datadog_api_key'], + 'DD-APPLICATION-KEY': rule['datadog_app_key'] + } + ) + 
actual_data = json.loads(mock_post_request.call_args_list[0][1]['data']) + assert expected_data == actual_data + assert ('elastalert', logging.INFO, 'Alert sent to Datadog') == caplog.record_tuples[0] + + +def test_datadog_ea_exception(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'datadog_api_key': 'test-api-key', + 'datadog_app_key': 'test-app-key', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'name': 'datadog-test-name' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + assert 'Error posting event to Datadog:' in str(ea) + + +def test_datadog_getinfo(): + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'datadog_api_key': 'test-api-key', + 'datadog_app_key': 'test-app-key', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + + expected_data = {'type': 'datadog'} + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('datadog_api_key, datadog_app_key, expected_data', [ + ('', '', 'Missing required option(s): datadog_api_key, datadog_app_key'), + ('xxxx1', '', 'Missing required option(s): datadog_api_key, datadog_app_key'), + ('', 'xxxx2', 'Missing required option(s): datadog_api_key, datadog_app_key'), + ('xxxx1', 'xxxx2', + { + 'type': 'datadog' + }), +]) +def test_datadog_required_error(datadog_api_key, datadog_app_key, expected_data): + try: + rule = { + 'name': 'Test Datadog Event Alerter', + 'type': 'any', + 'alert': [], + 'alert_subject': 'Test Datadog Event Alert' + } + + if datadog_api_key: + rule['datadog_api_key'] = 
datadog_api_key + + if datadog_app_key: + rule['datadog_app_key'] = datadog_app_key + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = DatadogAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception as ea: + assert expected_data in str(ea) + + +def test_alertmanager(caplog): + caplog.set_level(logging.INFO) + rule = { + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': ['http://alertmanager:9093'], + 'alertmanager_alertname': 'Title', + 'alertmanager_annotations': {'severity': 'error'}, + 'alertmanager_labels': {'source': 'elastalert'}, + 'alertmanager_fields': {'msg': 'message', 'log': '@log_name'}, + 'alert_subject_args': ['message', '@log_name'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'message': 'Quit 123', + '@log_name': 'mysqld.general' } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = [ + { + 'annotations': + { + 'severity': 'error', + 'summary': 'Test Alertmanager Rule', + 'description': 'Test Alertmanager Rule\n\n' + + '@log_name: mysqld.general\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'message: Quit 123\nsomefield: foobarbaz\n' + }, + 'labels': { + 'source': 'elastalert', + 'msg': 'Quit 123', + 'log': 'mysqld.general', + 'alertname': 'Title', + 'elastalert_rule': 'Test Alertmanager Rule' + } + } + ] mock_post_request.assert_called_once_with( - alert.url, + 'http://alertmanager:9093/api/v1/alerts', data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, verify=True, - headers={ - 'content-type': 'application/json'} + timeout=10 ) - assert expected_data == json.loads( - mock_post_request.call_args_list[0][1]['data']) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + assert 
('elastalert', logging.INFO, "Alert sent to Alertmanager") == caplog.record_tuples[0] -def test_alert_subject_size_limit_no_args(ea): +def test_alertmanager_proxy(): rule = { - 'name': 'test_rule', - 'type': mock_rule(), - 'owner': 'the_owner', - 'priority': 2, - 'alert_subject': 'A very long subject', - 'alert_subject_max_len': 5 + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': ['http://alertmanager:9093'], + 'alertmanager_alertname': 'Title', + 'alertmanager_annotations': {'severity': 'error'}, + 'alertmanager_labels': {'source': 'elastalert'}, + 'alertmanager_fields': {'msg': 'message', 'log': '@log_name'}, + 'alertmanager_proxy': 'http://proxy.url', + 'alert_subject_args': ['message', '@log_name'], + 'alert': [] } - alert = Alerter(rule) - alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - assert 5 == len(alertSubject) + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'message': 'Quit 123', + '@log_name': 'mysqld.general' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + expected_data = [ + { + 'annotations': + { + 'severity': 'error', + 'summary': 'Test Alertmanager Rule', + 'description': 'Test Alertmanager Rule\n\n' + + '@log_name: mysqld.general\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'message: Quit 123\nsomefield: foobarbaz\n' + }, + 'labels': { + 'source': 'elastalert', + 'msg': 'Quit 123', + 'log': 'mysqld.general', + 'alertname': 'Title', + 'elastalert_rule': 'Test Alertmanager Rule' + } + } + ] -def test_alert_subject_size_limit_with_args(ea): + mock_post_request.assert_called_once_with( + 'http://alertmanager:9093/api/v1/alerts', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies={'https': 'http://proxy.url'}, + verify=True, + timeout=10 + ) + assert expected_data 
== json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def test_alertmanager_timeout(): rule = { - 'name': 'test_rule', - 'type': mock_rule(), - 'owner': 'the_owner', - 'priority': 2, - 'alert_subject': 'Test alert for {0} {1}', - 'alert_subject_args': ['test_term', 'test.term'], - 'alert_subject_max_len': 6 + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': ['http://alertmanager:9093'], + 'alertmanager_alertname': 'Title', + 'alertmanager_annotations': {'severity': 'error'}, + 'alertmanager_labels': {'source': 'elastalert'}, + 'alertmanager_fields': {'msg': 'message', 'log': '@log_name'}, + 'alertmanager_timeout': 20, + 'alert_subject_args': ['message', '@log_name'], + 'alert': [] } - alert = Alerter(rule) - alertSubject = alert.create_custom_title([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}]) - assert 6 == len(alertSubject) + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'message': 'Quit 123', + '@log_name': 'mysqld.general' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = [ + { + 'annotations': + { + 'severity': 'error', + 'summary': 'Test Alertmanager Rule', + 'description': 'Test Alertmanager Rule\n\n' + + '@log_name: mysqld.general\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'message: Quit 123\nsomefield: foobarbaz\n' + }, + 'labels': { + 'source': 'elastalert', + 'msg': 'Quit 123', + 'log': 'mysqld.general', + 'alertname': 'Title', + 'elastalert_rule': 'Test Alertmanager Rule' + } + } + ] + + mock_post_request.assert_called_once_with( + 'http://alertmanager:9093/api/v1/alerts', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=True, + timeout=20 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + 
+@pytest.mark.parametrize('ca_certs, ignore_ssl_errors, expect_verify', [ + ('', '', True), + ('', True, False), + ('', False, True), + (True, '', True), + (True, True, True), + (True, False, True), + (False, '', True), + (False, True, False), + (False, False, True) +]) +def test_alertmanager_ca_certs(ca_certs, ignore_ssl_errors, expect_verify): + rule = { + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': ['http://alertmanager:9093'], + 'alertmanager_alertname': 'Title', + 'alertmanager_annotations': {'severity': 'error'}, + 'alertmanager_labels': {'source': 'elastalert'}, + 'alertmanager_fields': {'msg': 'message', 'log': '@log_name'}, + 'alert_subject_args': ['message', '@log_name'], + 'alert': [] + } + if ca_certs: + rule['alertmanager_ca_certs'] = ca_certs + + if ignore_ssl_errors: + rule['alertmanager_ignore_ssl_errors'] = ignore_ssl_errors + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz', + 'message': 'Quit 123', + '@log_name': 'mysqld.general' + } + with mock.patch('requests.post') as mock_post_request: + alert.alert([match]) + + expected_data = [ + { + 'annotations': + { + 'severity': 'error', + 'summary': 'Test Alertmanager Rule', + 'description': 'Test Alertmanager Rule\n\n' + + '@log_name: mysqld.general\n' + + '@timestamp: 2021-01-01T00:00:00\n' + + 'message: Quit 123\nsomefield: foobarbaz\n' + }, + 'labels': { + 'source': 'elastalert', + 'msg': 'Quit 123', + 'log': 'mysqld.general', + 'alertname': 'Title', + 'elastalert_rule': 'Test Alertmanager Rule' + } + } + ] + + mock_post_request.assert_called_once_with( + 'http://alertmanager:9093/api/v1/alerts', + data=mock.ANY, + headers={'content-type': 'application/json'}, + proxies=None, + verify=expect_verify, + timeout=10 + ) + assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data']) + + +def 
test_alertmanager_ea_exception(): + with pytest.raises(EAException) as ea: + rule = { + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': ['http://alertmanager:9093'], + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + match = { + '@timestamp': '2021-01-01T00:00:00', + 'somefield': 'foobarbaz' + } + mock_run = mock.MagicMock(side_effect=RequestException) + with mock.patch('requests.post', mock_run), pytest.raises(RequestException): + alert.alert([match]) + assert 'Error posting to Alertmanager' in str(ea) + + +def test_alertmanager_getinfo(): + rule = { + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alertmanager_hosts': 'http://alertmanager:9093', + 'alert': [] + } + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + + expected_data = { + 'type': 'alertmanager' + } + actual_data = alert.get_info() + assert expected_data == actual_data + + +@pytest.mark.parametrize('alertmanager_hosts, expected_data', [ + ([], 'Missing required option(s): alertmanager_hosts'), + (['http://alertmanager:9093'], + { + 'type': 'alertmanager' + }), +]) +def test_alertmanager_required_error(alertmanager_hosts, expected_data): + try: + rule = { + 'name': 'Test Alertmanager Rule', + 'type': 'any', + 'alert': [] + } + + if alertmanager_hosts: + rule['alertmanager_hosts'] = alertmanager_hosts + + rules_loader = FileRulesLoader({}) + rules_loader.load_modules(rule) + alert = AlertmanagerAlerter(rule) + + actual_data = alert.get_info() + assert expected_data == actual_data + except Exception as ea: + print('ea %s' % str(ea)) + assert expected_data in str(ea) diff --git a/tests/base_test.py b/tests/base_test.py index 92dc35f7e..e93c5f762 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -5,7 +5,7 @@ import threading import elasticsearch -import mock +from unittest import mock import pytest from elasticsearch.exceptions 
import ConnectionError from elasticsearch.exceptions import ElasticsearchException @@ -427,8 +427,8 @@ def test_agg_matchtime(ea): call4 = ea.writeback_es.deprecated_search.call_args_list[10][1]['body'] assert 'alert_time' in call2['filter']['range'] - assert call3['query']['query_string']['query'] == 'aggregate_id:ABCD' - assert call4['query']['query_string']['query'] == 'aggregate_id:CDEF' + assert call3['query']['query_string']['query'] == 'aggregate_id:"ABCD"' + assert call4['query']['query_string']['query'] == 'aggregate_id:"CDEF"' assert ea.writeback_es.deprecated_search.call_args_list[9][1]['size'] == 1337 @@ -596,8 +596,8 @@ def test_agg_with_aggregation_key(ea): call4 = ea.writeback_es.deprecated_search.call_args_list[10][1]['body'] assert 'alert_time' in call2['filter']['range'] - assert call3['query']['query_string']['query'] == 'aggregate_id:ABCD' - assert call4['query']['query_string']['query'] == 'aggregate_id:CDEF' + assert call3['query']['query_string']['query'] == 'aggregate_id:"ABCD"' + assert call4['query']['query_string']['query'] == 'aggregate_id:"CDEF"' assert ea.writeback_es.deprecated_search.call_args_list[9][1]['size'] == 1337 @@ -1159,7 +1159,7 @@ def test_wait_until_responsive(ea): ] -def test_wait_until_responsive_timeout_es_not_available(ea, capsys): +def test_wait_until_responsive_timeout_es_not_available(ea, caplog): """Bail out if ElasticSearch doesn't (quickly) become responsive.""" # Never becomes responsive :-) @@ -1175,8 +1175,8 @@ def test_wait_until_responsive_timeout_es_not_available(ea, capsys): assert exc.value.code == 1 # Ensure we get useful diagnostics. - output, errors = capsys.readouterr() - assert 'Could not reach ElasticSearch at "es:14900".' in errors + user, level, message = caplog.record_tuples[0] + assert 'Could not reach ElasticSearch at "es:14900".' in message # Slept until we passed the deadline. 
sleep.mock_calls == [ @@ -1186,7 +1186,7 @@ def test_wait_until_responsive_timeout_es_not_available(ea, capsys): ] -def test_wait_until_responsive_timeout_index_does_not_exist(ea, capsys): +def test_wait_until_responsive_timeout_index_does_not_exist(ea, caplog): """Bail out if ElasticSearch doesn't (quickly) become responsive.""" # Never becomes responsive :-) @@ -1202,8 +1202,8 @@ def test_wait_until_responsive_timeout_index_does_not_exist(ea, capsys): assert exc.value.code == 1 # Ensure we get useful diagnostics. - output, errors = capsys.readouterr() - assert 'Writeback alias "wb_a" does not exist, did you run `elastalert-create-index`?' in errors + user, level, message = caplog.record_tuples[0] + assert 'Writeback alias "wb_a" does not exist, did you run `elastalert-create-index`?' in message # Slept until we passed the deadline. sleep.mock_calls == [ diff --git a/tests/conftest.py b/tests/conftest.py index 6844296ee..853c0ddbd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,7 @@ import logging import os -import mock +from unittest import mock import pytest import elastalert.elastalert diff --git a/tests/kibana_discover_test.py b/tests/kibana_discover_test.py index f06fe4e0c..635fbcadb 100644 --- a/tests/kibana_discover_test.py +++ b/tests/kibana_discover_test.py @@ -38,7 +38,26 @@ def test_generate_kibana_discover_url_with_kibana_5x_and_6x(kibana_version): assert url == expectedUrl -@pytest.mark.parametrize("kibana_version", ['7.0', '7.1', '7.2', '7.3']) +@pytest.mark.parametrize("kibana_version", [ + '7.0', + '7.1', + '7.2', + '7.3', + '7.4', + '7.5', + '7.6', + '7.7', + '7.8', + '7.9', + '7.10', + '7.11', + '7.12', + '7.13', + '7.14', + '7.15', + '7.16', + '7.17' +]) def test_generate_kibana_discover_url_with_kibana_7x(kibana_version): url = generate_kibana_discover_url( rule={ diff --git a/tests/loaders_test.py b/tests/loaders_test.py index bb8d3d873..009207fd5 100644 --- a/tests/loaders_test.py +++ b/tests/loaders_test.py @@ -3,7 +3,7 @@ 
import datetime import os -import mock +from unittest import mock import pytest import elastalert.alerts @@ -340,7 +340,7 @@ def test_raises_on_missing_config(): mock_rule_open.return_value = test_rule_copy with mock.patch('os.walk') as mock_walk: mock_walk.return_value = [('', [], ['testrule.yaml'])] - with pytest.raises(EAException, message='key %s should be required' % key): + with pytest.raises(EAException): rules = load_conf(test_args) rules['rules'] = rules['rules_loader'].load(rules) diff --git a/tests/rules_test.py b/tests/rules_test.py index 1954b5d54..12d4f9298 100644 --- a/tests/rules_test.py +++ b/tests/rules_test.py @@ -2,7 +2,7 @@ import copy import datetime -import mock +from unittest import mock import pytest from elastalert.ruletypes import AnyRule diff --git a/tests/util_test.py b/tests/util_test.py index 55a2f9c8f..95f026b5b 100644 --- a/tests/util_test.py +++ b/tests/util_test.py @@ -2,7 +2,7 @@ from datetime import datetime from datetime import timedelta -import mock +from unittest import mock import pytest from dateutil.parser import parse as dt diff --git a/tox.ini b/tox.ini index 71099e17c..b5763f264 100644 --- a/tox.ini +++ b/tox.ini @@ -1,11 +1,11 @@ [tox] project = elastalert -envlist = py36,docs +envlist = py310,docs [testenv] deps = -rrequirements-dev.txt commands = - coverage run --source=elastalert/,tests/ -m pytest --strict {posargs} + coverage run --source=elastalert/,tests/ -m pytest --strict-markers {posargs} coverage report -m flake8 . @@ -25,6 +25,6 @@ norecursedirs = .* virtualenv_run docs build venv env [testenv:docs] deps = {[testenv]deps} - sphinx==1.6.6 + sphinx==4.4.0 changedir = docs commands = sphinx-build -b html -d build/doctrees -W source build/html