Commit 1cdde8cc authored by Sophie Brun

New upstream version 0.1.21

parent e5e2bbed
......@@ -28,16 +28,16 @@ This is configured by a set of rules, each of which defines a query, a rule type
Several rule types with common monitoring paradigms are included with ElastAlert:
- "Match where there are X events in Y time" (``frequency`` type)
- "Match when the rate of events increases or decreases" (``spike`` type)
- "Match when there are less than X events in Y time" (``flatline`` type)
- "Match when a certain field matches a blacklist/whitelist" (``blacklist`` and ``whitelist`` type)
- "Match on any event matching a given filter" (``any`` type)
- "Match when a field has two different values within some time" (``change`` type)
- "Match when a never before seen term appears in a field" (``new_term`` type)
- "Match when the number of unique values for a field is above or below a threshold (``cardinality`` type)
- "Match where there are at least X events in Y time" (``frequency`` type)
- "Match when the rate of events increases or decreases" (``spike`` type)
- "Match when there are less than X events in Y time" (``flatline`` type)
- "Match when a certain field matches a blacklist/whitelist" (``blacklist`` and ``whitelist`` type)
- "Match on any event matching a given filter" (``any`` type)
- "Match when a field has two different values within some time" (``change`` type)
- "Match when a never before seen term appears in a field" (``new_term`` type)
- "Match when the number of unique values for a field is above or below a threshold" (``cardinality`` type)
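For illustration, a minimal ``frequency`` rule can be sketched as a Python dict, in the same style the test suite further down uses to build rules; the option names are standard ElastAlert settings, but the field names and values here are made up:

```python
# Sketch of a "frequency" rule (match when at least num_events occur within timeframe).
# Values are illustrative; real rules are normally written as YAML files.
rule = {
    'name': 'Example frequency rule',
    'type': 'frequency',
    'index': 'logstash-*',
    'num_events': 50,
    'timeframe': {'hours': 4},
    'filter': [{'term': {'some_field': 'some_value'}}],
    'alert': ['email'],
    'email': ['alerts@example.com'],
}
```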
Currently, we have support built in for the following alert types:
Currently, we have built-in support for the following alert types:
- Email
- JIRA
......@@ -70,9 +70,9 @@ To get started, check out `Running ElastAlert For The First Time` in the [docume
``$ python elastalert/elastalert.py [--debug] [--verbose] [--start <timestamp>] [--end <timestamp>] [--rule <filename.yaml>] [--config <filename.yaml>]``
``--debug`` will print additional information to the screen, suppress alerts, and instead print the alert body.
``--debug`` will print additional information to the screen, suppress alerts, and instead print the alert body. Not compatible with ``--verbose``.
``--verbose`` will print additional information without supressing alerts.
``--verbose`` will print additional information without suppressing alerts. Not compatible with ``--debug``.
``--start`` will begin querying at the given timestamp. By default, ElastAlert will begin querying from the present.
Timestamp format is ``YYYY-MM-DDTHH:MM:SS[-/+HH:MM]`` (Note the T between date and hour).
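As a quick illustration of that format, the standalone sketch below uses Python 3's ``datetime`` module (not part of ElastAlert itself) to produce timestamps in this shape:

```python
from datetime import datetime, timedelta, timezone

# UTC timestamp: 2017-01-01T00:00:00+00:00
print(datetime(2017, 1, 1, tzinfo=timezone.utc).isoformat())

# Timestamp with an explicit -08:00 offset: 2017-01-01T00:00:00-08:00
print(datetime(2017, 1, 1, tzinfo=timezone(timedelta(hours=-8))).isoformat())
```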
......
# Change Log
## v0.1.21
### Fixed
- Fixed an incomplete bug fix for preventing duplicate enhancement runs
## v0.1.20
### Added
......
......@@ -201,7 +201,10 @@ Several arguments are available when running ElastAlert:
``--debug`` will run ElastAlert in debug mode. This will increase the logging verboseness, change
all alerts to ``DebugAlerter``, which prints alerts and suppresses their normal action, and skips writing
search and alert metadata back to Elasticsearch.
search and alert metadata back to Elasticsearch. Not compatible with ``--verbose``.
``--verbose`` will increase the logging verboseness, which allows you to see information about the state
of queries. Not compatible with ``--debug``.
``--start <timestamp>`` will force ElastAlert to begin querying from the given time, instead of the default,
querying from the present. The timestamp should be ISO8601, e.g. ``YYYY-MM-DDTHH:MM:SS`` (UTC) or with timezone
......@@ -218,9 +221,6 @@ or its subdirectories.
``--rule``. <unit> is one of days, weeks, hours, minutes or seconds. <number> is an integer. For example,
``--rule noisy_rule.yaml --silence hours=4`` will stop noisy_rule from generating any alerts for 4 hours.
``--verbose`` will increase the logging verboseness, which allows you to see information about the state
of queries.
``--es_debug`` will enable logging for all queries made to Elasticsearch.
``--es_debug_trace`` will enable logging curl commands for all queries made to Elasticsearch to a file.
......
......@@ -1501,6 +1501,8 @@ The alerter requires the following option:
``pagerduty_client_name``: The name of the monitoring client that is triggering this event.
``pagerduty_event_type``: Any of the following: `trigger`, `resolve`, or `acknowledge`. (Optional, defaults to `trigger`)
Optional:
``alert_subject``: If set, this will be used as the Incident description within PagerDuty. If not set, ElastAlert will default to using the rule name of the alert for the incident.
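Putting these options together, a PagerDuty alert configuration might look like the following sketch, shown as a Python dict in the style of the tests further down; the key names come from this documentation, while the values are made up:

```python
# Sketch of a rule using the PagerDuty alerter; values are illustrative.
rule = {
    'name': 'Example PagerDuty rule',
    'type': 'any',
    'alert': ['pagerduty'],
    'pagerduty_service_key': 'example-service-key',
    'pagerduty_client_name': 'example monitoring client',
    'pagerduty_event_type': 'resolve',   # optional; defaults to 'trigger'
    'alert_subject': 'Example incident description',
}
```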
......
......@@ -1077,6 +1077,7 @@ class PagerDutyAlerter(Alerter):
self.pagerduty_client_name = self.rule['pagerduty_client_name']
self.pagerduty_incident_key = self.rule.get('pagerduty_incident_key', '')
self.pagerduty_incident_key_args = self.rule.get('pagerduty_incident_key_args', None)
self.pagerduty_event_type = self.rule.get('pagerduty_event_type', 'trigger')
self.pagerduty_proxy = self.rule.get('pagerduty_proxy', None)
self.url = 'https://events.pagerduty.com/generic/2010-04-15/create_event.json'
......@@ -1088,7 +1089,7 @@ class PagerDutyAlerter(Alerter):
payload = {
'service_key': self.pagerduty_service_key,
'description': self.create_title(matches),
'event_type': 'trigger',
'event_type': self.pagerduty_event_type,
'incident_key': self.get_incident_key(matches),
'client': self.pagerduty_client_name,
'details': {
......@@ -1108,7 +1109,13 @@ class PagerDutyAlerter(Alerter):
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to pagerduty: %s" % e)
elastalert_logger.info("Trigger sent to PagerDuty")
if self.pagerduty_event_type == 'trigger':
elastalert_logger.info("Trigger sent to PagerDuty")
elif self.pagerduty_event_type == 'resolve':
elastalert_logger.info("Resolve sent to PagerDuty")
elif self.pagerduty_event_type == 'acknowledge':
elastalert_logger.info("acknowledge sent to PagerDuty")
def get_incident_key(self, matches):
if self.pagerduty_incident_key_args:
......
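As a side note on the logging change above, the three-way ``if``/``elif`` on ``pagerduty_event_type`` could equally be written as a single table lookup; this is only a sketch of that alternative, not the upstream implementation:

```python
# Sketch: map each allowed pagerduty_event_type to its confirmation message.
PD_LOG_MESSAGES = {
    'trigger': 'Trigger sent to PagerDuty',
    'resolve': 'Resolve sent to PagerDuty',
    'acknowledge': 'Acknowledge sent to PagerDuty',
}

def log_pagerduty_event(logger, event_type):
    # Fall back to a generic message for any unexpected value.
    logger.info(PD_LOG_MESSAGES.get(event_type, 'Event sent to PagerDuty'))
```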
......@@ -64,7 +64,7 @@ def main():
host = args.host if args.host else data.get('es_host')
port = args.port if args.port else data.get('es_port')
username = args.username if args.username else data.get('es_username')
password = args.passowrd if args.password else data.get('es_password')
password = args.password if args.password else data.get('es_password')
url_prefix = args.url_prefix if args.url_prefix is not None else data.get('es_url_prefix', '')
use_ssl = args.ssl if args.ssl is not None else data.get('use_ssl')
verify_certs = args.verify_certs if args.verify_certs is not None else data.get('verify_certs') is not False
......@@ -75,7 +75,7 @@ def main():
client_key = data.get('client_key')
else:
username = args.username if args.username else data.get('es_username')
password = args.passowrd if args.password else data.get('es_password')
password = args.password if args.password else data.get('es_password')
aws_region = args.aws_region
host = args.host if args.host else raw_input('Enter Elasticsearch host: ')
port = args.port if args.port else int(raw_input('Enter Elasticsearch port: '))
......
......@@ -71,14 +71,16 @@ class ElastAlerter():
dest='config',
default="config.yaml",
help='Global config file (default: config.yaml)')
parser.add_argument('--debug', action='store_true', dest='debug', help='Suppresses alerts and prints information instead')
parser.add_argument('--debug', action='store_true', dest='debug', help='Suppresses alerts and prints information instead. '
'Not compatible with `--verbose`')
parser.add_argument('--rule', dest='rule', help='Run only a specific rule (by filename, must still be in rules folder)')
parser.add_argument('--silence', dest='silence', help='Silence rule for a time period. Must be used with --rule. Usage: '
'--silence <units>=<number>, eg. --silence hours=2')
parser.add_argument('--start', dest='start', help='YYYY-MM-DDTHH:MM:SS Start querying from this timestamp. '
'Use "NOW" to start from current time. (Default: present)')
parser.add_argument('--end', dest='end', help='YYYY-MM-DDTHH:MM:SS Query to this timestamp. (Default: present)')
parser.add_argument('--verbose', action='store_true', dest='verbose', help='Increase verbosity without suppressing alerts')
parser.add_argument('--verbose', action='store_true', dest='verbose', help='Increase verbosity without suppressing alerts. '
'Not compatible with `--debug`')
parser.add_argument('--patience', action='store', dest='timeout',
type=parse_duration,
default=datetime.timedelta(),
......@@ -102,12 +104,18 @@ class ElastAlerter():
self.debug = self.args.debug
self.verbose = self.args.verbose
if self.verbose and self.debug:
elastalert_logger.info(
"Note: --debug and --verbose flags are set. --debug takes precedent."
)
if self.verbose or self.debug:
elastalert_logger.setLevel(logging.INFO)
if self.debug:
elastalert_logger.info(
"Note: In debug mode, alerts will be logged to console but NOT actually sent. To send them, use --verbose."
"""Note: In debug mode, alerts will be logged to console but NOT actually sent.
To send them but remain verbose, use --verbose instead."""
)
if not self.args.es_debug:
......@@ -329,7 +337,7 @@ class ElastAlerter():
:param rule: The rule configuration.
:param starttime: The earliest time to query.
:param endtime: The latest time to query.
:return: A list of hits, bounded by rule['max_query_size'].
:return: A list of hits, bounded by rule['max_query_size'] (or self.max_query_size).
"""
query = self.get_query(
rule['filter'],
......@@ -355,7 +363,7 @@ class ElastAlerter():
res = self.current_es.search(
scroll=scroll_keepalive,
index=index,
size=rule['max_query_size'],
size=rule.get('max_query_size', self.max_query_size),
body=query,
ignore_unavailable=True,
**extra_args
......@@ -1441,7 +1449,7 @@ class ElastAlerter():
else:
# If this rule isn't using aggregation, this must be a retry of a failed alert
retried = False
if 'aggregation' not in rule:
if not rule.get('aggregation'):
retried = True
self.alert([match_body], rule, alert_time=alert_time, retried=retried)
......
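The switch from ``'aggregation' not in rule`` to ``not rule.get('aggregation')`` above also treats an aggregation setting that is present but empty as disabled; a minimal standalone illustration:

```python
# Rule with the aggregation key present but set to a falsy value.
rule = {'aggregation': None}

print('aggregation' not in rule)    # False: the old check would not mark this as a retry
print(not rule.get('aggregation'))  # True: the new check marks it as a retry
```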
......@@ -232,6 +232,7 @@ properties:
### PagerDuty
pagerduty_service_key: {type: string}
pagerduty_client_name: {type: string}
pagerduty_event_type: {enum: [none, trigger, resolve, acknowledge]}
### Exotel
exotel_account_sid: {type: string}
......
......@@ -8,7 +8,7 @@ from setuptools import setup
base_dir = os.path.dirname(__file__)
setup(
name='elastalert',
version='0.1.20',
version='0.1.21',
description='Runs custom filters on Elasticsearch and alerts on matches',
author='Quentin Long',
author_email='qlo@yelp.com',
......
......@@ -1301,6 +1301,42 @@ def test_pagerduty_alerter_custom_alert_subject_with_args():
assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
def test_pagerduty_alerter_custom_alert_subject_with_args_specifying_trigger():
rule = {
'name': 'Test PD Rule',
'type': 'any',
'alert_subject': '{0} kittens',
'alert_subject_args': ['somefield'],
'pagerduty_service_key': 'magicalbadgers',
'pagerduty_event_type': 'trigger',
'pagerduty_client_name': 'ponies inc.',
'pagerduty_incident_key': 'custom {0}',
'pagerduty_incident_key_args': ['someotherfield'],
'alert': []
}
load_modules(rule)
alert = PagerDutyAlerter(rule)
match = {
'@timestamp': '2017-01-01T00:00:00',
'somefield': 'Stinkiest',
'someotherfield': 'foobarbaz'
}
with mock.patch('requests.post') as mock_post_request:
alert.alert([match])
expected_data = {
'client': 'ponies inc.',
'description': 'Stinkiest kittens',
'details': {
'information': 'Test PD Rule\n\n@timestamp: 2017-01-01T00:00:00\nsomefield: Stinkiest\nsomeotherfield: foobarbaz\n'
},
'event_type': 'trigger',
'incident_key': 'custom foobarbaz',
'service_key': 'magicalbadgers',
}
mock_post_request.assert_called_once_with(alert.url, data=mock.ANY, headers={'content-type': 'application/json'}, proxies=None)
assert expected_data == json.loads(mock_post_request.call_args_list[0][1]['data'])
def test_alert_text_kw(ea):
rule = ea.rules[0].copy()
rule['alert_text'] = '{field} at {time}'
......