Commit efe80e19 authored by Sophie Brun's avatar Sophie Brun

New upstream version 0.1.29

parent f8e0454c
# Change Log
## v0.1.29
### Added
- Added a feature forget_keys to prevent realerting when using flatline with query_key
- Added a new alert_text_type, aggregation_summary_only
### Fixed
- Fixed incorrect documentation about es_conn_timeout default
## v0.1.28
### Added
......
......@@ -129,7 +129,7 @@ The environment variable ``ES_USE_SSL`` will override this field.
``es_send_get_body_as``: Optional; Method for querying Elasticsearch - ``GET``, ``POST`` or ``source``. The default is ``GET``
``es_conn_timeout``: Optional; sets timeout for connecting to and reading from ``es_host``; defaults to ``10``.
``es_conn_timeout``: Optional; sets timeout for connecting to and reading from ``es_host``; defaults to ``20``.
``rules_folder``: The name of the folder which contains rule configuration files. ElastAlert will load all
files in this folder, and all subdirectories, that end in .yaml. If the contents of this folder change, ElastAlert will load, reload
......
......@@ -940,6 +940,9 @@ default 50, unique terms.
``query_key``: With flatline rule, ``query_key`` means that an alert will be triggered if any value of ``query_key`` has been seen at least once
and then falls below the threshold.
``forget_keys``: Only valid when used with ``query_key``. If this is set to true, ElastAlert will "forget" about the ``query_key`` value that
triggers an alert, therefore preventing any more alerts for it until it's seen again.
New Term
~~~~~~~~
......@@ -1167,6 +1170,12 @@ With ``alert_text_type: exclude_fields``::
{top_counts}
With ``alert_text_type: aggregation_summary_only``::
body = rule_name
aggregation_summary
+
ruletype_text is the string returned by RuleType.get_match_str.
field_values will contain every key value pair included in the results from Elasticsearch. These fields include "@timestamp" (or the value of ``timestamp_field``),
......
......@@ -241,11 +241,12 @@ class Alerter(object):
def create_alert_body(self, matches):
    """Build the alert body text for a set of matches.

    The body always begins with the aggregation summary text. Unless the
    rule sets ``alert_text_type: aggregation_summary_only``, the formatted
    text of every match is appended, with a dashed separator between
    matches when more than one match is being aggregated.
    """
    body = self.get_aggregation_summary_text(matches)
    # Skip per-match detail entirely when only the summary was requested.
    if self.rule.get('alert_text_type') != 'aggregation_summary_only':
        for match in matches:
            body += unicode(BasicMatchString(self.rule, match))
            # Separate text of aggregated alerts with dashes
            if len(matches) > 1:
                body += '\n----------------------------------------\n'
    return body
def get_aggregation_summary_text__maximum_width(self):
......@@ -797,10 +798,11 @@ class JiraAlerter(Alerter):
def create_alert_body(self, matches):
    """Build the JIRA issue description for a set of matches.

    Starts with the configured issue description followed by the
    aggregation summary. Unless the rule sets
    ``alert_text_type: aggregation_summary_only``, the JIRA-formatted text
    of every match is appended, separated by a dashed line when more than
    one match is being aggregated.
    """
    body = self.description + '\n'
    body += self.get_aggregation_summary_text(matches)
    # Skip per-match detail entirely when only the summary was requested.
    if self.rule.get('alert_text_type') != 'aggregation_summary_only':
        for match in matches:
            body += unicode(JiraFormattedMatchString(self.rule, match))
            if len(matches) > 1:
                body += '\n----------------------------------------\n'
    return body
def get_aggregation_summary_text(self, matches):
......
......@@ -6,6 +6,7 @@ from alerts import Alerter
from alerts import BasicMatchString
from util import EAException
from util import elastalert_logger
from util import lookup_es_key
class OpsGenieAlerter(Alerter):
......@@ -22,6 +23,8 @@ class OpsGenieAlerter(Alerter):
self.tags = self.rule.get('opsgenie_tags', []) + ['ElastAlert', self.rule['name']]
self.to_addr = self.rule.get('opsgenie_addr', 'https://api.opsgenie.com/v1/json/alert')
self.custom_message = self.rule.get('opsgenie_message')
self.opsgenie_subject = self.rule.get('opsgenie_subject')
self.opsgenie_subject_args = self.rule.get('opsgenie_subject_args')
self.alias = self.rule.get('opsgenie_alias')
self.opsgenie_proxy = self.rule.get('opsgenie_proxy', None)
......@@ -34,7 +37,7 @@ class OpsGenieAlerter(Alerter):
body += '\n----------------------------------------\n'
if self.custom_message is None:
self.message = self.create_default_title(matches)
self.message = self.create_title(matches)
else:
self.message = self.custom_message.format(**matches[0])
......@@ -83,6 +86,30 @@ class OpsGenieAlerter(Alerter):
return subject
def create_title(self, matches):
    """Return the subject line for the OpsGenie alert.

    A rule-configured ``opsgenie_subject`` takes precedence; otherwise the
    default title is generated from the matches.
    """
    if not self.opsgenie_subject:
        return self.create_default_title(matches)
    return self.create_custom_title(matches)
def create_custom_title(self, matches):
    """Render the rule's ``opsgenie_subject`` format string.

    Each entry of ``opsgenie_subject_args`` is resolved, in order, from:
    the first match document (via ``lookup_es_key``), then the rule
    configuration itself, and finally the literal '<MISSING VALUE>'
    placeholder. The resolved values are substituted positionally into
    the subject format string.
    """
    opsgenie_subject = unicode(self.rule['opsgenie_subject'])
    if self.opsgenie_subject_args:
        # Look up each subject argument in the first match only.
        opsgenie_subject_values = [lookup_es_key(matches[0], arg) for arg in self.opsgenie_subject_args]
        for i in xrange(len(opsgenie_subject_values)):
            if opsgenie_subject_values[i] is None:
                # Fall back to a same-named value from the rule config, if present.
                alert_value = self.rule.get(self.opsgenie_subject_args[i])
                if alert_value:
                    opsgenie_subject_values[i] = alert_value
        # Anything still unresolved is shown as an explicit placeholder.
        opsgenie_subject_values = ['<MISSING VALUE>' if val is None else val for val in opsgenie_subject_values]
        return opsgenie_subject.format(*opsgenie_subject_values)
    # No args configured: the subject is used verbatim.
    return opsgenie_subject
def get_info(self):
ret = {'type': 'opsgenie'}
if self.recipients:
......
......@@ -527,12 +527,17 @@ class FlatlineRule(FrequencyRule):
event.update(key=key, count=count)
self.add_match(event)
# After adding this match, leave the occurrences windows alone since it will
# be pruned in the next add_data or garbage_collect, but reset the first_event
# so that alerts continue to fire until the threshold is passed again.
least_recent_ts = self.get_ts(self.occurrences[key].data[0])
timeframe_ago = most_recent_ts - self.rules['timeframe']
self.first_event[key] = min(least_recent_ts, timeframe_ago)
if not self.rules.get('forget_keys'):
# After adding this match, leave the occurrences windows alone since it will
# be pruned in the next add_data or garbage_collect, but reset the first_event
# so that alerts continue to fire until the threshold is passed again.
least_recent_ts = self.get_ts(self.occurrences[key].data[0])
timeframe_ago = most_recent_ts - self.rules['timeframe']
self.first_event[key] = min(least_recent_ts, timeframe_ago)
else:
# Forget about this key until we see it again
self.first_event.pop(key)
self.occurrences.pop(key)
def get_match_str(self, match):
ts = match[self.rules['timestamp_field']]
......
......@@ -179,7 +179,7 @@ properties:
alert_text: {type: string} # Python format string
alert_text_args: {type: array, items: {type: string}}
alert_text_kw: {type: object}
alert_text_type: {enum: [alert_text_only, exclude_fields]}
alert_text_type: {enum: [alert_text_only, exclude_fields, aggregation_summary_only]}
alert_missing_value: {type: string}
timestamp_field: {type: string}
field: {}
......
......@@ -8,7 +8,7 @@ from setuptools import setup
base_dir = os.path.dirname(__file__)
setup(
name='elastalert',
version='0.1.28',
version='0.1.29',
description='Runs custom filters on Elasticsearch and alerts on matches',
author='Quentin Long',
author_email='qlo@yelp.com',
......
......@@ -878,6 +878,34 @@ def test_flatline_query_key():
assert set(['key1', 'key2', 'key3']) == set([m['key'] for m in rule.matches if m['@timestamp'] == timestamp])
def test_flatline_forget_query_key():
    """Flatline with forget_keys=True alerts once per key, then forgets it."""
    rules = {'timeframe': datetime.timedelta(seconds=30),
             'threshold': 1,
             'query_key': 'qk',
             'forget_keys': True,
             'timestamp_field': '@timestamp'}
    rule = FlatlineRule(rules)
    # Adding two separate query keys, the flatline rule should trigger for both
    rule.add_data(hits(1, qk='key1'))
    assert rule.matches == []
    # This will be run at the end of the hits
    rule.garbage_collect(ts_to_dt('2014-09-26T12:00:11Z'))
    assert rule.matches == []
    # key1 has gone flatline past the 30s timeframe, so it alerts exactly once
    timestamp = '2014-09-26T12:00:45Z'
    rule.garbage_collect(ts_to_dt(timestamp))
    assert len(rule.matches) == 1
    rule.matches = []
    # key1 was forgotten, so no more matches
    rule.garbage_collect(ts_to_dt('2014-09-26T12:01:11Z'))
    assert rule.matches == []
def test_cardinality_max():
rules = {'max_cardinality': 4,
'timeframe': datetime.timedelta(minutes=10),
......@@ -1145,7 +1173,7 @@ def test_metric_aggregation_complex_query_key():
{"cpu_pct_avg": {"value": 0.91}, "key": "sub_qk_val1"},
{"cpu_pct_avg": {"value": 0.95}, "key": "sub_qk_val2"},
{"cpu_pct_avg": {"value": 0.89}, "key": "sub_qk_val3"}]
}, "key": "qk_val"}
}, "key": "qk_val"}
rule = MetricAggregationRule(rules)
rule.check_matches(datetime.datetime.now(), 'qk_val', query)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment