|
1 | | -import sys |
| 1 | +#!/usr/bin/env python3 |
| 2 | +# -*- coding: utf-8 -*- |
| 3 | + |
| 4 | +# SPDX-FileCopyrightText: 2017 Dale McDiarmid <[email protected]> |
| 5 | +# SPDX-FileCopyrightText: 2017-2020 Robin Schneider <[email protected]> |
| 6 | +# SPDX-FileCopyrightText: 2020 Dan Roscigno <[email protected]> |
| 7 | +# SPDX-License-Identifier: Apache-2.0 |
2 | 8 |
|
3 | | - |
| 9 | +from __future__ import (print_function, unicode_literals, |
| 10 | + absolute_import, division) |
4 | 11 |
|
5 | 12 | import datetime |
| 13 | +import json |
| 14 | +import logging |
| 15 | +import subprocess |
| 16 | +import sys |
| 17 | + |
| 18 | +import yaml |
| 19 | + |
6 | 20 | from elasticsearch7 import Elasticsearch |
7 | 21 | from elasticsearch7.client.ingest import IngestClient |
8 | | -import argparse |
9 | | -import json |
10 | 22 |
|
11 | | -parser = argparse.ArgumentParser(description='Index Connection Log data into ES with the last event at the current time') |
12 | | -parser.add_argument('--user',help='user') |
13 | | -parser.add_argument('--password',help='password') |
14 | | -parser.add_argument('--endpoint',help='endpoint') |
15 | | -parser.add_argument('--port',help='port') |
16 | | -parser.add_argument('--protocol',help='protocol') |
17 | | -parser.add_argument('--test_file',help='test file') |
18 | | - |
19 | | -parser.set_defaults(endpoint='localhost',port="9200",protocol="http",test_file='data.json',user='elastic',password='changeme') |
20 | | -args = parser.parse_args() |
21 | | -es = Elasticsearch([args.protocol+"://"+args.endpoint+":"+args.port],http_auth=(args.user, args.password)) |
22 | | - |
23 | | -def find_item(list, key): |
24 | | - for item in list: |
25 | | - if key in item: |
26 | | - return item |
27 | | - return None |
28 | | - |
29 | | -with open(args.test_file,'r') as test_file: |
30 | | - test=json.loads(test_file.read()) |
31 | | - try: |
32 | | - es.indices.delete(test['index']) |
33 | | - except: |
34 | | - print("Unable to delete current dataset") |
35 | | - pass |
36 | | - with open(test['mapping_file'],'r') as mapping_file: |
37 | | - es.indices.create(index=test["index"],body=json.loads(mapping_file.read())) |
38 | | - params={} |
39 | | - if "ingest_pipeline_file" in test: |
40 | | - with open(test['ingest_pipeline_file'],'r') as ingest_pipeline_file: |
41 | | - pipeline=json.loads(ingest_pipeline_file.read()) |
| 23 | + |
def set_value_as_default_for_leaf(nested_dict, path_exp, value):
    """Set *value* at the leaf addressed by *path_exp*, unless already set.

    Walks *nested_dict* along the sequence of keys in *path_exp* (e.g.
    ``['a', 'b']`` addresses ``nested_dict['a']['b']``) and sets the leaf
    key to *value* via ``setdefault``, so an existing leaf value is never
    overwritten.  If any intermediate key is missing, or *path_exp* is
    empty, the call is a silent no-op — no intermediate dicts are created.

    Parameters:
        nested_dict: dict to modify in place.
        path_exp: sequence of keys naming the leaf ("path expression").
        value: default value to install at the leaf.
    """
    # Guard: an empty path addresses nothing (the previous recursive
    # version raised IndexError on path_exp[0] in this case).
    if not path_exp:
        return
    # Iteratively descend to the parent dict of the leaf; bail out as
    # soon as the path is broken, mirroring the original recursion.
    current = nested_dict
    for key in path_exp[:-1]:
        if key not in current:
            return
        current = current[key]
    current.setdefault(path_exp[-1], value)
| 29 | + |
| 30 | + |
def load_file(serialized_file):
    """Load and deserialize a JSON or YAML file.

    The serialization format is chosen from the file extension:
    ``.json`` is parsed with :func:`json.load`, ``.yml``/``.yaml`` with
    :func:`yaml.safe_load`.

    Parameters:
        serialized_file: path to the file to load.

    Returns:
        The deserialized object (typically a dict).

    Raises:
        ValueError: if the extension is not one of the supported ones.
            (The previous version left ``decoded_object`` unbound in
            that case and died with a confusing ``UnboundLocalError``.)
    """
    # Pick the parser before touching the filesystem so an unsupported
    # extension fails fast with a clear message.
    if serialized_file.endswith('.json'):
        parse = json.load
    elif serialized_file.endswith(('.yml', '.yaml')):
        parse = yaml.safe_load
    else:
        raise ValueError(
            "Unsupported file extension for {!r};"
            " expected .json, .yml or .yaml".format(serialized_file))
    with open(serialized_file, 'r') as serialized_file_fh:
        return parse(serialized_file_fh)
| 38 | + |
| 39 | + |
if __name__ == '__main__':
    # CLI entry point: loads a test definition file, (optionally) indexes
    # its events into Elasticsearch, installs the watch, force-executes it
    # and compares the watch's logging-action output against the expected
    # response, exiting 0 on PASS and 1 on FAIL.
    import argparse

    parser = argparse.ArgumentParser(description='Index Connection Log data into ES with the last event at the current time')
    parser.add_argument('-v', '--verbose', help='verbose output', action='store_true')
    parser.add_argument('--endpoint', help='endpoint')
    parser.add_argument('--port', help='port')
    parser.add_argument('--protocol', help='protocol')
    parser.add_argument('--cacert', help='CA certificate to trust for HTTPS')
    parser.add_argument('--user', help='user')
    parser.add_argument('--password', help='password')
    parser.add_argument('--test_file', help='test file')
    parser.add_argument(
        '--minify-scripts',
        help='Minify script source code as workaround for' +
        ' "Scripts may be no longer than 16384 characters." in ES < v6.6.',
        action='store_true')
    # Ref: https://github.com/elastic/elasticsearch/pull/35184
    parser.add_argument('--keep-index', help='Keep the index where test documents have been loaded to after the test', action='store_true')
    parser.add_argument('--metadata-git-commit', help='Include the git commit hash in the metadata field of the watcher', action='store_true')
    parser.add_argument('--modify-watch-by-eval', help='Python code to modify the watch before loading it into Elastic')
    parser.add_argument(
        '--no-test-index',
        help='Don’t put the test data into an index.',
        action='store_false',
        dest='test_index')
    parser.add_argument(
        '--no-execute-watch',
        help='Do not force watch execution. This can be useful when you use this script to deploy the watch.',
        action='store_false',
        dest='execute_watch')

    parser.set_defaults(endpoint='localhost', port="9200", protocol="http", test_file='data.json', user='elastic', password='changeme')
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    # ca_certs is None unless --cacert was given; http_auth defaults to
    # elastic/changeme from set_defaults above.
    es = Elasticsearch([args.protocol+"://"+args.endpoint+":"+args.port], http_auth=(args.user, args.password), ca_certs=args.cacert)

    # Test definition (JSON or YAML): index name, mapping/watch/pipeline
    # file paths, events to index and the expected logging output.
    test = load_file(args.test_file)

    if args.test_index:
        # Load Mapping
        # Best-effort delete of a stale index from a previous run; the
        # broad catch deliberately continues when the index is absent.
        try:
            es.indices.delete(test['index'])
        except Exception as err:
            print("Unable to delete current dataset")
            pass
        index_template = load_file(test['mapping_file'])
        # The mapping file may be an index *template*; strip the
        # template-only keys that indices.create would reject.
        for unneeded_keys in ['order', 'version', 'index_patterns']:
            index_template.pop(unneeded_keys, None)
        es.indices.create(index=test["index"], body=index_template)

        # Load pipeline if its declared
        params = {}
        if "ingest_pipeline_file" in test:
            pipeline = load_file(test['ingest_pipeline_file'])
            p = IngestClient(es)
            # Pipeline is registered under the watch name and applied to
            # every indexed event via the `pipeline` request param below.
            p.put_pipeline(id=test["watch_name"], body=pipeline)
            params["pipeline"] = test["watch_name"]

        # Index data
        # NOTE(review): `last_time` is assigned here but never read.
        current_data = last_time = datetime.datetime.utcnow()
        i = 0
        # Accept either `time_fields` (list) or legacy `time_field`
        # (string); default is "@timestamp". Dotted names address nested
        # leaves via set_value_as_default_for_leaf.
        time_fields = test.get('time_fields', test.get('time_field', '@timestamp'))
        time_fields = set([time_fields] if isinstance(time_fields, str) else time_fields)
        for event in test['events']:
            # All offsets are in seconds.
            event_time = current_data+datetime.timedelta(seconds=int(event.get('offset', 0)))
            for time_field in time_fields:
                time_field = time_field.split('.')
                # setdefault semantics: an explicit timestamp in the
                # event wins over the computed one.
                set_value_as_default_for_leaf(event, time_field, event_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
            # Sequential counter `i` is the fallback document id.
            es.index(index=test['index'], body=event, id=event.get('id', i), params=params)
            i += 1
        # Refresh so the watch's search sees the documents immediately.
        es.indices.refresh(index=test["index"])

    # Load Scripts
    if 'scripts' in test:
        for script in test['scripts']:
            script_content = load_file(script['path'])
            if args.minify_scripts:
                # https://stackoverflow.com/questions/30795954/how-to-uglify-or-minify-c-code
                # Pipes the Painless source through gcc's preprocessor to
                # strip comments/whitespace; requires gcc on PATH.
                p = subprocess.Popen(['gcc', '-fpreprocessed', '-dD', '-E', '-P', '-'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
                script_content['script']['source'] = p.communicate(input=script_content['script']['source'].encode('utf-8'))[0].decode('utf-8')
            es.put_script(id=script["name"], body=script_content)

    # Load Watch and Execute
    watch = load_file(test['watch_file'])

    if args.modify_watch_by_eval:
        # SECURITY: executes arbitrary operator-supplied Python with
        # `watch` in scope — acceptable only because the CLI user is
        # already trusted with the cluster credentials.
        eval(compile(args.modify_watch_by_eval, '<string>', 'exec'))

    if args.metadata_git_commit:
        watch.setdefault('metadata', {})
        # NOTE(review): check_output returns bytes on Python 3, so
        # git_commit_hash is stored as bytes here — confirm downstream
        # consumers of the metadata accept that.
        watch['metadata']['git_commit_hash'] = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
        watch['metadata']['git_uncommitted_changes'] = True if len(subprocess.check_output(['git', 'status', '--porcelain']).strip()) > 0 else False

    es.watcher.put_watch(id=test["watch_name"], body=watch)

    if args.execute_watch:
        response = es.watcher.execute_watch(id=test["watch_name"])

    # Cleanup after the test to not pollute the environment for other tests.
    if not args.keep_index:
        try:
            es.indices.delete(test['index'])
        except Exception as err:
            print("Unable to delete current dataset")
            pass

    # Confirm Matches
    # NOTE(review): `response` is only bound when --no-execute-watch was
    # NOT given; with watch execution disabled everything below raises
    # NameError — presumably the deploy-only use case is expected to
    # ignore the resulting traceback. TODO confirm / guard.
    match = test['match'] if 'match' in test else True
    print("Expected: Watch Condition: {}".format(match))
    if 'condition' not in response['watch_record']['result']:
        print("Condition not evaluated due to watch error: {}".format(
            json.dumps(response['watch_record'], sort_keys=True, indent=2)
        ))
        print("TEST FAIL")
        sys.exit(1)
    met = response['watch_record']['result']['condition']['met']
    print("Received: Watch Condition: {}".format(met))
    if match:
        if met and response['watch_record']['result']['condition']['status'] == "success":
            print("Expected: {}".format(test.get('expected_response')))
            # The watch fired but produced no actions — distinguish a
            # failed transform from a plain empty action list for the log.
            if len(response['watch_record']['result']['actions']) == 0:
                if response['watch_record']['result']['transform']['status'] == 'failure':
                    print("No actions where taken because transform failed: {}".format(
                        json.dumps(response['watch_record']['result'], sort_keys=True, indent=2)
                    ))
                else:
                    print("No actions where taken: {}".format(
                        json.dumps(response['watch_record']['result'], sort_keys=True, indent=2)
                    ))
                print("TEST FAIL")
                sys.exit(1)

            # The framework compares against the first logging-type
            # action only; any other action types are ignored.
            logging_action = next((action for action in response['watch_record']['result']['actions'] if action["type"] == "logging"), None)
            if logging_action is None:
                print("No logging actions was taken. This test framework uses the logging action for comparison so you might need enable this action.")
                print("TEST FAIL")
                sys.exit(1)
            if logging_action.get('transform', {}).get('status', 'success') != 'success':
                print("Logging transform script failed: {}".format(
                    json.dumps(logging_action.get('transform', {}), sort_keys=True, indent=2),
                ))
                print("TEST FAIL")
                sys.exit(1)
            if 'logging' not in logging_action:
                print("Logging action is not present: {}".format(logging_action))
                print("TEST FAIL")
                sys.exit(1)
            # NOTE(review): this local shadows the stdlib `logging`
            # module for the remainder of the block — intentional-looking
            # but worth renaming in a future cleanup.
            logging = logging_action['logging']
            if logging:
                print("Received: {}".format(logging['logged_text']))
                if logging['logged_text'] == test.get('expected_response'):
                    print("TEST PASS")
                    sys.exit(0)
            else:
                print("Logging action required for testing")
                print("TEST FAIL")
                sys.exit(1)
        else:
            # match expected to be False: a non-met condition is a PASS.
            # sys.exit(met) maps met=True -> exit 1 (FAIL), False -> 0.
            print("TEST {}".format("FAIL" if response['watch_record']['result']['condition']['met'] else "PASS"))
            sys.exit(met)
0 commit comments