From f8fed9c7ddec3bedef1b4932326f64d1ed511efb Mon Sep 17 00:00:00 2001 From: Sergey Skripnick Date: Fri, 14 Apr 2017 17:37:44 +0300 Subject: [PATCH 01/19] Fix missing packages in setup.py Closes #665 --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 325779617..ae0d189f6 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ # limitations under the License. import re import ast -from setuptools import setup +from setuptools import setup, find_packages _version_re = re.compile(r'__version__\s+=\s+(.*)') with open('security_monkey/__init__.py', 'rb') as f: @@ -24,7 +24,7 @@ name='security_monkey', version=SECURITY_MONKEY_VERSION, long_description=__doc__, - packages=['security_monkey'], + packages=find_packages(exclude=["tests"]), include_package_data=True, zip_safe=False, install_requires=[ From 061442002059b8745e72b47c3048fd3a4ce1477f Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Fri, 14 Apr 2017 16:20:29 +0000 Subject: [PATCH 02/19] Explicitly adding urllib3[secure] to setup.py to fix issue #620 --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index ae0d189f6..42817eaa4 100644 --- a/setup.py +++ b/setup.py @@ -57,6 +57,7 @@ 'cloudaux>=1.1.5', 'joblib>=0.9.4', 'pyjwt>=1.01', + 'urllib3[secure]' ], extras_require = { 'onelogin': ['python-saml>=2.2.0'], From f05ee7d252ab23a13b1754e833fd4f38b19d2c05 Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Fri, 14 Apr 2017 17:19:27 +0000 Subject: [PATCH 03/19] Adding IPv6 support to the securitygroup watcher. Should fix issue #507. --- security_monkey/watchers/security_group.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/security_monkey/watchers/security_group.py b/security_monkey/watchers/security_group.py index 782732a3f..bff2ea910 100644 --- a/security_monkey/watchers/security_group.py +++ b/security_monkey/watchers/security_group.py @@ -66,7 +66,13 @@ def _build_rule(self, rule, rule_type): new_rule=rule_config.copy() new_rule['cidr_ip'] = ips.get('CidrIp') rule_list.append(new_rule) - + + for ips in rule.get('Ipv6Ranges'): + #make a copy of the base rule info. + new_rule=rule_config.copy() + new_rule['cidr_ip'] = ips.get('CidrIpv6') + rule_list.append(new_rule) + for user_id_group_pairs in rule.get('UserIdGroupPairs'): #make a copy of the base rule info. new_rule=rule_config.copy() From cf3989494896b617070876d254aacab363d348de Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Fri, 14 Apr 2017 18:25:45 +0000 Subject: [PATCH 04/19] Updating the security group auditor to treat ::/0 the same as 0.0.0.0/0. 
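For reference, the watcher change in PATCH 03 walks Ipv6Ranges alongside IpRanges when it flattens an EC2 permission into per-CIDR rules, so an IPv6 CIDR such as ::/0 lands in the same cidr_ip field this auditor change inspects. The sketch below shows that flattening against a hand-written permission dict shaped like the DescribeSecurityGroups output; the flatten_rules helper and the sample data are illustrative only, not code from this patch.

    # Illustrative only: one permission shaped like the EC2 DescribeSecurityGroups
    # response, carrying both an IPv4 range and an IPv6 range.
    permission = {
        'IpProtocol': 'tcp',
        'FromPort': 443,
        'ToPort': 443,
        'IpRanges': [{'CidrIp': '0.0.0.0/0'}],
        'Ipv6Ranges': [{'CidrIpv6': '::/0'}],
    }

    def flatten_rules(permission, rule_type='ingress'):
        # One rule dict per CIDR, IPv4 or IPv6, mirroring the watcher loop.
        base = {
            'ip_protocol': permission.get('IpProtocol'),
            'from_port': permission.get('FromPort'),
            'to_port': permission.get('ToPort'),
            'rule_type': rule_type,
        }
        rules = []
        for ip_range in permission.get('IpRanges', []):
            rule = base.copy()
            rule['cidr_ip'] = ip_range.get('CidrIp')
            rules.append(rule)
        for ip_range in permission.get('Ipv6Ranges', []):
            rule = base.copy()
            rule['cidr_ip'] = ip_range.get('CidrIpv6')
            rules.append(rule)
        return rules

    # Prints ['0.0.0.0/0', '::/0'] -- both reach the auditor through cidr_ip.
    print([r['cidr_ip'] for r in flatten_rules(permission)])

With ::/0 flattened into cidr_ip, the auditor checks below only need to compare it the same way they already compare 0.0.0.0/0.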
--- security_monkey/auditors/security_group.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/security_monkey/auditors/security_group.py b/security_monkey/auditors/security_group.py index 6a244b60c..106ffd3a0 100644 --- a/security_monkey/auditors/security_group.py +++ b/security_monkey/auditors/security_group.py @@ -140,7 +140,7 @@ def check_securitygroup_large_subnet(self, sg_item): for rule in sg_item.config.get("rules", []): cidr = rule.get("cidr_ip", None) if cidr and not self._check_inclusion_in_network_whitelist(cidr): - if '/' in cidr and not cidr == "0.0.0.0/0" and not cidr == "10.0.0.0/8": + if '/' in cidr and not cidr == "0.0.0.0/0" and not cidr == "10.0.0.0/8" and not cidr == "::/0": mask = int(cidr.split('/')[1]) if mask < 24 and mask > 0: notes = "{} on {}".format(cidr, self._port_for_rule(rule)) @@ -156,7 +156,7 @@ def check_securitygroup_zero_subnet(self, sg_item): for rule in sg_item.config.get("rules", []): cidr = rule.get("cidr_ip", None) - if cidr and '/' in cidr and not cidr == "0.0.0.0/0" and not cidr == "10.0.0.0/8": + if cidr and '/' in cidr and not cidr == "0.0.0.0/0" and not cidr == "10.0.0.0/8" and not cidr == "::/0": mask = int(cidr.split('/')[1]) if mask == 0: notes = "{} on {}".format(cidr, self._port_for_rule(rule)) @@ -164,7 +164,7 @@ def check_securitygroup_zero_subnet(self, sg_item): def check_securitygroup_ingress_any(self, sg_item): """ - Make sure the SG does not contain any 0.0.0.0/0 ingress rules + Make sure the SG does not contain any 0.0.0.0/0 or ::/0 ingress rules """ tag = "Security Group ingress rule contains 0.0.0.0/0" severity = 10 @@ -176,10 +176,13 @@ def check_securitygroup_ingress_any(self, sg_item): if "0.0.0.0/0" == cidr and rtype == "ingress": notes = "{} on {}".format(cidr, self._port_for_rule(rule)) self.add_issue(severity * multiplier, tag, sg_item, notes=notes) + if "::/0" == cidr and rtype == "ingress": + notes = "{} on {}".format(cidr, self._port_for_rule(rule)) + self.add_issue(severity * multiplier, tag, sg_item, notes=notes) def check_securitygroup_egress_any(self, sg_item): """ - Make sure the SG does not contain any 0.0.0.0/0 egress rules + Make sure the SG does not contain any 0.0.0.0/0 or ::/0 egress rules """ tag = "Security Group egress rule contains 0.0.0.0/0" severity = 5 @@ -191,6 +194,9 @@ def check_securitygroup_egress_any(self, sg_item): if "0.0.0.0/0" == cidr and rtype == "egress": notes = "{} on {}".format(cidr, self._port_for_rule(rule)) self.add_issue(severity * multiplier, tag, sg_item, notes=notes) + if "::/0" == cidr and rtype == "egress": + notes = "{} on {}".format(cidr, self._port_for_rule(rule)) + self.add_issue(severity * multiplier, tag, sg_item, notes=notes) def check_securitygroup_10net(self, sg_item): """ From 2ea87c55df32da6fbbbf049bc9d7a84c08098617 Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Sat, 15 Apr 2017 00:34:46 +0000 Subject: [PATCH 05/19] Enhancing PolicyDiff to be able to handle non-ascii strings. 
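This patch converts PolicyDiff's format strings to u'' literals and its str() coercions to unicode(). The underlying failure: in Python 2, str() on a unicode value that contains non-ASCII characters implicitly encodes with the ASCII codec and raises UnicodeEncodeError, so policies with accented or non-Latin text could not be diffed. A minimal Python 2 illustration, using a made-up policy value:

    # Python 2 sketch: str() implicitly encodes with the ASCII codec, so a
    # unicode policy value containing non-ASCII characters blows up.
    description = u'Pol\u00edtica de acceso'    # hypothetical policy text

    unicode(description)             # fine: stays a unicode object

    try:
        str(description)             # UnicodeEncodeError: 'ascii' codec can't encode...
    except UnicodeEncodeError:
        pass

    # u''-prefixed format strings keep the rendered line unicode end to end.
    line = u'"{0}": "{1}",'.format(u'Description', description)

With every intermediate string already unicode, nothing has to pass through the implicit ASCII encode that the previous str literals and str() calls triggered.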
--- security_monkey/common/PolicyDiff.py | 106 +++++++++++++-------------- 1 file changed, 53 insertions(+), 53 deletions(-) diff --git a/security_monkey/common/PolicyDiff.py b/security_monkey/common/PolicyDiff.py index f12a50e28..4d4cde8c8 100644 --- a/security_monkey/common/PolicyDiff.py +++ b/security_monkey/common/PolicyDiff.py @@ -36,7 +36,7 @@ def escape(data): def i(indentation): - return '    '*indentation + return u'    '*indentation # ADDED @@ -53,20 +53,20 @@ def process_sub_dict(key, sda, sdb, indentation): brackets = get_brackets(sda) if type(sda) in [str, unicode]: if sda == sdb: - retstr += same("{4}\"{0}\": {2}{1}{3},".format(key, escape(sda), brackets['open'], brackets['close'], i(indentation))) + retstr += same(u"{4}\"{0}\": {2}{1}{3},".format(key, escape(sda), brackets['open'], brackets['close'], i(indentation))) else: - retstr += deleted("{4}\"{0}\": {2}{1}{3},".format(key, escape(sdb), brackets['open'], brackets['close'], i(indentation))) - retstr += added("{4}\"{0}\": {2}{1}{3},".format(key, escape(sda), brackets['open'], brackets['close'], i(indentation))) + retstr += deleted(u"{4}\"{0}\": {2}{1}{3},".format(key, escape(sdb), brackets['open'], brackets['close'], i(indentation))) + retstr += added(u"{4}\"{0}\": {2}{1}{3},".format(key, escape(sda), brackets['open'], brackets['close'], i(indentation))) elif type(sda) in [bool, type(None), int, float]: if sda == sdb: - retstr += same("{2}\"{0}\": {1},".format(key, json.dumps(sda), i(indentation))) + retstr += same(u"{2}\"{0}\": {1},".format(key, json.dumps(sda), i(indentation))) else: - retstr += deleted("{2}\"{0}\": {1},".format(key, json.dumps(sdb), i(indentation))) - retstr += added("{2}\"{0}\": {1},".format(key, json.dumps(sda), i(indentation))) + retstr += deleted(u"{2}\"{0}\": {1},".format(key, json.dumps(sdb), i(indentation))) + retstr += added(u"{2}\"{0}\": {1},".format(key, json.dumps(sda), i(indentation))) elif type(sda) is dict: - retstr += same("{4}\"{0}\": {2}
\n{1}{4}{3},".format(key, diff_dict(sda, sdb, indentation + 1), brackets['open'], brackets['close'], i(indentation))) + retstr += same(u"{4}\"{0}\": {2}
\n{1}{4}{3},".format(key, diff_dict(sda, sdb, indentation + 1), brackets['open'], brackets['close'], i(indentation))) elif type(sda) is list: - retstr += same("{4}\"{0}\": {2}
\n{1}{4}{3},".format(key, diff_list(sda, sdb, indentation + 1), brackets['open'], brackets['close'], i(indentation))) + retstr += same(u"{4}\"{0}\": {2}
\n{1}{4}{3},".format(key, diff_list(sda, sdb, indentation + 1), brackets['open'], brackets['close'], i(indentation))) else: print "process_sub_dict - Unexpected type {}".format(type(sda)) return retstr @@ -76,7 +76,7 @@ def print_list(structure, action, indentation): retstr = '' for value in structure: brackets = form_brackets(value, indentation) - new_value = "" + new_value = u"" if type(value) in [str, unicode, int, float]: new_value = escape(value) elif type(value) in [bool, type(None)]: @@ -88,7 +88,7 @@ def print_list(structure, action, indentation): else: print "print_list - Unexpected type {}".format(type(value)) - content = "{3}{1}{0}{2},".format(new_value, brackets['open'], brackets['close'], i(indentation)) + content = u"{3}{1}{0}{2},".format(new_value, brackets['open'], brackets['close'], i(indentation)) if action is 'same': retstr += same(content) @@ -100,11 +100,11 @@ def print_list(structure, action, indentation): def print_dict(structure, action, indentation): - retstr = '' + retstr = u'' for key in structure.keys(): value = structure[key] brackets = form_brackets(value, indentation) - new_value = '' + new_value = u'' if type(value) in [str, unicode, int, float]: new_value = escape(value) elif type(value) in [bool, type(None)]: @@ -116,7 +116,7 @@ def print_dict(structure, action, indentation): else: print "print_dict - Unexpected type {}".format(type(value)) - content = "{4}\"{0}\": {2}{1}{3},".format( + content = u"{4}\"{0}\": {2}{1}{3},".format( escape(key), new_value, brackets['open'], @@ -144,7 +144,7 @@ def print_item(value, action, indentlevel): return print_list(value, action, indentlevel) else: print "print_item - Unexpected diff_dict type {}".format(type(value)) - return '' + return u'' def diff_dict(dicta, dictb, indentation): @@ -156,24 +156,24 @@ def diff_dict(dicta, dictb, indentation): if not keya in dictb: brackets = get_brackets(dicta[keya]) if type(dicta[keya]) in [str, unicode, int, float, bool, type(None)]: - retstr += added("{4}\"{0}\": {2}{1}{3},".format(keya, print_item(dicta[keya], 'added', indentation + 1), brackets['open'], brackets['close'], i(indentation))) + retstr += added(u"{4}\"{0}\": {2}{1}{3},".format(keya, print_item(dicta[keya], 'added', indentation + 1), brackets['open'], brackets['close'], i(indentation))) if type(dicta[keya]) in [list, dict]: - retstr += added("{4}\"{0}\": {2}
\n{1}{4}{3},".format(keya, print_item(dicta[keya], 'added', indentation + 1), brackets['open'], brackets['close'], i(indentation))) + retstr += added(u"{4}\"{0}\": {2}
\n{1}{4}{3},".format(keya, print_item(dicta[keya], 'added', indentation + 1), brackets['open'], brackets['close'], i(indentation))) else: if not type(dicta[keya]) is type(dictb[keya]): brackets = get_brackets(dictb[keya]) - retstr += deleted("{4}\"{0}\": {2}{1}{3},".format(keya, dictb[keya], brackets['open'], brackets['close'], i(indentation))) + retstr += deleted(u"{4}\"{0}\": {2}{1}{3},".format(keya, dictb[keya], brackets['open'], brackets['close'], i(indentation))) brackets = get_brackets(dicta[keya]) - retstr += added("{4}\"{0}\": {2}{1}{3},".format(keya, dicta[keya], brackets['open'], brackets['close'], i(indentation))) + retstr += added(u"{4}\"{0}\": {2}{1}{3},".format(keya, dicta[keya], brackets['open'], brackets['close'], i(indentation))) else: retstr += process_sub_dict(keya, dicta[keya], dictb[keya], indentation) for keyb in dictb.keys(): if not keyb in dicta: brackets = get_brackets(dictb[keyb]) if type(dictb[keyb]) in [str, unicode, int, float, bool, type(None)]: - retstr += deleted("{4}\"{0}\": {2}{1}{3},".format(keyb, print_item(dictb[keyb], 'deleted', indentation + 1), brackets['open'], brackets['close'], i(indentation))) + retstr += deleted(u"{4}\"{0}\": {2}{1}{3},".format(keyb, print_item(dictb[keyb], 'deleted', indentation + 1), brackets['open'], brackets['close'], i(indentation))) if type(dictb[keyb]) in [list, dict]: - retstr += deleted("{4}\"{0}\": {2}
\n{1}{4}{3},".format(keyb, print_item(dictb[keyb], 'deleted', indentation + 1), brackets['open'], brackets['close'], i(indentation))) + retstr += deleted(u"{4}\"{0}\": {2}
\n{1}{4}{3},".format(keyb, print_item(dictb[keyb], 'deleted', indentation + 1), brackets['open'], brackets['close'], i(indentation))) return remove_last_comma(retstr) @@ -210,12 +210,12 @@ def diff_list(lista, listb, indentation): if item in listb: brackets = get_brackets(item) if type(item) in [str, unicode, int, float]: - retstr += same("{3}{1}{0}{2},".format(escape(item), brackets['open'], brackets['close'], i(indentation))) + retstr += same(u"{3}{1}{0}{2},".format(escape(item), brackets['open'], brackets['close'], i(indentation))) elif type(item) in [bool, type(None)]: - retstr += same("{3}{1}{0}{2},".format(json.dumps(item), brackets['open'], brackets['close'], i(indentation))) + retstr += same(u"{3}{1}{0}{2},".format(json.dumps(item), brackets['open'], brackets['close'], i(indentation))) elif type(item) in [list, dict]: diffstr = print_item(item, 'same', indentation + 1) - retstr += same("{3}{1}
\n{0}{3}{2},".format(diffstr, brackets['open'], brackets['close'], i(indentation))) + retstr += same(u"{3}{1}
\n{0}{3}{2},".format(diffstr, brackets['open'], brackets['close'], i(indentation))) else: print "diff_list - Unexpected Type {}".format(type(item)) else: @@ -230,27 +230,27 @@ def diff_list(lista, listb, indentation): brackets = get_brackets(item) if None is bestmatch: if type(item) in [str, unicode, int, float]: - retstr += added("{3}{1}{0}{2},".format(escape(item), brackets['open'], brackets['close'], i(indentation))) + retstr += added(u"{3}{1}{0}{2},".format(escape(item), brackets['open'], brackets['close'], i(indentation))) elif type(item) in [bool, type(None)]: - retstr += added("{3}{1}{0}{2},".format(json.dumps(item), brackets['open'], brackets['close'], i(indentation))) + retstr += added(u"{3}{1}{0}{2},".format(json.dumps(item), brackets['open'], brackets['close'], i(indentation))) elif type(item) in [list, dict]: diffstr = print_item(item, 'added', indentation + 1) - retstr += added("{3}{1}
\n{0}{3}{2},".format(diffstr, brackets['open'], brackets['close'], i(indentation))) + retstr += added(u"{3}{1}
\n{0}{3}{2},".format(diffstr, brackets['open'], brackets['close'], i(indentation))) else: print "diff_list - Unexpected Type {}".format(type(item)) else: if type(item) in [str, unicode, int, float]: - retstr += deleted("{3}{1}{0}{2},".format(escape(bestmatch), brackets['open'], brackets['close'], i(indentation))) - retstr += added("{3}{1}{0}{2},".format(escape(item), brackets['open'], brackets['close'], i(indentation))) + retstr += deleted(u"{3}{1}{0}{2},".format(escape(bestmatch), brackets['open'], brackets['close'], i(indentation))) + retstr += added(u"{3}{1}{0}{2},".format(escape(item), brackets['open'], brackets['close'], i(indentation))) elif type(item) in [bool, type(None)]: - retstr += deleted("{3}{1}{0}{2},".format(json.dumps(bestmatch), brackets['open'], brackets['close'], i(indentation))) - retstr += added("{3}{1}{0}{2},".format(json.dumps(item), brackets['open'], brackets['close'], i(indentation))) + retstr += deleted(u"{3}{1}{0}{2},".format(json.dumps(bestmatch), brackets['open'], brackets['close'], i(indentation))) + retstr += added(u"{3}{1}{0}{2},".format(json.dumps(item), brackets['open'], brackets['close'], i(indentation))) elif type(item) is list: diffstr = diff_list(item, bestmatch, indentation + 1) - retstr += same("{3}{1}
\n{0}{3}{2},".format(diffstr, brackets['open'], brackets['close'], i(indentation))) + retstr += same(u"{3}{1}
\n{0}{3}{2},".format(diffstr, brackets['open'], brackets['close'], i(indentation))) elif type(item) is dict: diffstr = diff_dict(item, bestmatch, indentation + 1) - retstr += same("{3}{1}
\n{0}{3}{2},".format(diffstr, brackets['open'], brackets['close'], i(indentation))) + retstr += same(u"{3}{1}
\n{0}{3}{2},".format(diffstr, brackets['open'], brackets['close'], i(indentation))) else: print "diff_list - Unexpected Type {}".format(type(item)) deletedlist.remove(bestmatch) @@ -258,12 +258,12 @@ def diff_list(lista, listb, indentation): for item in deletedlist: brackets = get_brackets(item) if type(item) in [str, unicode, int, float]: - retstr += deleted("{3}{1}{0}{2},".format(escape(item), brackets['open'], brackets['close'], i(indentation))) + retstr += deleted(u"{3}{1}{0}{2},".format(escape(item), brackets['open'], brackets['close'], i(indentation))) elif type(item) in [bool, type(None)]: - retstr += deleted("{3}{1}{0}{2},".format(json.dumps(item), brackets['open'], brackets['close'], i(indentation))) + retstr += deleted(u"{3}{1}{0}{2},".format(json.dumps(item), brackets['open'], brackets['close'], i(indentation))) elif type(item) in [list, dict]: diffstr = print_item(item, 'deleted', indentation + 1) - retstr += deleted("{3}{1}
\n{0}{3}{2},".format(diffstr, brackets['open'], brackets['close'], i(indentation))) + retstr += deleted(u"{3}{1}
\n{0}{3}{2},".format(diffstr, brackets['open'], brackets['close'], i(indentation))) else: print "diff_list - Unexpected Type {}".format(type(item)) return remove_last_comma(retstr) @@ -290,7 +290,7 @@ def str_distance(a, b): def find_most_similar(item, list): - stritem = str(item) + stritem = unicode(item) mindistance = sys.maxint bestmatch = None @@ -301,7 +301,7 @@ def find_most_similar(item, list): for listitem in list: if type(listitem) == type(item): - strlistitem = str(listitem) + strlistitem = unicode(listitem) distance = str_distance(stritem, strlistitem) if distance == 0: return listitem @@ -315,14 +315,14 @@ def form_brackets(value, indentation): brackets = {'open': '', 'close': ''} if type(value) in [str, unicode]: - brackets['open'] = '"' - brackets['close'] = '"' + brackets['open'] = u'"' + brackets['close'] = u'"' elif type(value) is dict: - brackets['open'] = '{
\n' - brackets['close'] = i(indentation) + '}' + brackets['open'] = u'{
\n' + brackets['close'] = i(indentation) + u'}' elif type(value) is list: - brackets['open'] = '[
\n' - brackets['close'] = i(indentation) + ']' + brackets['open'] = u'[
\n' + brackets['close'] = i(indentation) + u']' return brackets @@ -330,19 +330,19 @@ def get_brackets(item): brackets = {'open': '', 'close': ''} if type(item) in [str, unicode]: - brackets['open'] = '"' - brackets['close'] = '"' + brackets['open'] = u'"' + brackets['close'] = u'"' if type(item) is list: - brackets['open'] = '[' - brackets['close'] = ']' + brackets['open'] = u'[' + brackets['close'] = u']' if type(item) is dict: - brackets['open'] = '{' - brackets['close'] = '}' + brackets['open'] = u'{' + brackets['close'] = u'}' return brackets def color(text, color): - return "{1}
\n".format(color, text) + return u"{1}
\n".format(color, text) def added(text): @@ -412,7 +412,7 @@ def produceDiffHTML(self): return "No Policy.
" if isinstance(self._old_policy, basestring): - return "{0}
{1}".format(deleted(self._old_policy), added(self._new_policy)) + return u"{0}
{1}".format(deleted(self._old_policy), added(self._new_policy)) brackets = get_brackets(self._new_policy) @@ -423,4 +423,4 @@ def produceDiffHTML(self): else: raise ValueError("PolicyDiff::produceDiffHTML cannot process items of type: {}".format(type(self._new_policy))) - return "{1}
\n{0}{2}
\n".format(inner_html, brackets['open'], brackets['close']) + return u"{1}
\n{0}{2}
\n".format(inner_html, brackets['open'], brackets['close']) From 8eb8d3e5dcef7db9db11dc798c3aa234e8d586ad Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Sat, 15 Apr 2017 01:29:46 +0000 Subject: [PATCH 06/19] Fixing path to aws_accounts.json --- security_monkey/manage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/security_monkey/manage.py b/security_monkey/manage.py index ecddf6ea3..e29ae82c0 100644 --- a/security_monkey/manage.py +++ b/security_monkey/manage.py @@ -144,11 +144,11 @@ def clear_expired_exceptions(): @manager.command def amazon_accounts(): """ Pre-populates standard AWS owned accounts """ - import os import json from security_monkey.datastore import Account, AccountType + from os.path import dirname, join - data_file = os.path.join(os.path.dirname(__file__), "data", "aws_accounts.json") + data_file = join(dirname(dirname(__file__)), "data", "aws_accounts.json") data = json.load(open(data_file, 'r')) app.logger.info('Adding / updating Amazon owned accounts') From 128b559184277a391ce2b9093e0376cf5eef7d1c Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Sat, 15 Apr 2017 01:39:18 +0000 Subject: [PATCH 07/19] Adding a call to add amazon_accounts to our travis CI --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 3aec4e5b6..e40aa1ad9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -40,6 +40,7 @@ before_script: script: - sh env_tests/test_dart.sh + - monkey amazon_accounts - coverage run -a -m py.test security_monkey/tests/auditors || exit 1 - coverage run -a -m py.test security_monkey/tests/watchers || exit 1 - coverage run -a -m py.test security_monkey/tests/core || exit 1 From e83d3828cc3056e41303fd670792d0dde9f3dadf Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Sat, 15 Apr 2017 15:02:17 +0000 Subject: [PATCH 08/19] Adding package_data and data_files sections to setup.py --- setup.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/setup.py b/setup.py index 42817eaa4..05f085ddf 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,16 @@ version=SECURITY_MONKEY_VERSION, long_description=__doc__, packages=find_packages(exclude=["tests"]), + package_data={ + 'security_monkey': [ + 'templates/*.json', + 'templates/*.html', + 'templates/security/*.html', + ] + }, include_package_data=True, + data_files=[('env-config', ['env-config/config.py', 'env-config/config-docker.py']), + ('data', ['data/aws_accounts.json'])], zip_safe=False, install_requires=[ 'APScheduler==2.1.2', From 606dcfaa8b09953e6ffbf050218cb5c62ad1ddfb Mon Sep 17 00:00:00 2001 From: Will Bengtson Date: Mon, 17 Apr 2017 10:30:03 -0700 Subject: [PATCH 09/19] Commit the security trackable information and refresh user --- security_monkey/sso/views.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/security_monkey/sso/views.py b/security_monkey/sso/views.py index 13e22a6b6..c41fabad3 100644 --- a/security_monkey/sso/views.py +++ b/security_monkey/sso/views.py @@ -129,6 +129,8 @@ def post(self): # Tell Flask-Principal the identity changed identity_changed.send(current_app._get_current_object(), identity=Identity(user.id)) login_user(user) + db.session.commit() + db.session.refresh(user) return redirect(return_to, code=302) @@ -210,6 +212,8 @@ def post(self): # Tell Flask-Principal the identity changed identity_changed.send(current_app._get_current_object(), identity=Identity(user.id)) login_user(user) + db.session.commit() + db.session.refresh(user) return redirect(return_to, code=302) @@ -282,11 +286,13 @@ def 
post(self): # Tell Flask-Principal the identity changed identity_changed.send(current_app._get_current_object(), identity=Identity(user.id)) login_user(user) + db.session.commit() + db.session.refresh(user) self_url = OneLogin_Saml2_Utils.get_self_url(self.req) if 'RelayState' in request.form and self_url != request.form['RelayState']: return redirect(auth.redirect_to(request.form['RelayState']), code=302) - else: + else: return redirect(current_app.config.get('BASE_URL'), code=302) else: return dict(message='OneLogin authentication failed.'), 403 From b6ea2879f5559131904552b55e94d320b01748a5 Mon Sep 17 00:00:00 2001 From: AlexClineBB Date: Tue, 18 Apr 2017 10:15:54 -0400 Subject: [PATCH 10/19] Add reference_policies for TLS transitional ELB security policies This resolves the 'Unknown reference policy' ELB auditor issue for these AWS-provided ELB policies. --- security_monkey/auditors/elb.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/security_monkey/auditors/elb.py b/security_monkey/auditors/elb.py index d6ed51a75..af8a43b5e 100644 --- a/security_monkey/auditors/elb.py +++ b/security_monkey/auditors/elb.py @@ -258,6 +258,11 @@ def _process_reference_policy(self, reference_policy, policy_name, port, elb_ite # https://forums.aws.amazon.com/ann.jspa?annID=3996 return + if reference_policy == 'ELBSecurityPolicy-TLS-1-1-2017-01' or reference_policy == 'ELBSecurityPolicy-TLS-1-2-2017-01': + # Transitional policies for early TLS deprecation + # https://forums.aws.amazon.com/ann.jspa?annID=4475 + return + notes = reference_policy self.add_issue(10, "Unknown reference policy.", elb_item, notes=notes) From 1216699cd395fd997e42e6a4aefdf458bbde81ff Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Tue, 18 Apr 2017 19:33:21 +0000 Subject: [PATCH 11/19] Updating supervisor config to provide full path to monkey as it seems to ignore the venv added to the PATH variable --- supervisor/security_monkey.conf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/supervisor/security_monkey.conf b/supervisor/security_monkey.conf index b8105e916..d84c2286a 100644 --- a/supervisor/security_monkey.conf +++ b/supervisor/security_monkey.conf @@ -10,7 +10,7 @@ user=www-data autostart=true autorestart=true environment=PYTHONPATH='/usr/local/src/security_monkey/',PATH="/usr/local/src/security_monkey/venv/bin:%(ENV_PATH)s" -command=monkey run_api_server +command=/usr/local/src/security_monkey/venv/bin/monkey run_api_server [program:securitymonkeyscheduler] user=www-data @@ -18,4 +18,4 @@ autostart=true autorestart=true directory=/usr/local/src/security_monkey/ environment=PYTHONPATH='/usr/local/src/security_monkey/',PATH="/usr/local/src/security_monkey/venv/bin:%(ENV_PATH)s" -command=monkey start_scheduler +command=/usr/local/src/security_monkey/venv/bin/monkey start_scheduler From 1e798aac36440ce65fa976ca0823d31b60eb1799 Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Tue, 18 Apr 2017 21:41:59 +0000 Subject: [PATCH 12/19] No-opping DB migration b8ccf5b8089b --- migrations/versions/b8ccf5b8089b_.py | 37 +++++----------------------- 1 file changed, 6 insertions(+), 31 deletions(-) diff --git a/migrations/versions/b8ccf5b8089b_.py b/migrations/versions/b8ccf5b8089b_.py index ce36df97d..f3f63ee45 100644 --- a/migrations/versions/b8ccf5b8089b_.py +++ b/migrations/versions/b8ccf5b8089b_.py @@ -1,9 +1,9 @@ -"""Fetch the S3 Canonical IDs for all active AWS accounts. 
+"""Please run "monkey fetch_aws_canonical_ids" Revision ID: b8ccf5b8089b Revises: 908b0085d28d Create Date: 2017-03-23 11:00:43.792538 -Author: Mike Grima +Author: Mike Grima , No-op'ed by Patrick """ @@ -14,40 +14,15 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker -from security_monkey.manage import fetch_aws_canonical_ids - -Session = sessionmaker() -Base = declarative_base() - revision = 'b8ccf5b8089b' down_revision = '908b0085d28d' -class Account(Base): - """ - Meant to model AWS accounts. - """ - __tablename__ = "account" - id = sa.Column(sa.Integer, primary_key=True) - active = sa.Column(sa.Boolean()) - third_party = sa.Column(sa.Boolean()) - name = sa.Column(sa.String(32), index=True, unique=True) - notes = sa.Column(sa.String(256)) - identifier = sa.Column(sa.String(256)) # Unique id of the account, the number for AWS. - account_type_id = sa.Column(sa.Integer, sa.ForeignKey("account_type.id"), nullable=False) - unique_const = sa.UniqueConstraint('account_type_id', 'identifier') - def upgrade(): - print("[-->] Adding canonical IDs to all AWS accounts that are active...") - bind = op.get_bind() - session = Session(bind=bind) - - # If there are currently no accounts, then skip... (avoids alembic issues...) - accounts = session.query(Account).all() - if len(accounts) > 0: - fetch_aws_canonical_ids(True) - - print("[@] Completed adding canonical IDs to all active AWS accounts...") + # This revision has been replaced with a no-op after numerous reports of db upgrade problems. + # We recommend you run: + # monkey fetch_aws_canonical_ids + pass def downgrade(): From db03221644366faa702db25a1d97842f82c5bb6c Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Tue, 18 Apr 2017 18:13:30 -0700 Subject: [PATCH 13/19] =?UTF-8?q?Reverting=20PR=20667.=20Adding=20line=20t?= =?UTF-8?q?o=20quickstart=20to=20fix=20InsecurePlatformWarning=E2=80=A6=20?= =?UTF-8?q?(#683)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Reverting PR 667. Adding line to quickstat to fix InsecurePlatformWarning * Adding instruction to update pip in quickstart/Dockerfile. --- Dockerfile | 2 ++ docs/quickstart.md | 4 +++- setup.py | 3 +-- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 6b48c5b41..a640fe04e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,6 +25,8 @@ RUN apt-get update &&\ rm -rf /var/lib/apt/lists/* RUN pip install setuptools --upgrade +RUN pip install pip --upgrade +RUN pip install "urllib3[secure]" --upgrade RUN cd /usr/local/src &&\ # git clone --branch $SECURITY_MONKEY_VERSION https://github.com/Netflix/security_monkey.git diff --git a/docs/quickstart.md b/docs/quickstart.md index d3377f302..c156ebab0 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -83,8 +83,10 @@ Releases are on the master branch and are updated about every three months. 
Blee virtualenv venv source venv/bin/activate pip install --upgrade setuptools + pip install --upgrade pip + pip install --upgrade urllib3[secure] # to prevent InsecurePlatformWarning pip install google-compute-engine # Only required on GCP - python setup.py install + python setup.py develop ### Compile (or Download) the web UI diff --git a/setup.py b/setup.py index 05f085ddf..4efec3f09 100644 --- a/setup.py +++ b/setup.py @@ -65,8 +65,7 @@ 'jira==0.32', 'cloudaux>=1.1.5', 'joblib>=0.9.4', - 'pyjwt>=1.01', - 'urllib3[secure]' + 'pyjwt>=1.01' ], extras_require = { 'onelogin': ['python-saml>=2.2.0'], From 34d72462f4618c5afdea839b54b39f303a67e21a Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Tue, 18 Apr 2017 18:14:07 -0700 Subject: [PATCH 14/19] TravisCI docker-compose build(#685) * Naive docker build titus integration attempt * Adding docker service requirement to travis config * Moving docker-compose build command to the before_install section * What version of docker-compose is currently on travis * Following travisci/docker-compose instructions I found on the internet. * Trying to force travis to give me trusty so I can use docker-compose * creating an empty secmonkey.env to pass into docker-build * Fixing yaml syntax * cat fail * Shipping with a blank secmonkey.env * Upgrading pip and installing urllib3[secure] in Dockerfile * Splitting the three pip install commands into three different RUN commands --- .travis.yml | 25 ++++++++++++++++++++++++- secmonkey.env | 6 ++++++ 2 files changed, 30 insertions(+), 1 deletion(-) create mode 100644 secmonkey.env diff --git a/.travis.yml b/.travis.yml index e40aa1ad9..d5f75b483 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,4 @@ +dist: trusty sudo: required language: python @@ -5,6 +6,9 @@ language: python addons: postgresql: "9.4" +services: + - docker + matrix: include: - python: "2.7" @@ -17,12 +21,31 @@ env: global: - PIP_DOWNLOAD_CACHE=".pip_download_cache" - SECURITY_MONKEY_SETTINGS=`pwd`/env-config/config.py + - DOCKER_COMPOSE_VERSION=1.11.2 install: - sed -i '/WTF_CSRF_ENABLED = True/c\WTF_CSRF_ENABLED = False' `pwd`/env-config/config.py before_install: # - sudo apt-get -qq update + # # Now we can install the newer docker-engine which is required for the newer + # # docker-composer we will install next. The messy options are to force it to + # # be non-interactive (normally it asks you a bunch of config questions). + # - sudo apt-get install -o Dpkg::Options::="--force-confold" --force-yes -y docker-engine + + # - sudo rm /usr/local/bin/docker-compose + # - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose + # - chmod +x docker-compose + # - sudo mv docker-compose /usr/local/bin + + + # - cat "AWS_ACCESS_KEY_ID=\nAWS_SECRET_ACCESS_KEY=\nSECURITY_MONKEY_POSTGRES_HOST=postgres\nSECURITY_MONKEY_FQDN=127.0.0.1\nSESSION_COOKIE_SECURE=False\n" > secmonkey.env + # Check that docker-compose is now running the latest version (or at least the + # one we specified). This is not to be confused with the version we printed + # before doing the update. 
+ - docker-compose --version + - docker-compose build + # - sudo apt-get install -y libxml2-dev libxmlsec1-dev - sudo mkdir -p /var/log/security_monkey/ - sudo touch /var/log/security_monkey/securitymonkey.log @@ -37,10 +60,10 @@ before_script: - pip install .[tests] - pip install coveralls - monkey db upgrade + - monkey amazon_accounts script: - sh env_tests/test_dart.sh - - monkey amazon_accounts - coverage run -a -m py.test security_monkey/tests/auditors || exit 1 - coverage run -a -m py.test security_monkey/tests/watchers || exit 1 - coverage run -a -m py.test security_monkey/tests/core || exit 1 diff --git a/secmonkey.env b/secmonkey.env new file mode 100644 index 000000000..be4751f93 --- /dev/null +++ b/secmonkey.env @@ -0,0 +1,6 @@ +AWS_ACCESS_KEY_ID= +AWS_SECRET_ACCESS_KEY= +SECURITY_MONKEY_POSTGRES_HOST=postgres +SECURITY_MONKEY_FQDN=127.0.0.1 +# Must be false if HTTP +SESSION_COOKIE_SECURE=False \ No newline at end of file From 2e629c289db8501a366b9808da9025bd4157a2dd Mon Sep 17 00:00:00 2001 From: Travis McPeak Date: Wed, 19 Apr 2017 08:18:08 -0700 Subject: [PATCH 15/19] Add Bandit gate to Security Monkey (#688) :sleuth_or_spy: * Addressing Bandit findings This commit addresses a few Bandit findings so that we can run a gate in the future. A few issues are marked non dangerous (use of md5 for non-security), and a couple issues were fixed (autoescaping Jinja and using yaml.safe_load). * Update travis to run Bandit This commit adds a Bandit gate to travis to run against each commit. Bandit will be run with medium+ severity and confidence filters and exclude the tests folder. --- .travis.yml | 3 +++ migrations/versions/bfb550a500ab_.py | 2 +- security_monkey/common/jinja.py | 4 +++- security_monkey/datastore.py | 2 +- security_monkey/datastore_utils.py | 2 +- security_monkey/jirasync.py | 2 +- 6 files changed, 10 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index d5f75b483..88150d427 100644 --- a/.travis.yml +++ b/.travis.yml @@ -25,6 +25,8 @@ env: install: - sed -i '/WTF_CSRF_ENABLED = True/c\WTF_CSRF_ENABLED = False' `pwd`/env-config/config.py + - pip install bandit + before_install: # - sudo apt-get -qq update @@ -70,6 +72,7 @@ script: - coverage run -a -m py.test security_monkey/tests/views || exit 1 - coverage run -a -m py.test security_monkey/tests/interface || exit 1 - coverage run -a -m py.test security_monkey/tests/utilities || exit 1 + - bandit -r -ll -ii -x security_monkey/tests . 
after_success: - coveralls diff --git a/migrations/versions/bfb550a500ab_.py b/migrations/versions/bfb550a500ab_.py index 1828c6e7c..66b5b40ed 100644 --- a/migrations/versions/bfb550a500ab_.py +++ b/migrations/versions/bfb550a500ab_.py @@ -161,7 +161,7 @@ def durable_hash(item, ephemeral_paths): def hash_config(config): item = sub_dict(config) item_str = json.dumps(item, sort_keys=True) - item_hash = hashlib.md5(item_str) + item_hash = hashlib.md5(item_str) # nosec: not used for security return item_hash.hexdigest() diff --git a/security_monkey/common/jinja.py b/security_monkey/common/jinja.py index 8210f47e5..51578c3a4 100644 --- a/security_monkey/common/jinja.py +++ b/security_monkey/common/jinja.py @@ -31,6 +31,8 @@ def get_jinja_env(): Returns a Jinja environment with a FileSystemLoader for our templates """ templates_directory = os.path.abspath(os.path.join(__file__, '..', '..', templates)) - jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_directory)) + jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_directory), # nosec + autoescape=select_autoescape(['html', 'xml'])) + # nosec - jinja autoescape enabled for potentially dangerous extensions html, xml #jinja_environment.filters['dateformat'] = dateformat return jinja_environment diff --git a/security_monkey/datastore.py b/security_monkey/datastore.py index 99a102a05..875ed59fd 100644 --- a/security_monkey/datastore.py +++ b/security_monkey/datastore.py @@ -519,7 +519,7 @@ def hash_config(self, config): """ item = sub_dict(config) item_str = json.dumps(item, sort_keys=True) - item_hash = hashlib.md5(item_str) + item_hash = hashlib.md5(item_str) # nosec: not used for security return item_hash.hexdigest() def get_all_ctype_filtered(self, tech=None, account=None, region=None, name=None, include_inactive=False): diff --git a/security_monkey/datastore_utils.py b/security_monkey/datastore_utils.py index 804a70cec..9b5afed6b 100644 --- a/security_monkey/datastore_utils.py +++ b/security_monkey/datastore_utils.py @@ -200,7 +200,7 @@ def durable_hash(config, ephemeral_paths): def hash_config(config): item = sub_dict(config) item_str = json.dumps(item, sort_keys=True) - item_hash = hashlib.md5(item_str) + item_hash = hashlib.md5(item_str) # nosec: not used for security return item_hash.hexdigest() diff --git a/security_monkey/jirasync.py b/security_monkey/jirasync.py index 6ce7c3a64..7b8039ba2 100644 --- a/security_monkey/jirasync.py +++ b/security_monkey/jirasync.py @@ -24,7 +24,7 @@ def __init__(self, jira_file): try: with open(jira_file) as jf: data = jf.read() - data = yaml.load(data) + data = yaml.safe_load(data) self.account = data['account'] self.password = data['password'] self.project = data['project'] From e6f62f407524f7c6a812378810590b808861c255 Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Wed, 19 Apr 2017 08:28:56 -0700 Subject: [PATCH 16/19] =?UTF-8?q?Fix=20for=20issue=20#680=20=F0=9F=9A=91?= =?UTF-8?q?=20(#687)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- env-config/config-local.py | 212 ++++++++++++++++++ security_monkey/account_manager.py | 34 ++- security_monkey/exceptions.py | 11 + security_monkey/manage.py | 12 +- security_monkey/tests/auditors/test_s3.py | 2 - .../tests/utilities/test_account_utils.py | 94 ++++++++ security_monkey/views/account.py | 10 +- 7 files changed, 357 insertions(+), 18 deletions(-) create mode 100644 env-config/config-local.py create mode 100644 
security_monkey/tests/utilities/test_account_utils.py diff --git a/env-config/config-local.py b/env-config/config-local.py new file mode 100644 index 000000000..76bdda768 --- /dev/null +++ b/env-config/config-local.py @@ -0,0 +1,212 @@ +# Copyright 2014 Netflix, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# Insert any config items for local devleopment here. +# This will be fed into Flask/SQLAlchemy inside security_monkey/__init__.py + +LOG_CFG = { + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'standard': { + 'format': '%(asctime)s %(levelname)s: %(message)s ' + '[in %(pathname)s:%(lineno)d]' + } + }, + 'handlers': { + 'file': { + 'class': 'logging.handlers.RotatingFileHandler', + 'level': 'DEBUG', + 'formatter': 'standard', + 'filename': 'security_monkey-local.log', + 'maxBytes': 10485760, + 'backupCount': 100, + 'encoding': 'utf8' + }, + 'console': { + 'class': 'logging.StreamHandler', + 'level': 'DEBUG', + 'formatter': 'standard', + 'stream': 'ext://sys.stdout' + } + }, + 'loggers': { + 'security_monkey': { + 'handlers': ['file', 'console'], + 'level': 'INFO' + }, + 'apscheduler': { + 'handlers': ['file', 'console'], + 'level': 'WARN' + } + } +} + +SQLALCHEMY_DATABASE_URI = 'postgresql://securitymonkeyuser:securitymonkeypassword@localhost:5432/secmonkey' + +SQLALCHEMY_POOL_SIZE = 50 +SQLALCHEMY_MAX_OVERFLOW = 15 +ENVIRONMENT = 'local' +USE_ROUTE53 = False +FQDN = 'localhost' +API_PORT = '5000' +WEB_PORT = '5000' +WEB_PATH = '/static/ui.html' +FRONTED_BY_NGINX = False +NGINX_PORT = '80' +BASE_URL = 'http://{}:{}{}'.format(FQDN, WEB_PORT, WEB_PATH) +DEBUG = False + +SECRET_KEY = '' + +MAIL_DEFAULT_SENDER = 'securitymonkey@example.com' +SECURITY_REGISTERABLE = False +SECURITY_CONFIRMABLE = False +SECURITY_RECOVERABLE = False +SECURITY_PASSWORD_HASH = 'bcrypt' +SECURITY_PASSWORD_SALT = '' +SECURITY_TRACKABLE = True + +SECURITY_POST_LOGIN_VIEW = BASE_URL +SECURITY_POST_REGISTER_VIEW = BASE_URL +SECURITY_POST_CONFIRM_VIEW = BASE_URL +SECURITY_POST_RESET_VIEW = BASE_URL +SECURITY_POST_CHANGE_VIEW = BASE_URL + +# This address gets all change notifications (i.e. 'securityteam@example.com') +SECURITY_TEAM_EMAIL = [] + +# These are only required if using SMTP instead of SES +EMAILS_USE_SMTP = False # Otherwise, Use SES +SES_REGION = 'us-east-1' +MAIL_SERVER = 'smtp.example.com' +MAIL_PORT = 465 +MAIL_USE_SSL = True +MAIL_USERNAME = 'username' +MAIL_PASSWORD = 'password' + +WTF_CSRF_ENABLED = False +WTF_CSRF_SSL_STRICT = True # Checks Referer Header. Set to False for API access. +WTF_CSRF_METHODS = ['DELETE', 'POST', 'PUT', 'PATCH'] + +# "NONE", "SUMMARY", or "FULL" +SECURITYGROUP_INSTANCE_DETAIL = 'FULL' + +# SSO SETTINGS: +ACTIVE_PROVIDERS = [] # "ping", "google" or "onelogin" + +PING_NAME = '' # Use to override the Ping name in the UI. 
+PING_REDIRECT_URI = "http://{FQDN}:{PORT}/api/1/auth/ping".format(FQDN=FQDN, PORT=WEB_PORT) +PING_CLIENT_ID = '' # Provided by your administrator +PING_AUTH_ENDPOINT = '' # Often something ending in authorization.oauth2 +PING_ACCESS_TOKEN_URL = '' # Often something ending in token.oauth2 +PING_USER_API_URL = '' # Often something ending in idp/userinfo.openid +PING_JWKS_URL = '' # Often something ending in JWKS +PING_SECRET = '' # Provided by your administrator + +GOOGLE_CLIENT_ID = '' +GOOGLE_AUTH_ENDPOINT = '' +GOOGLE_SECRET = '' +# GOOGLE_HOSTED_DOMAIN = 'example.com' # Verify that token issued by comes from domain + +ONELOGIN_APP_ID = '' # OneLogin App ID provider by your administrator +ONELOGIN_EMAIL_FIELD = 'User.email' # SAML attribute used to provide email address +ONELOGIN_DEFAULT_ROLE = 'View' # Default RBAC when user doesn't already exist +ONELOGIN_HTTPS = True # If using HTTPS strict mode will check the requests are HTTPS +ONELOGIN_SETTINGS = { + # If strict is True, then the Python Toolkit will reject unsigned + # or unencrypted messages if it expects them to be signed or encrypted. + # Also it will reject the messages if the SAML standard is not strictly + # followed. Destination, NameId, Conditions ... are validated too. + "strict": True, + + # Enable debug mode (outputs errors). + "debug": True, + + # Service Provider Data that we are deploying. + "sp": { + # Identifier of the SP entity (must be a URI) + "entityId": "http://{FQDN}:{PORT}/metadata/".format(FQDN=FQDN, PORT=WEB_PORT), + # Specifies info about where and how the message MUST be + # returned to the requester, in this case our SP. + "assertionConsumerService": { + # URL Location where the from the IdP will be returned + "url": "http://{FQDN}:{PORT}/api/1/auth/onelogin?acs".format(FQDN=FQDN, PORT=WEB_PORT), + # SAML protocol binding to be used when returning the + # message. OneLogin Toolkit supports this endpoint for the + # HTTP-POST binding only. + "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" + }, + # If you need to specify requested attributes, set a + # attributeConsumingService. nameFormat, attributeValue and + # friendlyName can be omitted + #"attributeConsumingService": { + # "ServiceName": "SP test", + # "serviceDescription": "Test Service", + # "requestedAttributes": [ + # { + # "name": "", + # "isRequired": False, + # "nameFormat": "", + # "friendlyName": "", + # "attributeValue": "" + # } + # ] + #}, + # Specifies info about where and how the message MUST be + # returned to the requester, in this case our SP. + "singleLogoutService": { + # URL Location where the from the IdP will be returned + "url": "http://{FQDN}:{PORT}/api/1/auth/onelogin?sls".format(FQDN=FQDN, PORT=WEB_PORT), + # SAML protocol binding to be used when returning the + # message. OneLogin Toolkit supports the HTTP-Redirect binding + # only for this endpoint. + "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" + }, + # Specifies the constraints on the name identifier to be used to + # represent the requested subject. + # Take a look on src/onelogin/saml2/constants.py to see the NameIdFormat that are supported. + "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified", + # Usually x509cert and privateKey of the SP are provided by files placed at + # the certs folder. But we can also provide them with the following parameters + "x509cert": "", + "privateKey": "" + }, + + # Identity Provider Data that we want connected with our SP. 
+ "idp": { + # Identifier of the IdP entity (must be a URI) + "entityId": "https://app.onelogin.com/saml/metadata/{APP_ID}".format(APP_ID=ONELOGIN_APP_ID), + # SSO endpoint info of the IdP. (Authentication Request protocol) + "singleSignOnService": { + # URL Target of the IdP where the Authentication Request Message + # will be sent. + "url": "https://app.onelogin.com/trust/saml2/http-post/sso/{APP_ID}".format(APP_ID=ONELOGIN_APP_ID), + # SAML protocol binding to be used when returning the + # message. OneLogin Toolkit supports the HTTP-Redirect binding + # only for this endpoint. + "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" + }, + # SLO endpoint info of the IdP. + "singleLogoutService": { + # URL Location of the IdP where SLO Request will be sent. + "url": "https://app.onelogin.com/trust/saml2/http-redirect/slo/{APP_ID}".format(APP_ID=ONELOGIN_APP_ID), + # SAML protocol binding to be used when returning the + # message. OneLogin Toolkit supports the HTTP-Redirect binding + # only for this endpoint. + "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" + }, + # Public x509 certificate of the IdP + "x509cert": "" + } +} diff --git a/security_monkey/account_manager.py b/security_monkey/account_manager.py index c09a0e9ee..be88452dd 100644 --- a/security_monkey/account_manager.py +++ b/security_monkey/account_manager.py @@ -29,6 +29,8 @@ import time import traceback +from security_monkey.exceptions import AccountNameExists + account_registry = {} @@ -68,16 +70,34 @@ class AccountManager(object): identifier_label = None identifier_tool_tip = None - def update(self, account_type, name, active, third_party, notes, identifier, custom_fields=None): + def update(self, account_id, account_type, name, active, third_party, notes, identifier, custom_fields=None): """ Updates an existing account in the database. """ - account_type_result = _get_or_create_account_type(account_type) - account = Account.query.filter(Account.name == name, Account.account_type_id == account_type_result.id).first() - if not account: - app.logger.error( - 'Account with name {} does not exist'.format(name)) - return None + _get_or_create_account_type(account_type) + + # Query the account by ID if provided: + if account_id: + account = Account.query.filter(Account.id == account_id).first() + + if not account: + app.logger.error("Account with ID {} does not exist.".format(account_id)) + return None + + # Are we changing the account name? + if account.name != name: + # Check if the account with that name exists: + if Account.query.filter(Account.name == name).first(): + app.logger.error("Account with name: {} already exists.".format(name)) + raise AccountNameExists(name) + + account.name = name + + else: + account = Account.query.filter(Account.name == name).first() + if not account: + app.logger.error("Account with name {} does not exist.".format(name)) + return None account.active = active account.notes = notes diff --git a/security_monkey/exceptions.py b/security_monkey/exceptions.py index c7a7e1267..2e02cfdcd 100644 --- a/security_monkey/exceptions.py +++ b/security_monkey/exceptions.py @@ -108,3 +108,14 @@ def __init__(self, connection_message, tech, account, region): def __str__(self): return repr("Likely reached the AWS rate limit. {}/{}/{}:\n{}".format( self.tech, self.account, self.region, self.connection_message)) + + +class AccountNameExists(SecurityMonkeyException): + """Security Monkey Account name exists... 
cannot rename or create an account with that name""" + def __init__(self, account_name): + self.account_name = account_name + app.logger.info(self) + + def __str__(self): + return repr("Account with name: {} already exists. Cannnot create" + " or rename account with this name.".format(self.account_name)) diff --git a/security_monkey/manage.py b/security_monkey/manage.py index e29ae82c0..6af934923 100644 --- a/security_monkey/manage.py +++ b/security_monkey/manage.py @@ -94,7 +94,7 @@ def delete_unjustified_issues(accounts, monitors): monitor_names = _parse_tech_names(monitors) account_names = _parse_accounts(accounts) from security_monkey.datastore import ItemAudit - issues = ItemAudit.query.filter_by(ItemAudit.justified==False).all() + issues = ItemAudit.query.filter_by(ItemAudit.justified == False).all() for issue in issues: del issue.sub_items[:] db.session.delete(issue) @@ -601,10 +601,10 @@ def handle(self, app, *args, **kwargs): identifier = kwargs.pop('identifier') update = kwargs.pop('update_existing', False) if update: - result = self._account_manager.update( - self._account_manager.account_type, name, active, thirdparty, notes, identifier, - custom_fields=kwargs - ) + result = self._account_manager.update(None, self._account_manager.account_type, name, active, thirdparty, + notes, identifier, + custom_fields=kwargs + ) else: result = self._account_manager.create( self._account_manager.account_type, @@ -626,4 +626,4 @@ def main(): if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/security_monkey/tests/auditors/test_s3.py b/security_monkey/tests/auditors/test_s3.py index d3cf2555f..0720c50a8 100644 --- a/security_monkey/tests/auditors/test_s3.py +++ b/security_monkey/tests/auditors/test_s3.py @@ -175,8 +175,6 @@ """) -asdf = "dsfhgiouhy23984723789y4riuwhfkajshf91283742389u823723" - class S3AuditorTestCase(SecurityMonkeyTestCase): def pre_test_setup(self): self.s3_items = [ diff --git a/security_monkey/tests/utilities/test_account_utils.py b/security_monkey/tests/utilities/test_account_utils.py new file mode 100644 index 000000000..ebda8280d --- /dev/null +++ b/security_monkey/tests/utilities/test_account_utils.py @@ -0,0 +1,94 @@ +# Copyright 2017 Netflix, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +.. module: security_monkey.tests.utilities.test_account_utils + :platform: Unix +.. version:: $$VERSION$$ +.. 
moduleauthor:: Mike Grima +""" +from security_monkey.exceptions import AccountNameExists +from security_monkey.manage import AddAccount, manager +from security_monkey import db +from security_monkey.datastore import AccountType, Account, AccountTypeCustomValues +from security_monkey.tests import SecurityMonkeyTestCase + + +class AccountTestUtils(SecurityMonkeyTestCase): + def pre_test_setup(self): + self.account_type = AccountType(name='AWS') + db.session.add(self.account_type) + db.session.commit() + + def test_create_aws_account(self): + from security_monkey.account_manager import account_registry + + for name, account_manager in account_registry.items(): + manager.add_command("add_account_%s" % name.lower(), AddAccount(account_manager())) + + manager.handle("manage.py", ["add_account_aws", "-n", "test", "--active", "--id", "99999999999", + "--canonical_id", "bcaf1ffd86f41161ca5fb16fd081034f", + "--role_name", "SecurityMonkey"]) + + account = Account.query.filter(Account.name == "test").first() + assert account + assert account.identifier == "99999999999" + assert account.active + assert len(account.custom_fields) == 3 + + # Get the canonical ID field: + c_id = AccountTypeCustomValues.query.filter(AccountTypeCustomValues.name == "canonical_id", + AccountTypeCustomValues.account_id == account.id).first() + + assert c_id + assert c_id.value == "bcaf1ffd86f41161ca5fb16fd081034f" + + # Already exists: + assert manager.handle("manage.py", ["add_account_aws", "-n", "test", "--active", "--id", "99999999999", + "--canonical_id", "bcaf1ffd86f41161ca5fb16fd081034f", + "--role_name", "SecurityMonkey"]) == -1 + + def test_update_aws_account(self): + from security_monkey.account_manager import account_registry + + for name, account_manager in account_registry.items(): + manager.add_command("add_account_%s" % name.lower(), AddAccount(account_manager())) + + # Create the account: + from security_monkey.account_manager import account_registry + for name, am in account_registry.items(): + if name == "AWS": + break + + account_manager = am() + account_manager.create(account_manager.account_type, "test", True, False, "Tests", "99999999999", + custom_fields=dict(canonical_id="bcaf1ffd86f41161ca5fb16fd081034f", s3_id=None)) + + # Create a second account: + account_manager.create(account_manager.account_type, "test2", True, False, "Tests", "99999999990", + custom_fields=dict(canonical_id="bcaf1ffd86f41161ca5fb16fd081asdf", s3_id=None)) + + # Get the ID of the first account: + id = Account.query.filter(Account.name == "test").one().id + + # Try to rename the account: + account_manager.update(id, account_manager.account_type, "lololol", True, False, "Tests", "99999999999", + custom_fields=dict(canonical_id="bcaf1ffd86f41161ca5fb16fd081034f", s3_id=None)) + + assert not Account.query.filter(Account.name == "test").first() + assert Account.query.filter(Account.name == "lololol").first().id == id + + # Try to update it to an existing name: + with self.assertRaises(AccountNameExists): + account_manager.update(id, account_manager.account_type, "test2", True, False, "Tests", "99999999999", + custom_fields=dict(canonical_id="bcaf1ffd86f41161ca5fb16fd081034f", s3_id=None)) diff --git a/security_monkey/views/account.py b/security_monkey/views/account.py index 9aab7040e..6777afe7d 100644 --- a/security_monkey/views/account.py +++ b/security_monkey/views/account.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. - +from security_monkey.exceptions import AccountNameExists from security_monkey.views import AuthenticatedService from security_monkey.views import ACCOUNT_FIELDS from security_monkey.datastore import Account, AccountType @@ -147,8 +147,12 @@ def put(self, account_id): from security_monkey.account_manager import account_registry account_manager = account_registry.get(account_type)() - account = account_manager.update(account_type, name, active, third_party, notes, identifier, - custom_fields=custom_fields) + + try: + account = account_manager.update(account_id, account_type, name, active, third_party, notes, identifier, + custom_fields=custom_fields) + except AccountNameExists as _: + return {'status': 'error. Account name exists.'}, 409 if not account: return {'status': 'error. Account ID not found.'}, 404 From d2910edf44c48cf63c5d3afd2970db7df60f3ef8 Mon Sep 17 00:00:00 2001 From: Mike Grima Date: Wed, 19 Apr 2017 09:11:04 -0700 Subject: [PATCH 17/19] =?UTF-8?q?Enhancements=20to=20Travis:=20parallelize?= =?UTF-8?q?d=20the=20workloads.=20=F0=9F=9A=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .travis.yml | 148 +++++++++++++++++++++++++++------------------------- 1 file changed, 77 insertions(+), 71 deletions(-) diff --git a/.travis.yml b/.travis.yml index 88150d427..e353738c2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,84 +1,90 @@ -dist: trusty -sudo: required +# Travis YAML file for Security Monkey. -language: python +# Only way to get multiple builds to work without having the "default" job working was to +# duplicate everything :/ See: https://github.com/travis-ci/travis-ci/issues/4681 +matrix: + include: + ##### UNIT TEST JOB ##### + - dist: trusty + sudo: required + language: python + python: "2.7" -addons: - postgresql: "9.4" + env: + - UNIT_TEST_JOB=true + - PIP_DOWNLOAD_CACHE=".pip_download_cache" + - SECURITY_MONKEY_SETTINGS=`pwd`/env-config/config.py -services: - - docker + addons: + postgresql: "9.4" -matrix: - include: - - python: "2.7" + before_script: + - psql -c "CREATE DATABASE secmonkey;" -U postgres + - psql -c "CREATE ROLE securitymonkeyuser LOGIN PASSWORD 'securitymonkeypassword';" -U postgres + - psql -c "CREATE SCHEMA secmonkey GRANT Usage, Create ON SCHEMA secmonkey TO securitymonkeyuser;" -U postgres + - psql -c "set timezone TO 'GMT';" -U postgres + - python setup.py develop + - pip install .[tests] + - pip install coveralls + - monkey db upgrade + - monkey amazon_accounts + + before_install: + - sudo mkdir -p /var/log/security_monkey/ + - sudo touch /var/log/security_monkey/securitymonkey.log + - sudo chown travis /var/log/security_monkey/securitymonkey.log + + install: + - sed -i '/WTF_CSRF_ENABLED = True/c\WTF_CSRF_ENABLED = False' `pwd`/env-config/config.py + - pip install bandit + + script: + - coverage run -a -m py.test security_monkey/tests/auditors || exit 1 + - coverage run -a -m py.test security_monkey/tests/watchers || exit 1 + - coverage run -a -m py.test security_monkey/tests/core || exit 1 + - coverage run -a -m py.test security_monkey/tests/views || exit 1 + - coverage run -a -m py.test security_monkey/tests/interface || exit 1 + - coverage run -a -m py.test security_monkey/tests/utilities || exit 1 + - bandit -r -ll -ii -x security_monkey/tests . 
+ + after_success: + - coveralls + - coverage report + ##################################################### + + ##### BUILD DOCKER CONTAINER JOB ##### + - dist: trusty + sudo: required + language: python + python: "2.7" + + env: + - BUILD_DOCKER=True + - DOCKER_COMPOSE_VERSION=1.11.2 + - PIP_DOWNLOAD_CACHE=".pip_download_cache" + - SECURITY_MONKEY_SETTINGS=`pwd`/env-config/config.py + + services: + - docker + + script: + - docker-compose --version + - docker-compose build + ##################################################### + + ##### TEST DART JOB ##### + - dist: trusty + env: DART_TEST=True + + script: sh env_tests/test_dart.sh + ##################################################### cache: directories: - .pip_download_cache -env: - global: - - PIP_DOWNLOAD_CACHE=".pip_download_cache" - - SECURITY_MONKEY_SETTINGS=`pwd`/env-config/config.py - - DOCKER_COMPOSE_VERSION=1.11.2 - -install: - - sed -i '/WTF_CSRF_ENABLED = True/c\WTF_CSRF_ENABLED = False' `pwd`/env-config/config.py - - pip install bandit - - -before_install: - # - sudo apt-get -qq update - # # Now we can install the newer docker-engine which is required for the newer - # # docker-composer we will install next. The messy options are to force it to - # # be non-interactive (normally it asks you a bunch of config questions). - # - sudo apt-get install -o Dpkg::Options::="--force-confold" --force-yes -y docker-engine - - # - sudo rm /usr/local/bin/docker-compose - # - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose - # - chmod +x docker-compose - # - sudo mv docker-compose /usr/local/bin - - - # - cat "AWS_ACCESS_KEY_ID=\nAWS_SECRET_ACCESS_KEY=\nSECURITY_MONKEY_POSTGRES_HOST=postgres\nSECURITY_MONKEY_FQDN=127.0.0.1\nSESSION_COOKIE_SECURE=False\n" > secmonkey.env - # Check that docker-compose is now running the latest version (or at least the - # one we specified). This is not to be confused with the version we printed - # before doing the update. - - docker-compose --version - - docker-compose build - - # - sudo apt-get install -y libxml2-dev libxmlsec1-dev - - sudo mkdir -p /var/log/security_monkey/ - - sudo touch /var/log/security_monkey/securitymonkey.log - - sudo chown travis /var/log/security_monkey/securitymonkey.log - -before_script: - - psql -c "CREATE DATABASE secmonkey;" -U postgres - - psql -c "CREATE ROLE securitymonkeyuser LOGIN PASSWORD 'securitymonkeypassword';" -U postgres - - psql -c "CREATE SCHEMA secmonkey GRANT Usage, Create ON SCHEMA secmonkey TO securitymonkeyuser;" -U postgres - - psql -c "set timezone TO 'GMT';" -U postgres - - python setup.py develop - - pip install .[tests] - - pip install coveralls - - monkey db upgrade - - monkey amazon_accounts - -script: - - sh env_tests/test_dart.sh - - coverage run -a -m py.test security_monkey/tests/auditors || exit 1 - - coverage run -a -m py.test security_monkey/tests/watchers || exit 1 - - coverage run -a -m py.test security_monkey/tests/core || exit 1 - - coverage run -a -m py.test security_monkey/tests/views || exit 1 - - coverage run -a -m py.test security_monkey/tests/interface || exit 1 - - coverage run -a -m py.test security_monkey/tests/utilities || exit 1 - - bandit -r -ll -ii -x security_monkey/tests . 
-
-after_success:
-  - coveralls
-  - coverage report
-
 notifications:
   email:
     - mgrima@netflix.com
     - pkelley@netflix.com
+    - tmcpeak@netflix.com

From 9628f19abd1c1f7cf3995b2facb822e78205fd34 Mon Sep 17 00:00:00 2001
From: Patrick Kelley
Date: Thu, 20 Apr 2017 11:16:29 -0700
Subject: [PATCH 18/19] Prepare for release v0.9.1 (#690)

* Prep for v0.9.1. Version bump, changelog update.

* Adding contributors section to 0.9.1 changelog
---
 Dockerfile                  |  2 +-
 README.md                   |  2 ++
 dart/pubspec.yaml           |  2 +-
 docker/nginx/Dockerfile     |  2 +-
 docs/authors.md             |  6 ------
 docs/changelog.md           | 31 +++++++++++++++++++++++++++++++
 security_monkey/__init__.py |  2 +-
 7 files changed, 37 insertions(+), 10 deletions(-)
 delete mode 100644 docs/authors.md

diff --git a/Dockerfile b/Dockerfile
index a640fe04e..f706537cf 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -16,7 +16,7 @@ FROM ubuntu:14.04
 MAINTAINER Netflix Open Source Development

-ENV SECURITY_MONKEY_VERSION=v0.9.0 \
+ENV SECURITY_MONKEY_VERSION=v0.9.1 \
     SECURITY_MONKEY_SETTINGS=/usr/local/src/security_monkey/env-config/config-docker.py

 RUN apt-get update &&\
diff --git a/README.md b/README.md
index 59112ecac..836a2d507 100644
--- a/README.md
+++ b/README.md
@@ -21,6 +21,8 @@ Project resources
 -----------------

 - [Quickstart](docs/quickstart.md)
+- [Upgrading](docs/update.md)
+- [Changelog](dosc/changelog.md)
 - [Source code](https://github.com/netflix/security_monkey)
 - [Issue tracker](https://github.com/netflix/security_monkey/issues)
 - [Gitter.im Chat Room](https://gitter.im/Netflix/security_monkey)
diff --git a/dart/pubspec.yaml b/dart/pubspec.yaml
index 467d66426..b6330c614 100644
--- a/dart/pubspec.yaml
+++ b/dart/pubspec.yaml
@@ -1,6 +1,6 @@
 name: security_monkey
 description: An AWS Policy Monitoring and Alerting Tool
-version: 0.9.0
+version: 0.9.1
 dependencies:
   angular: "^1.1.2+2"
   angular_ui: ">=0.6.8 <0.7.0"
diff --git a/docker/nginx/Dockerfile b/docker/nginx/Dockerfile
index fcd0c835b..f2aae08df 100644
--- a/docker/nginx/Dockerfile
+++ b/docker/nginx/Dockerfile
@@ -15,7 +15,7 @@ FROM nginx:1.11.4
 MAINTAINER Netflix Open Source Development

-ENV SECURITY_MONKEY_VERSION=v0.9.0
+ENV SECURITY_MONKEY_VERSION=v0.9.1
 RUN apt-get update &&\
     apt-get install -y curl git sudo apt-transport-https &&\
     curl https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - &&\
diff --git a/docs/authors.md b/docs/authors.md
deleted file mode 100644
index 36e141fb6..000000000
--- a/docs/authors.md
+++ /dev/null
@@ -1,6 +0,0 @@
-Authors
-=======
-
-securitymonkey 0.9.0 is copyright 2014,2015,2016,2017 Netflix. inc.
-
-If you want to contribute to security monkey, see [contributing](contributing.md).
diff --git a/docs/changelog.md b/docs/changelog.md
index 4045cdbb9..da9aca645 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -1,6 +1,37 @@
 Changelog
 =========

+v0.9.1 (2017-04-20)
+----------------------------------------
+
+- PR #666 - @redixin - Use find_packages in setup.py to include nested packages.
+- PR #667 - @monkeysecurity - Explicitly adding `urllib3[secure]` to setup.py (REVERTED in #683)
+- PR #668 - @monkeysecurity - IPv6 support in security groups.
+- PR #669 - @monkeysecurity - Updating the security group auditor to treat `::/0` the same as `0.0.0.0/0`
+- PR #671 - @monkeysecurity - Enhancing PolicyDiff to be able to handle non-ascii strings.
+- PR #673 - @monkeysecurity - Fixing path to `aws_accounts.json`. (Broken by moving `manage.py`)
+- PR #675 - @monkeysecurity - Adding `package_data` and `data_files` sections to setup.py.
+- PR #677 - @willbengtson - Fixing the security trackable information. +- PR #682 - @monkeysecurity - Updating packaged supervisor config to provide full path to `monkey` +- PR #681 - @AlexCline - Add reference_policies for TLS transitional ELB security policies +- PR #684 - @monkeysecurity - Disabling DB migration `b8ccf5b8089b`. Was freezing some `db upgrades` +- PR #683 - @monkeysecurity - Reverted #667. Added `pip install --upgrade urllib3[secure]` to `quickstart` and `Dockerfile`. +- PR #685 - @monkeysecurity - Running `docker-compose build` in Travis-CI. +- PR #688 - @mcpeak - Add Bandit gate to Security Monkey. +- PR #687 - @mikegrima - Fix for issue #680. (Unable to edit account names) +- PR #689 - @mikegrima - Enhancements to Travis-CI: parallelized the workloads. (docker/python/dart in parallel) + +Important Notes: + - This is a hotfix release to correct a number of installation difficulties reported since `0.9.0`. + +Contributors: +- @redixin +- @AlexCline +- @willbengtson +- @mcpeak +- @mikegrima +- @monkeysecurity + v0.9.0 (2017-04-13) ---------------------------------------- diff --git a/security_monkey/__init__.py b/security_monkey/__init__.py index f659020a0..0ce350879 100644 --- a/security_monkey/__init__.py +++ b/security_monkey/__init__.py @@ -23,7 +23,7 @@ import stat ### VERSION ### -__version__ = '0.9.0' +__version__ = '0.9.1' ### FLASK ### from flask import Flask From baddf1abd61f2f95757bee9a03af17f121d3d081 Mon Sep 17 00:00:00 2001 From: Patrick Kelley Date: Thu, 20 Apr 2017 18:17:56 +0000 Subject: [PATCH 19/19] Fixing broken link in readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 836a2d507..f54191c12 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ Project resources - [Quickstart](docs/quickstart.md) - [Upgrading](docs/update.md) -- [Changelog](dosc/changelog.md) +- [Changelog](docs/changelog.md) - [Source code](https://github.com/netflix/security_monkey) - [Issue tracker](https://github.com/netflix/security_monkey/issues) - [Gitter.im Chat Room](https://gitter.im/Netflix/security_monkey)