From 68b080c9cc349292ae5365b4cd69fe7a1204edc6 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Mon, 16 Dec 2019 14:37:16 -0800 Subject: [PATCH 001/143] Updates for pylint and such. --- generic_large_compare.py | 2 +- jelly_api_2/genericlargecompare.py | 3 +- jelly_api_2/puthostjson.py | 12 +- pylintrc | 549 +++++++++++++++++++++++++++++ requirements.txt | 11 + storage.py | 359 ++++++++++++------- templates/base_header.html | 2 +- travis/testing.sh | 8 +- ui.py | 216 +++++------- 9 files changed, 899 insertions(+), 263 deletions(-) create mode 100644 pylintrc create mode 100644 requirements.txt diff --git a/generic_large_compare.py b/generic_large_compare.py index 19db571..1a0469f 100755 --- a/generic_large_compare.py +++ b/generic_large_compare.py @@ -15,7 +15,7 @@ import pymysql import json import re -import apt_pkg +#import apt_pkg from copy import deepcopy from time import time from time import sleep diff --git a/jelly_api_2/genericlargecompare.py b/jelly_api_2/genericlargecompare.py index 34ec856..fbfe387 100644 --- a/jelly_api_2/genericlargecompare.py +++ b/jelly_api_2/genericlargecompare.py @@ -6,6 +6,7 @@ API for Host Information Should return data about the host & return the collections for this particular host. + ```swagger-yaml /genericlargecompare/ : x-cached-length: "Every Midnight" @@ -130,7 +131,7 @@ import urllib import requests from generic_large_compare import generic_large_compare -import apt +#import apt genericlargecompare = Blueprint('api2_genericlargecompare', __name__) diff --git a/jelly_api_2/puthostjson.py b/jelly_api_2/puthostjson.py index 6112dfa..f9ac15e 100644 --- a/jelly_api_2/puthostjson.py +++ b/jelly_api_2/puthostjson.py @@ -48,8 +48,8 @@ def generic_puthostjson(): this_endpoint_endorsements = ( ("conntype","sapi"), ) - endorsementmgmt.process_endorsements(endorsements=this_endpoint_endorsements, \ - session_endorsements=g.session_endorsements ) + endorsementmgmt.process_endorsements(endorsements=this_endpoint_endorsements, + session_endorsements=g.session_endorsements) argument_error = False @@ -81,12 +81,14 @@ def generic_puthostjson(): if check_result_passed is True : # It's good do the Storage - this_store_collection_result = storage(g.config_items["sapi"].get("storageconfig", "storage.ini"), request.json, sapi=True) + this_store_collection_result = storage(g.config_items["sapi"].get("storageconfig", "storage.ini"), + request.json, + sapi=True) data_dict=dict() data_dict["storage_result"] = this_store_collection_result - if error == False : + if error == False: response_dict = dict() @@ -97,7 +99,7 @@ def generic_puthostjson(): return jsonify(**response_dict) - else : + else: response_dict = dict() response_dict["meta"] = meta_dict diff --git a/pylintrc b/pylintrc new file mode 100644 index 0000000..80b1429 --- /dev/null +++ b/pylintrc @@ -0,0 +1,549 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist=jq + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. 
+jobs=1 + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. +#rcfile= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +disable=print-statement, + parameter-unpacking, + unpacking-in-except, + old-raise-syntax, + backtick, + long-suffix, + old-ne-operator, + old-octal-literal, + import-star-module-level, + non-ascii-bytes-literal, + raw-checker-failed, + bad-inline-option, + locally-disabled, + locally-enabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + apply-builtin, + basestring-builtin, + buffer-builtin, + cmp-builtin, + coerce-builtin, + execfile-builtin, + file-builtin, + long-builtin, + raw_input-builtin, + reduce-builtin, + standarderror-builtin, + unicode-builtin, + xrange-builtin, + coerce-method, + delslice-method, + getslice-method, + setslice-method, + no-absolute-import, + old-division, + dict-iter-method, + dict-view-method, + next-method-called, + metaclass-assignment, + indexing-exception, + raising-string, + reload-builtin, + oct-method, + hex-method, + nonzero-method, + cmp-method, + input-builtin, + round-builtin, + intern-builtin, + unichr-builtin, + map-builtin-not-iterating, + zip-builtin-not-iterating, + range-builtin-not-iterating, + filter-builtin-not-iterating, + using-cmp-argument, + eq-without-hash, + div-method, + idiv-method, + rdiv-method, + exception-message-attribute, + invalid-str-codec, + sys-max-int, + bad-python3-import, + deprecated-string-function, + deprecated-str-translate-call, + deprecated-itertools-function, + deprecated-types-field, + next-method-defined, + dict-items-not-iterating, + dict-keys-not-iterating, + dict-values-not-iterating, + broad-except, + logging-format-interpolation, + unused-import, + len-as-condition, + too-many-locals, + too-many-nested-blocks, + consider-iterating-dictionary, + invalid-name, + redefined-outer-name + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. 
+enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio).You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=optparse.Values,sys.exit + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=175 + +# Maximum number of lines in a module +max-module-lines=1000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma, + dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. 
+init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + + +[BASIC] + +# Naming style matching correct argument names +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style +#argument-rgx= + +# Naming style matching correct attribute names +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. 
Overrides attr-naming- +# style +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Naming style matching correct class attribute names +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style +#class-attribute-rgx= + +# Naming style matching correct class names +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming-style +#class-rgx= + +# Naming style matching correct constant names +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma +good-names=i, + j, + k, + ex, + Run, + _ + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# Naming style matching correct inline iteration names +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style +#inlinevar-rgx= + +# Naming style matching correct method names +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style +#method-rgx= + +# Naming style matching correct module names +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style +#variable-rgx= + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Maximum number of attributes for a class (see R0902). 
+max-attributes=7 + +# Maximum number of boolean expressions in a if statement +max-bool-expr=5 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of locals for function / method body +max-locals=25 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of statements in function / method body +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[IMPORTS] + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. 
Defaults to +# "Exception" +overgeneral-exceptions=Exception + diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..f6caf00 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,11 @@ +PyYAML +flask +flask-graphQL +flask_cors +graphene +pymysql +manowar_agent +lxml +jsonschema +colorama +paramiko diff --git a/storage.py b/storage.py index 6307c75..5c16a74 100755 --- a/storage.py +++ b/storage.py @@ -9,12 +9,13 @@ from configparser import ConfigParser from time import time import datetime - -import pymysql +import logging import json import sys import ast +import pymysql + # Printing Stuff from colorama import Fore, Back, Style import pprint @@ -27,20 +28,40 @@ parser = argparse.ArgumentParser() parser.add_argument("-c", "--config", help="JSON Config File with our Storage Info", required=True) parser.add_argument("-j", "--json", help="json file to store", required=True) - parser.add_argument("-V", "--verbose", action="store_true", help="Enable Verbose Mode") + parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) + parser._optionals.title = "DESCRIPTION " # Parser Args args = parser.parse_args() # Grab Variables - JSONFILE=args.json - CONFIG=args.config - VERBOSE=args.verbose + JSONFILE = args.json + CONFIG = args.config + + VERBOSE = len(args.verbose) + + if VERBOSE == 0: + logging.basicConfig(level=logging.ERROR) + + elif VERBOSE == 1: + logging.basicConfig(level=logging.WARNING) + + elif VERBOSE == 2: + logging.basicConfig(level=logging.INFO) + + elif VERBOSE > 2: + logging.basicConfig(level=logging.DEBUG) + + LOGGER = logging.getLogger() + + LOGGER.info("Welcome to Storage Module") def storage(CONFIG, JSONFILE, VERBOSE=False, sapi=False): + logger = logging.getLogger("storage.py") + STORAGE_TIME = int(time()) storage_stats = dict() @@ -63,65 +84,91 @@ def storage(CONFIG, JSONFILE, VERBOSE=False, sapi=False): - db_config_items=dict() - ip_intel_config=dict() + db_config_items = dict() + ip_intel_config = dict() # Collection Items for section in config: - if section == "database" : + if section == "database": for item in config[section]: db_config_items[item] = config[section][item] - if section == "ip_intel" : + if section == "ip_intel": for item in config[section]: ip_intel_config[item] = ast.literal_eval(config[section][item]) MAX = db_config_items["collectionmaxchars"] - storage_stats = dict() storage_stats["storage_timestamp"] = STORAGE_TIME try: - db_conn = pymysql.connect(host=db_config_items['dbhostname'], port=int(db_config_items['dbport']), user=db_config_items['dbusername'], passwd=db_config_items['dbpassword'], db=db_config_items['dbdb']) - dbmessage = "Good, connected to " + db_config_items['dbusername'] + "@" + db_config_items['dbhostname'] + ":" + db_config_items['dbport'] + "/" + db_config_items['dbdb'] + db_conn = pymysql.connect(host=db_config_items['dbhostname'], + port=int(db_config_items['dbport']), + user=db_config_items['dbusername'], + passwd=db_config_items['dbpassword'], + db=db_config_items['dbdb']) + + dbmessage = "Good, connected to {}@{}:{}/{}".format(db_config_items['dbusername'], + db_config_items['dbhostname'], + db_config_items['dbport'], + db_config_items['dbdb']) + storage_stats["db-status"] = dbmessage - except Exception as e: + + except Exception as dbconnection_error: storage_stats["db-status"] = "Connection Failed" - storage_stats["db-error"] = str(e) - return(storage_stats) + storage_stats["db-error"] = str(dbconnection_error) + return storage_stats collection_good, 
hostdata, results_data = parse_json_file(JSONFILE=JSONFILE) storage_stats["collection_status"] = collection_good - if collection_good : - try : + if collection_good: + try: host_id = insert_update_host(hostdata, db_conn) hostname = hostdata["hostname"] storage_stats["collection_timestamp"] = hostdata['last_update'] - storage_stats["inserts"], storage_stats["updates"], storage_stats["errors"] = insert_update_collections(db_conn, host_id, results_data, MAX, hostdata['last_update'], hostname) - except Exception as e : - print(Fore.RED, "Error Updating Host Collecitons ", str(e), Style.RESET_ALL) + storage_stats["inserts"], storage_stats["updates"], storage_stats["errors"] = insert_update_collections(db_conn, + host_id, + results_data, + MAX, + hostdata['last_update'], + hostname) + + except Exception as dbconnection_error: + logger.error("{}Error Updating Host Collecitons {}{}".format(Fore.RED, + dbconnection_error, + Style.RESET_ALL)) + storage_stats["insert_update"] = 0 storage_stats["errors"] = 1 else: - print(Fore.GREEN, "Updating Collection Success", str(storage_stats), Style.RESET_ALL) + logger.info("{}Updating Collection Success{}{}".format(Fore.GREEN, + storage_stats, + Style.RESET_ALL)) + # Updating Collection has been a success Let's check if this is a sapi host. - if sapi == True : + if sapi is True: # I am so update sapi table - storage_stats["sapi_data"] = store_as_SAPI_host(host_id=host_id, db_conn=db_conn, hostname=hostname) + storage_stats["sapi_data"] = store_as_SAPI_host(host_id=host_id, + db_conn=db_conn, + hostname=hostname) + do_ipintel = ip_intel_config.get("do_intel", False) - #print(do_ipintel) - #print(hostdata.keys()) - if do_ipintel == True and "ip_intel" in hostdata.keys() : - # Process the IP Intelligence for this host - result = process_ip_intel(config_dict={"ip_intel" : ip_intel_config}, multireport=hostdata["ip_intel"], host=hostname ) - if result == 200 : - print(Fore.GREEN, "IP Intel : {} for host {} ".format(result, hostname) , Style.RESET_ALL) - else : - print(Fore.RED, "IP Intel : {} for host {} ".format(result, hostname), Style.RESET_ALL) + logger.debug("Doing IP Intel. 
({} Statement).".format(do_ipintel)) + + if do_ipintel == True and "ip_intel" in hostdata.keys(): + # Process the IP Intelligence for this host + result = process_ip_intel(config_dict={"ip_intel" : ip_intel_config}, + multireport=hostdata["ip_intel"], + host=hostname) + if result == 200: + logger.info("{}IP Intel : {} for host {}{}".format(Fore.GREEN, result, hostname, Style.RESET_ALL)) + else: + logger.error("{}IP Intel : {} for host {}{}".format(Fore.RED, result, hostname, Style.RESET_ALL)) - else : + else: storage_stats["inserts_updates"] = 0 storage_stats["errors"] = 1 @@ -131,19 +178,22 @@ def storage(CONFIG, JSONFILE, VERBOSE=False, sapi=False): db_conn.commit() db_conn.close() except Exception as e: - print(Fore.RED, "Error Closing DB Connection", Style.RESET_ALL) + logger.error("{}Error Closing DB Connection{}".format(Fore.RED, Style.RESET_ALL)) if __name__ == "__main__": print(json.dumps(storage_stats, sort_keys=True, indent=4)) - return(storage_stats) + return storage_stats def parse_json_file(JSONFILE=False, VERBOSE=False): + + logger = logging.getLogger("storage:parse_json_file") + # Return: collection_good, hostdata, results_data collection_good = False # If we've got the dict passed to us instead of the filename - if type(JSONFILE) is dict : + if isinstance(JSONFILE, dict): # Treat the dict as the results collection_results = JSONFILE else: @@ -154,7 +204,7 @@ def parse_json_file(JSONFILE=False, VERBOSE=False): # No matter what check to see that I have "SSH SUCCESS" (Future more) in my collection_status # Future will have more successful types - if collection_results['collection_status'] in ["SSH SUCCESS", "STINGCELL"] : + if collection_results['collection_status'] in ["SSH SUCCESS", "STINGCELL"]: # Do Parse stuff collection_good = True hostdata = { @@ -167,10 +217,10 @@ def parse_json_file(JSONFILE=False, VERBOSE=False): } results_data = collection_results['collection_data'] - if "ip_intel" in collection_results : + if "ip_intel" in collection_results: hostdata["ip_intel"] = collection_results["ip_intel"] - else : + else: # Failed for some reason. Ignoring any results. 
print(collection_results["status"]) collection_good = False @@ -180,45 +230,51 @@ def parse_json_file(JSONFILE=False, VERBOSE=False): return collection_good, hostdata, results_data def null_or_value(data_to_check, VERBOSE=False): - if data_to_check == None : + + logger = logging.getLogger("storage:null_or_value") + + if data_to_check == None: data = "NULL" return data - else : + else: data = "'" + str(data_to_check) + "'" return data def insert_update_host(hostdata, db_conn, VERBOSE=False): + logger = logging.getLogger("storage:insert_update_host") + cur = db_conn.cursor() + # TODO Replace this with # Always Columns column_string = "hostname, last_update" # Always Values - values_string = "'" + hostdata['hostname'] + "', FROM_UNIXTIME(" + str(hostdata['last_update']) + ") " + values_string = "'{}', FROM_UNIXTIME({}) ".format(hostdata['hostname'], str(hostdata['last_update'])) # SELECT * Specification - if hostdata['host_uber_id'] != "N/A" : + if hostdata['host_uber_id'] != "N/A": # Host_uber_id Given # from hosts where [ host_uber_id = hostdata['host_uber_id'] - select_tail_specification = "from hosts where host_uber_id = '" + str(hostdata['host_uber_id']) + "' ;" + if isinstance(hostdata["host_uber_id"], int): + select_tail_specification = "from hosts where host_uber_id = %s " + host_id_query_params.append(hostdata["host_uber_id"]) else: - select_tail_specification = "from hosts where hostname = '" + str(hostdata['hostname']) + "' ;" + select_tail_specification = "from hosts where hostname = %s" + host_id_query_params.append(hostdata["hostname"]) + + # TODO Add MOWN Logic here someday #################################################################### - # Add host_id - # No If there's not specifing this + # Add host_id to Query. select_head_specification = "SELECT host_id " - host_id_query = select_head_specification + select_tail_specification + host_id_query = "SELECT host_id {}".format(select_tail_specification) try: - cur.execute(host_id_query) - except Exception as insert_update_query_error: - logger.error("{}Trouble with query {} : {}{}".format(Fore.RED, str(host_id_query), str(e), Style.RESET_ALL)) - - if not cur.rowcount: - # No Results - host_id_data = "NULL" - else: - this_host_id_data = cur.fetchone()[0] - host_id_data = null_or_value(this_host_id_data) - - column_string = "host_id, " + column_string - values_string = host_id_data + ", " + values_string - #################################################################### - - #################################################################### - # Add POP - if hostdata['pop'] == "N/A": - # Do a Select to see if POP is in the DB - select_head_specification = "SELECT pop " - pop_query = select_head_specification + select_tail_specification + host_id_debug_query = cur.mogrify(host_id_query, host_id_query_params) + logger.debug("Host ID Query : {}".format(host_id_debug_query)) + cur.execute(host_id_query, host_id_query_params) - try: - cur.execute(pop_query) - except Exception as pop_query_error: - logger.error("{}Trouble with query {}:{}{}".format(Fore.RED, pop_query, pop_query_error, Style.RESET_ALL)) - - if not cur.rowcount: - # No Results - pop_data = "NULL" - else: - this_pop_data = cur.fetchone()[0] - pop_data = null_or_value(this_pop_data) + except Exception as insert_update_query_error: + logger.error("{}Trouble with query for {} : {}{}".format(Fore.RED, str(host_id_query), str(e), Style.RESET_ALL)) else: - # POP Data Given Always Overwrite - pop_data = "'{}'".format(hostdata['pop']) - - column_string = column_string + ", 
pop" - values_string = values_string + ", " + pop_data - - # Add srvtype - if hostdata['srvtype'] == "N/A": - # Do a Select to see if SRVTYPE is in the DB - select_head_specification = "SELECT srvtype " - srvtype_query = select_head_specification + select_tail_specificationkj - - try: - cur.execute(srvtype_query) - except Exception as srvtype_select_error: - logger.error("{}Trouble with query {}:{}{}".format(Fore.RED, srvtype_query, srvtype_select_error, Style.RESET_ALL)) - if not cur.rowcount: # No Results - srvtype_data = "NULL" + host_id = None else: - this_srvtype_data = cur.fetchone()[0] - srvtype_data = null_or_value(this_srvtype_data) - else: - # POP Data Given Always Overwrite - srvtype_data = "'{}'".format(hostdata['srvtype']) - - column_string = column_string + ", srvtype" - values_string = values_string + ", " + srvtype_data - - # Add hoststatus - if hostdata['status'] == "N/A": - # Do a Select to see if SRVTYPE is in the DB - select_head_specification = "SELECT hoststatus " - status_query = select_head_specification + select_tail_specification - - try: - cur.execute(status_query) - except Exception as select_hoststatus_error: - logger.error("{}Trouble with query {}:{}{}".format(Fore.RED, status_query, select_hoststatus_error, Style.RESET_ALL)) - - if not cur.rowcount: - # No Results - status_data = "NULL" + # Not a Dict Response + host_id = cur.fetchone()[0] + + logger.debug("Current Host to Insert Id : {}".format(host_id)) + + # Add HostID Data To Columns, Matches and Values + if host_id is not None: + insert_columns.append("host_id") + insert_values.append("%s") + insert_columns_args.append(host_id) + + ## V2 Factors like pop srvtype and the like + for v2factor in [("pop", "pop"), ("srvtype", "srvtype"), ("status", "hoststatus"), ("host_uber_id", "host_uber_id")]: + if hostdata[v2factor[0]] != "N/A" and hostdata[v2factor[0]] is not None: + insert_columns.append(v2factor[1]) + insert_values.append("%s") + insert_columns_args.append(hostdata[v2factor[0]]) else: - this_status_data = cur.fetchone()[0] - status_data = null_or_value(this_status_data) - else: - # POP Data Given Always Overwrite - status_data = "'{}'".format(hostdata['status']) - - column_string = column_string + ", hoststatus" - values_string = values_string + ", " + status_data + logger.warning("No {0} given for host {1}, ignoring {0} column.".format(v2factor[0], hostdata["hostname"])) - # Add host_uber_id - if hostdata['host_uber_id'] == "N/A": - # Do a Select to see if SRVTYPE is in the DB - select_head_specification = "SELECT host_uber_id " - host_uber_id_query = select_head_specification + select_tail_specification + replace_query = "REPLACE into hosts ( {} ) VALUES ( {} )".format(" , ".join(insert_columns), + " , ".join(insert_values)) - try: - cur.execute(host_uber_id_query) - except Exception as select_host_uber_id: - logger.error("{}Trouble with query {}:{}{}".format(Fore.RED, host_uber_id_query, select_host_uber_id, Style.RESET_ALL)) - - if not cur.rowcount: - # No Results - host_uber_id_data = "NULL" - else: - this_host_uber_id_data = cur.fetchone()[0] - host_uber_id_data = null_or_value(this_host_uber_id_data) + try: + replace_query_debug = cur.mogrify(replace_query, insert_columns_args) + logger.debug("Replace Query for Host {} : {}".format(hostdata["hostname"], replace_query_debug)) + cur.execute(replace_query, insert_columns_args) + except Exception as replace_error: + logger.error("Unable to do Replace Query for host {} with error : {}".format(hostdata["hostname"], replace_query_debug)) else: - # POP 
Data Given Always Overwrite - host_uber_id_data = "'{}'".format(hostdata['host_uber_id']) - - column_string = column_string + ", host_uber_id" - values_string = values_string + ", " + host_uber_id_data - #################################################################### + host_id = cur.lastrowid - query_head = "REPLACE into hosts ( " - query_mid = " ) VALUES ( " - query_tail = " ) ; " - query_string = query_head + column_string + query_mid + values_string + query_tail - #print(query_string) - - cur.execute(query_string) - #print("Problem with query : " + query_string ) - #print("Error: " + str(sys.exc_info()[0] ) - - this_row = cur.lastrowid - db_conn.commit() - cur.close() + finally: + db_conn.commit() + cur.close() - return this_row + return host_id +def store_as_SAPI_host(host_id, db_conn, hostname, VERBOSE=False): + ''' + Store as SAPI host. When we have a SAPI host we want to update the sapiActiveHosts table with that hostname + so that we know we don't need to ssh to that in collections. + ''' -def store_as_SAPI_host(host_id, db_conn, hostname, VERBOSE=False): + logger = logging.getLogger("storage.py:store_as_SAPI_host") SAPI_STORE_TIME = int(time()) @@ -478,11 +419,12 @@ def insert_update_collections(db_conn, host_id, results_data, MAX, timestamp, ho cur = db_conn.cursor() - error_count = 0 inserts = 0 updates = 0 + for item in results_data: + if "collection_failed" in results_data[item]: logger.info("{}{}Collection Failed for {} on host: {}{}".format(Back.CYAN, Fore.BLACK, @@ -507,12 +449,10 @@ def insert_update_collections(db_conn, host_id, results_data, MAX, timestamp, ho str(collection_subtype), str(collection_value)] - find_existing_query = "SELECT " - find_existing_query = find_existing_query +\ - " collection_value, collection_id, last_update FROM collection " +\ - "WHERE fk_host_id = %s AND collection_type = %s " +\ - "AND collection_subtype = %s AND collection_value = %s " +\ - " Order by last_update desc limit 1 ; " + find_existing_query = "SELECT {} {} {} {}".format(" collection_value, collection_id, last_update FROM collection ", + "WHERE fk_host_id = %s AND collection_type = %s ", + "AND collection_subtype = %s AND collection_value = %s ", + " Order by last_update desc limit 1 ") try: cur.execute(find_existing_query, find_existing_query_args) From b1aa6b84ec123176bf34bdd3668b5420543c5806 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Sun, 22 Dec 2019 20:36:18 -0800 Subject: [PATCH 005/143] Some Updates * Storagejsonverify tabs to spaces * subtype_large_compare tabs to spaces --- storageJSONVerify.py | 109 +++++++++++++++++++-------------------- subtype_large_compare.py | 92 ++++++++++++++++----------------- 2 files changed, 100 insertions(+), 101 deletions(-) diff --git a/storageJSONVerify.py b/storageJSONVerify.py index e1f2081..36f0cd5 100755 --- a/storageJSONVerify.py +++ b/storageJSONVerify.py @@ -9,76 +9,75 @@ import argparse # System Operations import sys -# Import Schema Parser -# Requires python3-jsonschema installed -import jsonschema # Import import json # Import import os -JSONSCHEMA_FILEPATH="/oc/local/secops/jellyfish2/" -JSONSCHEMA_DEFAULT_SCHEMA=JSONSCHEMA_FILEPATH+"/jellyfish_storage.json.schema" +# Schema Parser +import jsonschema + +JSONSCHEMA_FILEPATH = "/oc/local/secops/jellyfish2/" +JSONSCHEMA_DEFAULT_SCHEMA = JSONSCHEMA_FILEPATH+"/jellyfish_storage.json.schema" if __name__ == "__main__": - parser = argparse.ArgumentParser() + parser = argparse.ArgumentParser() - # -h help - # -s Audit Definitions (Required) - # -j JSON File + # -h help + # 
-s Audit Definitions (Required) + # -j JSON File - parser.add_argument("-s", "--schema", help="JSON Schema File to use for validation. (Default : jellyfish_storage.json.schema)", default=JSONSCHEMA_DEFAULT_SCHEMA ) - parser.add_argument("-j", "--json", help="JSON File to Validate", required="TRUE") + parser.add_argument("-s", "--schema", help="JSON Schema File to use for validation. (Default : jellyfish_storage.json.schema)", default=JSONSCHEMA_DEFAULT_SCHEMA) + parser.add_argument("-j", "--json", help="JSON File to Validate", required="TRUE") - args=parser.parse_args() + args = parser.parse_args() - schema_file=args.schema - json_file=args.json + schema_file = args.schema + json_file = args.json def storageJSONVerify(schema_file, json_file): - if type(json_file) is dict: - # Treat this as the dict itself - this_json = json_file - else: - try: - with open(json_file,"r") as this_json_file: - this_json = json.load(this_json_file) - except ValueError as err: - msg="Error in the Format of your JSON File " + err - return (False, msg) - - #print(this_json) - - if type(schema_file) is dict : - # Treat this as the schema itself - this_schema = schema_file - else: - try: - with open(schema_file,"r") as this_schema_file: - this_schema = json.load(this_schema_file) - except ValueError as err: - msg="Error in the Format of your Schema File: " + str(err) - return (False, msg) - - - #print(this_schema) - - try: - jsonschema.validate(this_json,this_schema) - except jsonschema.exceptions.ValidationError as err: - msg="Error in your JSON File: " + err - return (False, msg) - except jsonschema.exceptions.SchemaError as err: - msg="Error with your Schema File: " + err - return (False, msg) - else: - msg="JSON file passed Schema Validation." - return (True, msg) + ''' + Using the jsonschema file specified check the json bits for compliance + ''' + + if isinstance(json_file, dict): + # Treat this as the dict itself + this_json = json_file + else: + try: + with open(json_file, "r") as this_json_file: + this_json = json.load(this_json_file) + except ValueError as err: + msg = "Error in the Format of your JSON File " + err + return (False, msg) + + if isinstance(schema_file, dict): + # Treat this as the schema itself + this_schema = schema_file + else: + try: + with open(schema_file, "r") as this_schema_file: + this_schema = json.load(this_schema_file) + except ValueError as err: + msg = "Error in the Format of your Schema File: " + str(err) + return (False, msg) + + try: + jsonschema.validate(this_json, this_schema) + except jsonschema.exceptions.ValidationError as err: + msg = "Error in your JSON File: {}".format(err) + return (False, msg) + except jsonschema.exceptions.SchemaError as err: + msg = "Error with your Schema File: {}".format(err) + return (False, msg) + else: + msg = "JSON file passed Schema Validation." 
+ return (True, msg) if __name__ == "__main__": - #"We're going to run the main stuff - result = storageJSONVerify(schema_file, json_file) + #"We're going to run the main stuff + result = storageJSONVerify(schema_file, json_file) - print(result) + print(result) diff --git a/subtype_large_compare.py b/subtype_large_compare.py index 8e5b179..1b5fb29 100644 --- a/subtype_large_compare.py +++ b/subtype_large_compare.py @@ -15,7 +15,7 @@ import pymysql import json import re -import apt_pkg +#import apt_pkg from copy import deepcopy from time import time from time import sleep @@ -26,36 +26,36 @@ from queue import Queue def subtype_large_compare(db_conn, host_list_dict, mtype, ctype, csubtype, mvalue, FRESH): - + # Subtype Large Comparison cur = db_conn.cursor() - + results_dict = deepcopy(host_list_dict) - + # Host Lists host_ids_list = list() fail_hosts = list() success_hosts = list() exempt_hosts = list() - + inserts = 0 updates = 0 - + # Host ID for item in host_list_dict : host_ids_list.append(item['host_id']) - + # Massaging Types - + massaged_ctype = [] massaged_csubtype = [] massaged_mvalue = [] - + # In order for each one. Rehydrate string (with ") and then convert to a tuple - - - + + + if type(ctype) is str : massaged_ctype.append(ctype) else : @@ -68,25 +68,25 @@ def subtype_large_compare(db_conn, host_list_dict, mtype, ctype, csubtype, mvalu else : # Cycle through each subtype list and toss that into csubtype value massaged_csubtype = [ ast.literal_eval('"' + item.replace("," , "\",\"") + '"') for item in csubtype ] - + if type(mvalue) is str : interm_processed_mvalue = ast.literal_eval('"' + mvalue.replace("," , "\",\"") + '"') massaged_mvalue.append(interm_processed_mvalue) else : # Cycle throught the regexp matches and toss that into csubtype value massaged_mvalue = [ ast.literal_eval('"' + item.replace("," , "\",\"") + '"') for item in mvalue ] - + #print(type(massaged_ctype), massaged_ctype) #print(type(massaged_csubtype), massaged_csubtype) #print(type(massaged_mvalue), massaged_mvalue) - + host_id_list_string = ",".join(map(str, host_ids_list)) - + #print(type(host_id_list_string), host_id_list_string) #print("About to build Queries") - - for index_value in range(0, len(massaged_ctype)) : - + + for index_value in range(0, len(massaged_ctype)) : + if mtype == "subnonhere" : # Both do the same thing. 
They just accept either a numerical match or a zero COMBINE = " OR " @@ -100,10 +100,10 @@ def subtype_large_compare(db_conn, host_list_dict, mtype, ctype, csubtype, mvalu COLUMNMATCH = " != " else : raise Exception("Unknown match type ", mtype) - + #print(COMBINE, COLUMNMATCH) # Cycle through each ctype & Do a Collection - + collection = [] collection.append("SELECT fk_host_id, count(DISTINCT(collection_subtype))") collection.append("from collection") @@ -114,42 +114,42 @@ def subtype_large_compare(db_conn, host_list_dict, mtype, ctype, csubtype, mvalu collection.append(" ( collection_type = '" + str(massaged_ctype[index_value]) + "' )") collection.append(" AND ") collection.append(" ( ") - + #print(collection) - + # Grab the Column columns_only = [] - + for sub_index_value in range(0, len(massaged_csubtype[index_value])) : - - + + # Generate My Column Match String if massaged_mvalue[index_value][sub_index_value] == "any" : matchstring = "" else : matchstring = " AND collection_value REGEXP '" + massaged_mvalue[index_value][sub_index_value] + "'" - + #print("Column Match: ", matchstring) - + columnmatch_string = "collection_subtype " + COLUMNMATCH + "'" + massaged_csubtype[index_value][sub_index_value] + "'" - + columns_only.append("( " + columnmatch_string + matchstring + " )") - + #print(columns_only) - + columns_only_string = COMBINE .join(columns_only) - - collection.append(columns_only_string) + + collection.append(columns_only_string) collection.append(" ) ") - + collection.append(" and last_update >= now() - INTERVAL " + str(FRESH) + " SECOND " ) collection.append(" group by fk_host_id order by last_update desc ;" ) - + collection_query = " ".join(collection) - + if len(host_ids_list) > 0 : cur.execute(collection_query) - if cur.rowcount: + if cur.rowcount: query_results_list = cur.fetchall() else : # No Results @@ -157,22 +157,22 @@ def subtype_large_compare(db_conn, host_list_dict, mtype, ctype, csubtype, mvalu query_results_list = [] else : query_results_list = [] - + #print(query_results_list) query_results_list_index = [ host[0] for host in query_results_list ] #print(query_results_list_index) - + exempthost = list() passhost = list() failhost = list() - - try: + + try: if mtype == "subnonhere" : # No exempt hosts. All hosts not in our results pass exempthost = [] passhost = [ host for host in host_ids_list if host not in query_results_list_index ] failhost = [ host[0] for host in query_results_list if host[1] > 0 ] - elif mtype == "suballhere" : + elif mtype == "suballhere" : # All the hosts that Aren't in our query Results Fail exempthost = [ ] # All the hosts in the results whose output matches exactly @@ -189,8 +189,8 @@ def subtype_large_compare(db_conn, host_list_dict, mtype, ctype, csubtype, mvalu raise Exception("Unknown match type. 
Potential Race Condition!") except Exception as e: print("Error Doing Comparisons: ", e) - - + + #print(exempthost, passhost, failhost) for host in range(0,len(results_dict)): @@ -208,11 +208,11 @@ def subtype_large_compare(db_conn, host_list_dict, mtype, ctype, csubtype, mvalu elif results_dict[host]['host_id'] in passhost : results_dict[host]['pfe'] = "pass" results_dict[host]['pfevalue'] = "Subtype Comparison " + mtype + " Passed" - elif results_dict[host]['host_id'] in exempthost : + elif results_dict[host]['host_id'] in exempthost : results_dict[host]['pfe'] = "notafflicted" results_dict[host]['pfevalue'] = "Subtype Comparison " + mtype + " Exempt" except Exception as e : print("Error trying to match items relating to host ", host, " : " , e ) - + cur.close() return results_dict From a245423224581ea6bad7be56a09ec793a1aad45b Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Sun, 22 Dec 2019 20:39:25 -0800 Subject: [PATCH 006/143] Audittools Breakout Rewrote some of the audittools items to make it easier to add new, future audits. Also added some new audits to the --- audittools/__init__.py | 5 + audittools/audit_source.py | 162 ++++++++++ audittools/audits_usn.py | 277 ++++++++++++++++ audittools/cve_class.py | 133 ++++++++ audittools/ubuntu_cve.py | 247 ++++++++++++++ audittools/verifyAudits.py | 301 ++++++++++++++++++ rss_creator.py | 183 +++++++++++ .../audittools.d/ubuntu_usn/USN-4202-2.json | 72 +++++ .../audittools.d/ubuntu_usn/USN-4214-2.json | 78 +++++ .../audittools.d/ubuntu_usn/USN-4216-2.json | 53 +++ .../audittools.d/ubuntu_usn/USN-4217-2.json | 49 +++ .../audittools.d/ubuntu_usn/USN-4219-1.json | 126 ++++++++ .../audittools.d/ubuntu_usn/USN-4220-1.json | 134 ++++++++ .../audittools.d/ubuntu_usn/USN-4221-1.json | 126 ++++++++ .../audittools.d/ubuntu_usn/USN-4222-1.json | 65 ++++ .../audittools.d/ubuntu_usn/USN-4223-1.json | 180 +++++++++++ .../audittools.d/ubuntu_usn/USN-4224-1.json | 138 ++++++++ verifyAudits.py | 187 ----------- 18 files changed, 2329 insertions(+), 187 deletions(-) create mode 100755 audittools/__init__.py create mode 100644 audittools/audit_source.py create mode 100755 audittools/audits_usn.py create mode 100644 audittools/cve_class.py create mode 100755 audittools/ubuntu_cve.py create mode 100755 audittools/verifyAudits.py create mode 100755 rss_creator.py create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4202-2.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4214-2.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4216-2.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4217-2.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4219-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4220-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4221-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4222-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4223-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4224-1.json delete mode 100755 verifyAudits.py diff --git a/audittools/__init__.py b/audittools/__init__.py new file mode 100755 index 0000000..ac74b6a --- /dev/null +++ b/audittools/__init__.py @@ -0,0 +1,5 @@ +from .audit_source import AuditSource +from .verifyAudits import walk_auditd_dir, load_auditfile, verifySingleAudit, verifyAudits +from .cve_class import mowCVE +from 
.audits_usn import AuditSourceUSN +from .ubuntu_cve import mowCVEUbuntu diff --git a/audittools/audit_source.py b/audittools/audit_source.py new file mode 100644 index 0000000..81db986 --- /dev/null +++ b/audittools/audit_source.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python3 + +''' +Create a General Class that can be Generalized to Collect +Audits from Public Sources. +''' + +import logging +import json +import configparser +import os +import os.path + +import requests +import yaml + +from audittools.verifyAudits import verifySingleAudit + +class AuditSource: + + ''' + Class for Manowar Audit Creation + + Provides common fundamentals that can be utilized from different sources. + ''' + + def __init__(self, **kwargs): + + ''' + Initializes a Bare Function + ''' + + self.logger = logging.getLogger("AuditSource") + + self.source_key = kwargs.get("source_key", None) + self.audit_name = kwargs.get("audit_name", None) + + self.audit_filename = kwargs.get("audit_filename", None) + self.audit_path = kwargs.get("audit_path", None) + + self.audit_data = {"vuln-name" : kwargs.get("vuln-name", None), + "vuln-primary-link" : kwargs.get("vuln-primary-link", None), + "vuln-priority" : kwargs.get("vuln-priority", None), + "vuln-additional-links" : kwargs.get("vuln-additional-links", dict()), + "vuln-long-description" : None, + "comparisons" : kwargs.get("comparisons", dict()), + "filters" : kwargs.get("filters", dict()), + "jellyfishversion" : 2} + + def return_audit(self): + + ''' + Returns a Dictionary Form + ''' + + audit_dict = {self.audit_name : self.audit_data} + + return audit_dict + + def validate_audit_written(self): + + if self.audit_filename is None: + raise ValueError("Audit Not Written to File") + + validated = verifySingleAudit(self.audit_filename) + + return validated + + def validate_audit_live(self): + + if self.audit_name is None: + raise ValueError("Audit(s) Not Stored to self.audit_data") + + validated = verifySingleAudit(self.return_audit()) + + return validated + + def assert_writeability(self): + + ''' + Raises an Exception if I can't Write a File + ''' + + if self.audit_filename is None: + raise ValueError("Missing audit_filename") + + if self.audit_path is None: + raise ValueError("Missing Audit Path.") + else: + if os.path.isdir(self.audit_path) is False: + raise FileNotFoundError("Directory {} Doesn't Exist.".format(self.audit_path)) + + if self.audit_name is None: + raise ValueError("Missing Audit Name.") + + + def audit_file_exists(self): + + ''' + Checks to See if a File Eixists + ''' + + exists = False + + self.assert_writeability() + + audit_file = os.path.join(self.audit_path, self.audit_filename) + + if os.path.isfile(audit_file) is True: + exists = True + + return exists + + def write_audit(self, file_format="json"): + + written = [False, "Not Written"] + + self.assert_writeability() + + if file_format not in ("json", "yaml", "ini"): + raise ValueError("File Format Incorrect.") + + if self.validate_audit_live() is False: + self.logger.error("File not Written, Validation Pre-Flight Check Failed.") + written = [False, "Validation Failure"] + elif self.audit_file_exists() is True: + self.logger.error("File not Written, File aready Exists.") + written = [False, "File Pre-Exists"] + else: + # Okay We're Good Let's Write Stuff + audit_file = os.path.join(self.audit_path, self.audit_filename) + + try: + with open(audit_file, "w", encoding="utf8") as audit_file_obj: + if file_format is "json": + json.dump(self.return_audit(), audit_file_obj, sort_keys=True, indent=2) + elif 
file_format is "yaml": + yaml.dump(self.return_audit(), audit_file_obj, default_flow_style=False) + elif file_format is "ini": + parser = configparser.ConfigParser() + + audit_data = self.return_audit() + + for section in audit_data.keys(): + parser[section] = audit_data[section] + + parser.write(audit_file_obj) + + except Exception as error_when_writing: + written = [False, "Error {} When Writing.".format(error_when_writing)] + self.logger.error("Unable to Write Audit {} with Error {}".format(self.source_key, error_when_writing)) + else: + self.logger.debug("Write File for {} to {}".format(self.source_key, audit_file)) + written = [True, "File written to {}".format(audit_file)] + + return written + + + + + + diff --git a/audittools/audits_usn.py b/audittools/audits_usn.py new file mode 100755 index 0000000..228896e --- /dev/null +++ b/audittools/audits_usn.py @@ -0,0 +1,277 @@ +#!/usr/bin/env python3 + +''' +audits_usn.py + +Given a Single USN let's create an manowar audit that represents that. +''' + +import argparse +import json +import logging +import os +import os.path +import time +import re + +import requests + +import audittools.audit_source +import audittools.ubuntu_cve + + + +if __name__ == "__main__" : + parser = argparse.ArgumentParser() + #parser.add_argument("-v", "--verbose", action='store_true', help="Turn on Verbosity") + parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) + parser.add_argument("--nocache", action="store_true", help="Don't use local usn_db.json") + parser.add_argument("--cacheage", default=21600, help="How long (in seconds) to accept local usn_db.json file default 6 hours 21600 seconds") + parser.add_argument("--cachefile", default="/tmp/usn_db.json", help="Location of Cachefile default /tmp/usn_db.json") + parser.add_argument("-o", "--output", default=False, help="File to output to") + parser.add_argument("-p", "--print", action="store_true", help="Print Audit to Screen") + parser.add_argument("-u", "--usn", required=True) + + args = parser.parse_args() + + VERBOSE = len(args.verbose) + + if VERBOSE == 0: + logging.basicConfig(level=logging.ERROR) + elif VERBOSE == 1: + logging.basicConfig(level=logging.WARNING) + elif VERBOSE == 2: + logging.basicConfig(level=logging.INFO) + else: + logging.basicConfig(level=logging.DEBUG) + + USN = args.usn + + CACHEAGE = args.cacheage + CACHEFILE = args.cachefile + + LOGGER = logging.getLogger("audits_usn.py") + + LOGGER.debug("Welcome to Audits USN.") + +class AuditSourceUSN(audittools.audit_source.AuditSource): + + ''' + Implements a Public AuditSource object for Ubuntu Security Notices + ''' + + __usn_regex = "[Uu][Ss][Nn]-\d{4}-\d{1}" + __usn_url = "https://usn.ubuntu.com/usn-db/database.json" + + __default_cachefile = "/tmp/usn_db.json" + __default_cacheage = 21600 + + def __init__(self, **kwargs): + + # Run My Parent Init Function + audittools.audit_source.AuditSource.__init__(self, **kwargs) + + self.cachefile = kwargs.get("cachefile", self.__default_cachefile) + self.cacheage = kwargs.get("cacheage", self.__default_cacheage) + + # Confirm I have a USN + if re.match(self.__usn_regex, self.source_key) is None: + raise ValueError("Source Key Doesn't look like a USN {}".format(str(self.source_key))) + + # Update My Cache + if kwargs.get("ignore_cache", False) is False: + self.handle_local_cache() + + self.usn_data = self.get_usn_data() + + self.populate_audit() + + def populate_audit(self): + + ''' + Assuming I have my USN Data as a Dictionary, Let's populate 
the self.audit_data & self.audit_name items from my + parent function. + ''' + + self.audit_name = self.source_key.upper() + + self.audit_data = {**self.audit_data, + "vuln-name" : self.audit_name, + "vuln-primary-link" : self.usn_data["primary_link"], + "vuln-additional-links" : self.usn_data["reference_links"], + "vuln-short-description" : self.usn_data["isummary"], + "vuln-priority" : self.usn_data["highest_priority"]} + + # Caluclate Priority "vuln-priority" : kwargs.get("vuln-priority", None), + self.audit_data["filters"], self.audit_data["comparisons"] = self.cal_bucket_defs() + + # Calulate Filters & Comparisons + self.audit_data["vuln-long-description"] = "{}\n\nTLDR: {}\n".format(self.usn_data["description"], self.usn_data["action"]) + + + def get_usn_data(self): + + ''' + Turn the USN to an Audit Dictionary of type {"usn-number" : } + ''' + + if self.source_key is None: + raise ValueError("Unknown USN") + + usn_num = "-".join(self.source_key.split("-")[1:]) + + with open(self.cachefile) as cachefile_obj: + try: + all_data = json.load(cachefile_obj) + except Exception as json_fomat_error: + self.logger.error("JSON Formatting Error, Try removing Cache file.") + + try: + usn_data = all_data[usn_num] + + self.logger.debug("Loaded Data for USN {}".format(self.source_key)) + + except Exception as read_json_error: + self.logger.error("Unable to Find/Load USN {}.".format(self.source_key)) + self.logger.debug("Error Found when loading {} : {}".format(self.source_key, read_json_error)) + self.logger.debug("USN Num : {}".format(usn_num)) + + usn_data = None + else: + self.logger.debug("Found Upstream USN Data for the following releases: {}".format(usn_data["releases"].keys())) + + # Parse CVEs + usn_data["parsed_cves"] = list() + usn_data["usn_num"] = usn_num + usn_data["primary_link"] = "https://usn.ubuntu.com/{}/".format(usn_num) + usn_data["reference_links"] = {self.source_key.upper() : usn_data["primary_link"]} + + highest_priority = 1 + + for cve_string in usn_data["cves"]: + try: + this_cve_obj = audittools.ubuntu_cve.mowCVEUbuntu(cve=cve_string) + except ValueError as cve_parse_error: + self.logger.warning("Ignoring CVE of : {}".format(cve_string)) + + # Let's See if Launchpad is at play here + if cve_string.startswith("https://launchpad.net/bugs"): + # It's Launchpad Let's ad the Link when we ignore + lp_num = cve_string.split("/")[-1] + usn_data["reference_links"]["LP : {}".format(lp_num)] = str(cve_string) + else: + # Add it as itself + usn_data["reference_links"][str(cve_string)] = str(cve_string) + + else: + usn_data["parsed_cves"].append(this_cve_obj) + + if int(this_cve_obj.best_numeric_score()) > highest_priority: + highest_priority = int(this_cve_obj.best_numeric_score()) + + # Add CVE Link to References + usn_data["reference_links"]["{}_({})".format(this_cve_obj.cve_id.upper(), this_cve_obj.get_severity().capitalize())] = this_cve_obj.primary_reference + + self.logger.info("{} : {} ({:.2}) added.".format(this_cve_obj.cve_id.upper(), + this_cve_obj.get_severity().capitalize(), + this_cve_obj.best_numeric_score())) + + usn_data["highest_priority"] = highest_priority + + return usn_data + + def cal_bucket_defs(self): + + ''' + Given the data in self.usn_data return the bucket definitions for filters and comparisons tuple of dicts + ''' + + comparisons = dict() + filters = dict() + + for this_release in self.usn_data["releases"].keys(): + + bucket_name = "{}-bucket".format(this_release) + + filters[bucket_name] = {"filter-collection-type" : ["os", "release"], + 
"filter-collection-subtype" : ["default", "default"], + "filter-match-value" : ["Ubuntu", this_release], + "filter-match" : "is"} + + comparisons[bucket_name] = {"comparison-collection-type" : list(), + "comparison-collection-subtype" : list(), + "comparison-match-value" : list(), + "comparison-match" : "aptge"} + + for package in self.usn_data["releases"][this_release]["binaries"].keys(): + # For Each Package populate it's relevant comparison + self.logger.debug("Release {}: Package : {}".format(this_release, package)) + comparisons[bucket_name]["comparison-collection-type"].append("packages") + comparisons[bucket_name]["comparison-collection-subtype"].append(package) + comparisons[bucket_name]["comparison-match-value"].append(self.usn_data["releases"][this_release]["binaries"][package]["version"]) + + return (filters, comparisons) + + def handle_local_cache(self): + + ''' + 1. See if the Cache File is Recent and Exists + 1. Download if Missing + ''' + + now = int(time.time()) + + get_file = False + + if os.path.isfile(self.cachefile): + file_create_time = os.path.getmtime(self.cachefile) + + time_left = file_create_time - (now - self.cacheage) + + self.logger.info("File has {} Seconds before expiration.".format(time_left)) + + if time_left <= 0: + self.logger.info("File {} seconds {} too old. Pulling New Version.".format(abs(time_left), self.cachefile)) + + get_file = True + + else: + self.logger.debug("File {} new enough. {} seconds left.".format(self.cachefile, time_left)) + else: + self.logger.debug("File {} missing. Pulling it.".format(self.cachefile)) + get_file = True + + + if get_file is True: + + with open(self.cachefile, "wb") as new_cachefile: + try: + response = requests.get(self.__usn_url) + except Exception as get_json_error: + self.logger.error("Unable to Get usn db with error : {}".format(get_json_error)) + raise get_json_error + else: + if response.status_code == requests.codes.ok: + self.logger.info("Writing new Cache File.") + new_cachefile.write(response.content) + else: + self.logger.error("Error getting DB. HTTP Response Code {}".format(response.status_code)) + raise ValueError("Response {} Recieved".format(respone.status_code)) + finally: + self.logger.debug("New Cache File Written.") + + +if __name__ == "__main__" : + + my_usn = AuditSourceUSN(source_key=USN) + + validated = my_usn.validate_audit_live() + + LOGGER.info("validated : {}".format(validated)) + + print(json.dumps(my_usn.return_audit())) + + + + + diff --git a/audittools/cve_class.py b/audittools/cve_class.py new file mode 100644 index 0000000..70ce374 --- /dev/null +++ b/audittools/cve_class.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python3 + +''' +A new Ubuntu CVE different than shuttlefish. 
Utilizes Launchpad data to grab Ubuntu +CVE Data +''' + +import time +import logging +import re + +import cvss +import cpe +# Library doesn't exist +# import capec + +class mowCVE: + + ''' + Generic CVE Class To Expand Upon Includes Bits for Auditing + + Creation + ''' + + # Case Insensitive + _okay_severities = ["unknown", "none", "low", "medium", "high", "critical"] + _cve_regex = "[Cc][Vv][Ee]-(\d+)-(\d+)" + + logger = logging.getLogger("mowCVE") + + def __init__(self, cve=None, **kwargs): + + ''' + Initialze a Holder for CVE Things + ''' + + + if cve is None: + raise ValueError("CVE ID Required") + + try: + cve_parts = re.search(self._cve_regex, cve, re.I) + except Exception as cve_parse_error: + self.logger.error("Unable to Parse CVE : {}".format(cve)) + raise ValueError("Badly Formatted CVE") + else: + if cve_parts is not None: + self.cve_id = cve.upper() + self.cve_year = int(cve_parts.group(1)) + self.cve_num = int(cve_parts.group(2)) + else: + raise ValueError("Valid CVE ID Required, Recieved {}".format(cve)) + + self.description = kwargs.get("description", None) + self.title = kwargs.get("title", None) + + if isinstance(kwargs.get("cvss2", None), str): + self.cvss2 = cvss.CVSS2(kwargs["cvss2"]) + elif isinstance(kwargs.get("cvss2", None), cvss.CVSS2): + self.cvss2 = kwargs["cvss2"] + else: + self.cvss2 = None + + if isinstance(kwargs.get("cvss3", None), str): + self.cvss3 = cvss.CVSS3(kwargs["cvss3"]) + elif isinstance(kwargs.get("cvss3", None), cvss.CVSS3): + self.cvss3 = kwargs["cvss3"] + else: + self.cvss3 = None + + self.severity_override = kwargs.get("severity_override", None) + self.score_override = kwargs.get("score_override", None) + self.cpe_list = [cpe.CPE(indv_cpe) for indv_cpe in kwargs.get("cpe_list", list())] + self.capec_list = kwargs.get("capec_list", list()) + self.references = kwargs.get("references", dict()) + self.primary_reference = kwargs.get("primary_reference", None) + self.last_updated = kwargs.get("last_updated", None) + self.published = kwargs.get("published", None) + + # Updated Now! 
+ self.self_updated = int(time.time()) + + # Audit Items + self.filters = kwargs.get("bucket_def", {}) + self.comparisons = kwargs.get("comparisons", {}) + + def get_severity(self): + + ''' + Logic to Return the "Right" Severity" + ''' + + best_severity = "unknown" + # Best Severity + + if self.severity_override is not None and self.severity_override.lower() in self._okay_severities: + best_severity = self.severity_override + elif self.cvss3 is not None: + best_severity = self.cvss3.severities()[-1].lower() + elif self.cvss2 is not None: + cvss2_severity_num = self.cvss2.scores()[-1] + + if cvss2_severity_num <= 3.9: + best_severity = "low" + elif cvss2_severity_num <= 6.9: + best_severity = "medium" + elif cvss2_severity_num <= 10: + best_severity = "high" + + else: + self.logger.warning("No Severity Found returning Unknown") + + return best_severity + + def best_numeric_score(self): + + ''' + Best Out of 10 Numeric Score + ''' + + best_score = 0.1 + # Best Severity + + if isinstance(self.score_override, (float, int)) and self.score_override >= 0.0 and self.score_override <= 10.0: + best_score = self.score_override + elif self.cvss3 is not None: + best_score = self.cvss3.scores()[-1] + elif self.cvss2 is not None: + best_score = self.cvss2.scores()[-1] + else: + self.logger.warning("No Severity Found returning 0.1") + + return best_score diff --git a/audittools/ubuntu_cve.py b/audittools/ubuntu_cve.py new file mode 100755 index 0000000..0175325 --- /dev/null +++ b/audittools/ubuntu_cve.py @@ -0,0 +1,247 @@ +#!/usr/bin/env python3 + +''' +A new Ubuntu CVE different than shuttlefish. Utilizes Launchpad data to grab Ubuntu +CVE Data +''' + +import time +import logging +import argparse +import re +import datetime + +from urllib.parse import urljoin +from configparser import ConfigParser + +import cvss +import cpe +import requests + +# Library doesn't exist +# import capec +import audittools.cve_class + +if __name__ == "__main__" : + parser = argparse.ArgumentParser() + #parser.add_argument("-v", "--verbose", action='store_true', help="Turn on Verbosity") + parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) + parser.add_argument("-c", "--cve", required=True) + + args = parser.parse_args() + + VERBOSE = len(args.verbose) + + if VERBOSE == 0: + logging.basicConfig(level=logging.ERROR) + elif VERBOSE == 1: + logging.basicConfig(level=logging.WARNING) + elif VERBOSE == 2: + logging.basicConfig(level=logging.INFO) + else: + logging.basicConfig(level=logging.DEBUG) + + CVE = args.cve + + LOGGER = logging.getLogger("ubuntu_cve.py") + + LOGGER.debug("Welcome to Ubuntu CVE.") + +class mowCVEUbuntu(audittools.cve_class.mowCVE): + + ''' + Ubuntu CVE Class that Updates mowCVE with Data from CVE + ''' + + # Case Insensitive + #__okay_severities = ["unknown", "none", "low", "medium", "high", "critical"] + + __ubuntu_cve_endpoint = "https://git.launchpad.net/ubuntu-cve-tracker/plain/active/" + + def __init__(self, cve=None, **kwargs): + + ''' + Initialze a Holder for CVE Things + ''' + + if cve is None: + raise ValueError("CVE ID Required") + + audittools.cve_class.mowCVE.__init__(self, cve=cve, **kwargs) + + ''' + self.description = kwargs.get("description", None) + self.title = kwargs.get("title", None) + self.cvss2 = cvss.CVSS2(kwargs.get("cvss2", None)) + self.cvss3 = cvss.CVSS3(kwargs.get("cvss3", None)) + self.severity_override = kwargs.get("severity_override", None) + self.score_override = kwargs.get("score_override", None) + self.cpe_list = 
[cpe.CPE(indv_cpe) for indv_cpe in kwargs.get("cpe_list", list())] + self.capec_list = kwargs.get("capec_list", list()) + self.references = kwargs.get("references", dict()) + self.primary_reference = kwargs.get("primary_reference", None) + self.last_updated = kwargs.get("last_updated", None) + self.published = kwargs.get("published", None) + + # Updated Now! + self.self_updated = int(time.time()) + + # Audit Items + self.filters = kwargs.get("bucket_def", {}) + self.comparisons = kwargs.get("comparisons", {}) + ''' + + # Don't Run When Testing + if kwargs.get("test", False) is False: + self.pull_ubuntu_cve() + + def pull_ubuntu_cve(self): + + ''' + Reach out, Grab the CVE Data and Parse it + ''' + + ubuntu_url = urljoin(self.__ubuntu_cve_endpoint, self.cve_id) + + parsed_data = None + + try: + response = requests.get(ubuntu_url) + except Exception as url_error: + self.logger.error("Unable to Query Ubuntu for CVE : {}".format(self.cve_id)) + self.logger.debug("Query Error for CVE {} : {}".format(self.cve_id, url_error)) + else: + + if response.status_code == requests.codes.ok: + # Good Data + data = response.text + + try: + + # Manipulate For Config Parser + data = "[{}]\n".format(self.cve_id) + data + + cve_data_parsed = ConfigParser() + cve_data_parsed.read_string(data) + except Exception as parsing_error: + self.logger.error("Unable to Read CVE {} with Error {}".format(self.cve_id, parsing_error)) + else: + self.logger.debug("Pulled Data from Ubuntu about CVE {}".format(self.cve_id)) + parsed_data = dict(cve_data_parsed._sections[self.cve_id]) + + self.enhance_cve(parsed_cve_data=parsed_data, ubuntu_url=ubuntu_url) + + elif response.status_code == 404: + self.logger.warning("CVE {} Not found on Ubuntu Site.".format(self.cve_id)) + else: + self.logger.error("CVE {} unable to Query for CVE Recieved {}".format(self.cve_id, response.status_code)) + finally: + pass + + + def enhance_cve(self, parsed_cve_data=None, ubuntu_url=None): + + ''' + Takes the parsed Data and Updates all the various bits + ''' + + self.title = "Ubuntu's {}".format(self.cve_id) + self.description = parsed_cve_data["description"] + + if parsed_cve_data["priority"] in self._okay_severities: + self.severity_override = parsed_cve_data["priority"] + self.score_override = self._okay_severities.index(parsed_cve_data["priority"]) * 1.9 + + readable_url = "https://people.canonical.com/~ubuntu-security/cve/{}/CVE-{}-{}.html".format(self.cve_year, self.cve_year, self.cve_num) + self.references = {"{} Data".format(self.title) : ubuntu_url, self.title : readable_url} + self.primary_reference = readable_url + + for reference_url in parsed_cve_data["references"].split(): + if len(reference_url) >= 0: + self.references[reference_url] = reference_url + + try: + updated_date = datetime.datetime.strptime(parsed_cve_data["publicdate"], "%Y-%m-%d %H:%M:%S UTC") + except Exception as date_error: + self.logger.warning("Unable to Read date of {}".format(parsed_cve_data["publicdate"])) + self.logger.debug("Date Error {}".format(date_error)) + else: + self.last_updated = int(updated_date.timestamp()) + + ## Now Let's Do Filters and Comparisons + + # Getting Package Patches Tracked in This CVE + filters = dict() + comparisons = dict() + + any_patches = False + + for potential_pkg in parsed_cve_data.keys(): + if potential_pkg.startswith("patches_"): + # Found a Patch + this_package_name = potential_pkg.split("_")[1] + + # Now Search for All the Release that this Patch has been Seen in. 
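+                    # Illustrative shapes only (not taken from a real tracker file): the parsed data
+                    # typically carries a marker key like "patches_openssl" for each tracked source
+                    # package plus per-release status keys such as "bionic_openssl", whose value may
+                    # read "released (1.1.1-1ubuntu2.1)".  The loop below walks every key ending in
+                    # this package name, takes the text before the first underscore as the release,
+                    # and pulls the fixed version out of any "released (...)" status string.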
+ for potential_package_patch in parsed_cve_data.keys(): + if potential_package_patch.endswith(this_package_name): + + this_release = potential_package_patch.split("_")[0] + + if this_release.endswith("/esm"): + # I have an ESM Package Alter the this_release value + this_release = this_release.split("/")[0] + + + if this_release in ("upstream", "devel", "patches"): + # Ignore the Upstream Data + pass + else: + # Legit Thing + status_string = parsed_cve_data[potential_package_patch] + + + try: + fixed_version = re.search("released \((.+)\)", status_string, re.IGNORECASE).group(1) + except Exception as fixed_version_error: + self.logger.debug("Unable to Find version for {} in {}".format(this_package_name, this_release)) + else: + self.logger.info("{} Found a Patch for Package Named {} in release {}".format(self.cve_id, this_package_name, this_release)) + + any_patches = True + # I have a Fixed Item + + # Okay Now I have a Patched Item. I need to both ensure that + # I have a filter/bucket for my this_release and create/add this package + # to my comparison/bucket for this package + bucket_name = "{}-bucket".format(this_release) + if bucket_name not in filters.keys(): + # This is the First Time I'm seeing this Release + filters[bucket_name] = {"filter-collection-type" : ["os", "release"], + "filter-collection-subtype" : ["default", "default"], + "filter-match-value" : ["Ubuntu", this_release], + "filter-match" : "is"} + + comparisons[bucket_name] = {"comparison-collection-type" : list(), + "comparison-collection-subtype" : list(), + "comparison-match-value" : list(), + "comparison-match" : "aptge"} + + # I've Added nake filters and comparisons for my bucket so + comparisons[bucket_name]["comparison-collection-type"].append("packages") + comparisons[bucket_name]["comparison-collection-subtype"].append(this_package_name) + comparisons[bucket_name]["comparison-match-value"].append(fixed_version) + + if any_patches is True: + self.filters = filters + self.comparisons = comparisons + else: + self.logger.warning("No Active Patches found for this CVE") + +if __name__ == "__main__" : + + my_usn = mowCVEUbuntu(cve=CVE) + + print(my_usn.comparisons) + #print(my_usn.get_severity()) + #print(my_usn.best_numeric_score()) + diff --git a/audittools/verifyAudits.py b/audittools/verifyAudits.py new file mode 100755 index 0000000..871ef81 --- /dev/null +++ b/audittools/verifyAudits.py @@ -0,0 +1,301 @@ +#!/usr/bin/env python3 + +''' +Copyright 2018, VDMS +Licensed under the terms of the BSD 2-clause license. See LICENSE file for terms. +''' + +# verifyAudits.py - Designed to verify that all of our audit files +# are good to go. 
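+#
+# Whatever the on-disk format (.ini, .yaml or .json), an audit is expected to flatten into
+# a dict shaped roughly like this illustrative (made-up) sketch, and every bucket named
+# under "comparisons" must have a matching bucket under "filters":
+#
+#   {"example-audit": {"vuln-name": "example-audit",
+#                      "vuln-short-description": "...",
+#                      "vuln-primary-link": "https://example.invalid/",
+#                      "vuln-long-description": "...",
+#                      "vuln-priority": 5,
+#                      "vuln-additional-links": {},
+#                      "filters": {"bionic-bucket": {"filter-match": "is"}},
+#                      "comparisons": {"bionic-bucket": {"comparison-match": "aptge"}}}}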
+ + +# Run through Analysis +import os +import json +import logging +import ast +import argparse +import sys + +from time import time +from configparser import ConfigParser + +import yaml + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + + parser.add_argument("-a", "--auditdir", help="Directory that Contains the audits", required=True) + parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) + + # Parser Args + args = parser.parse_args() + + # Massage Configdir to not include trailing / + if args.auditdir[-1] == "/": + CONFIGDIR = args.auditdir[0:-1] + else: + CONFIGDIR = args.auditdir + + VERBOSE = len(args.verbose) + + if VERBOSE == 0: + logging.basicConfig(level=logging.ERROR) + elif VERBOSE == 1: + logging.basicConfig(level=logging.WARNING) + elif VERBOSE == 2: + logging.basicConfig(level=logging.INFO) + else: + logging.basicConfig(level=logging.DEBUG) + + LOGGER = logging.getLogger("verifyAudits.py") + +def walk_auditd_dir(configdirs): + + ''' + Walk the Auditd Dir and get all the valid types + ''' + + logger = logging.getLogger("verifyAudits.py:walk_auditd_dir") + + __acceptable_filename = [".ini", ".yaml", ".json"] + + auditfiles_found = list() + + if isinstance(configdirs, str): + all_configdirs = [configdirs] + elif isinstance(configdirs, (list, tuple)): + all_configdirs = configdirs + + for configdir in all_configdirs: + for (dirpath, dirnames, filenames) in os.walk(configdir): + + for singlefile in filenames: + onefile = dirpath + "/" + singlefile + #print(singlefile.find(".ini", -4)) + added = False + for filetype in __acceptable_filename: + if singlefile.endswith(filetype) is True: + # File ends with .ini Last 4 chars + auditfiles_found.append(onefile) + added = True + + if added is False: + logger.info("File {} Found in Audit Config Dir that's not being processed.".format(onefile)) + logger.debug("Current Dir {}".format(dirnames)) + + return auditfiles_found + + +def load_auditfile(auditfile): + + ''' + Loads a single auditfile with possibly more than one audit per file. 
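+
+    Illustrative call (the path is hypothetical):
+
+        audits = load_auditfile("audits.d/example_audit.yaml")
+        # -> {"example-audit": {"filename": "audits.d/example_audit.yaml",
+        #                       "vuln-name": "example-audit", ...}}
+
+    Each audit section found in the file becomes one key of the returned dict with the
+    source filename injected under "filename"; a file that fails to parse yields an
+    empty dict.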
+ ''' + + logger = logging.getLogger("verifyAudits.py:load_auditfile") + + # Config Defaults For .ini file replacement + + this_time = int(time()) + back_week = this_time-604800 + back_month = this_time-2628000 + back_quarter = this_time-7844000 + back_year = this_time-31540000 + back_3_year = this_time-94610000 + time_defaults = {"now" : str(this_time), + "weekago" : str(back_week), + "monthago" : str(back_month), + "quarterago" : str(back_quarter), + "yearago" : str(back_year), + "threeyearago" : str(back_3_year)} + + audits = dict() + + logger.debug("Attempting to Load Audits from Auditfile : {}".format(auditfile)) + + try: + if os.path.isfile(auditfile): + if auditfile.endswith(".yaml") is True: + # YAML Parse + with open(auditfile) as yaml_file: + this_audit_config = yaml.safe_load(yaml_file) + elif auditfile.endswith(".json") is True: + with open(auditfile) as json_file: + this_audit_config = json.load(json_file) + elif auditfile.endswith(".ini", -4) is True: + this_audit_config = ConfigParser(time_defaults) + this_audit_config.read(auditfile) + else: + raise ValueError("Unknown File Type : {}".format(auditfile)) + else: + raise NotFounFoundError("Unknown File : {}".format(auditfile)) + + except Exception as parse_exception: + # Error if Parse + logger.error("File {} not parsed because of {}".format(auditfile, parse_exception)) + else: + # It's good so toss that shit in + + if auditfile.endswith(".ini"): + + for section in this_audit_config: + if section not in ["GLOBAL", "DEFAULT"]: + audits[section] = dict() + + # Add Defaults Info + audits[section]["filename"] = auditfile + + # Items this_audit_config + for item in this_audit_config[section]: + onelinethisstuff = "".join(this_audit_config[section][item].splitlines()) + try: + if item == "vuln-long-description": + audits[section][item] = ast.literal_eval("'''{}'''".format(onelinethisstuff)) + else: + audits[section][item] = ast.literal_eval(onelinethisstuff) + except Exception as ast_error: + logger.error("Verification ini style Failed. Use verifyAudits.py for more details") + logger.debug("INI Exception on AST Parsing {}:{} : {}".format(section, item, ast_error)) + + else: + # This is a YAML or JSON file just use it + for section in this_audit_config.keys(): + if isinstance(this_audit_config[section], dict): + + # Add Default Filename and Data + audits[section] = {"filename" : auditfile, **this_audit_config[section]} + + logger.info("File {} Returning {} Audits.".format(auditfile, len(audits.keys()))) + logger.debug("File {} Returning Audits {}".format(auditfile, ",".join(audits.keys()))) + + return audits + +def verifySingleAudit(auditfile): + + ''' + Verifies a Single Already Parsed Audit or Given Auditfile + + Utilizes Load Function for Loading a String. 
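+
+    Accepts either a path (str), which is first run through load_auditfile(), or an
+    already-parsed dict of audits, and returns True only when every audit passes the
+    type, field-name and filter/comparison bucket checks below. Illustrative calls
+    (paths and names are hypothetical):
+
+        verifySingleAudit("audits.d/example_audit.ini")
+        verifySingleAudit({"example-audit": {...}})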
+    '''
+
+    logger = logging.getLogger("verifyAudits.py:verifySingleAudit")
+
+    field_strings = ["vuln-name", "vuln-short-description", "vuln-primary-link", "vuln-long-description"]
+    field_ints = ["vuln-priority"]
+    field_ints_optional = ["now", "monthago", "threeyearago", "quarterago", "weekago", "yearago"]
+    field_dicts = ["vuln-additional-links", "filters", "comparisons"]
+    field_uncontrolled = ["filename", "jellyfishversion"]
+    max_fields = field_strings + field_ints + field_dicts + field_uncontrolled + field_ints_optional
+    required_fields = field_strings + field_ints + field_dicts
+
+    fields_checked = []
+
+    verified = True
+
+    if isinstance(auditfile, str):
+        this_audit_config = load_auditfile(auditfile)
+        audit_file_name = auditfile
+    elif isinstance(auditfile, dict):
+        this_audit_config = auditfile
+        audit_file_name = "digital"
+    else:
+        raise TypeError("Unknown Auditfile Type.")
+
+    # Now let's do the Checks
+    for section in this_audit_config.keys():
+        # I let "Global or Default" fly right now
+        if section not in ["GLOBAL", "DEFAULT"]:
+
+            logger.debug("Validating Audit with Name {}".format(section))
+
+            # Reset per audit so the field-count check below only covers this section
+            fields_checked = []
+
+            ##### Parse Check ########
+            filter_object = dict()
+            comparison_object = dict()
+
+            for item in this_audit_config[section]:
+                fields_checked.append(item)
+
+                if item in field_strings:
+                    if isinstance(this_audit_config[section][item], str) is False:
+                        logger.error("Issue with file {} audit {} item {} Type is not string.".format(audit_file_name, section, item))
+                        logger.debug("Type {} / Value {}".format(type(this_audit_config[section][item]), this_audit_config[section][item]))
+                        verified = False
+                elif item in field_ints:
+                    if isinstance(this_audit_config[section][item], int) is False:
+                        logger.error("Issue with file {} audit {} item {} Type is not int.".format(audit_file_name, section, item))
+                        logger.debug("Type {} / Value {}".format(type(this_audit_config[section][item]), this_audit_config[section][item]))
+                        verified = False
+                elif item in field_dicts:
+                    if isinstance(this_audit_config[section][item], dict) is False:
+                        logger.error("Issue with file {} audit {} item {} Type is not dict.".format(audit_file_name, section, item))
+                        logger.debug("Type {} / Value {}".format(type(this_audit_config[section][item]), this_audit_config[section][item]))
+                        verified = False
+                    if item == "filters":
+                        filter_object = this_audit_config[section][item]
+                    if item == "comparisons":
+                        comparison_object = this_audit_config[section][item]
+                elif item in field_uncontrolled:
+                    # Uncontrolled fields are not validated
+                    pass
+                else:
+                    # Auto error: unknown field
+                    logger.error("Issue with file {} audit {} item {} field is Unknown.".format(audit_file_name, section, item))
+                    verified = False
+
+            ## Compare buckets: every comparison bucket needs a filter bucket and vice versa
+            comparison_okay = [bucket for bucket in comparison_object.keys() if bucket not in filter_object.keys()]
+            filter_okay = [bucket for bucket in filter_object.keys() if bucket not in comparison_object.keys()]
+
+            if len(comparison_okay) > 0 or len(filter_okay) > 0:
+
+                logger.error("Issue with file {} audit {} Mismatch on Filters/Buckets".format(audit_file_name, section))
+                logger.debug("Bad filters = {}".format(" , ".join(filter_okay)))
+                logger.debug("Bad Comparisons = {}".format(", ".join(comparison_okay)))
+
+                verified = False
+
+            ## Check Counts
+            if len(fields_checked) > len(max_fields) or len(fields_checked) < len(required_fields):
+
+                missing_fields = [field for field in required_fields if field not in fields_checked]
+                extra_fields = [field for field in fields_checked if field not in max_fields]
+
+ logger.error("Issue with file {} audit {} has missing or extra fields.".format(audit_file_name, section)) + logger.debug("Extra Fields = {}".format(" , ".join(extra_fields))) + logger.debug("Missing Fields = {}".format(" , ".join(missing_fields))) + + verified = False + + return verified + +def verifyAudits(CONFIGDIR): + + ''' + Verify All the Audits in the Given Auditdir + ''' + + currently_verified = True + + # Grab all my Audits in CONFIGDIR Stuff + auditfiles = walk_auditd_dir(CONFIGDIR) + + #audits=dict() + for auditfile in auditfiles: + if verifySingleAudit(auditfile) is False: + currently_verified = True + + return currently_verified + + +if __name__ == "__main__": + okay = verifyAudits(CONFIGDIR) + + if okay is True: + LOGGER.info("Audits Okay") + sys.exit(0) + else: + LOGGER.info("Audits checks Failed") + sys.exit(1) diff --git a/rss_creator.py b/rss_creator.py new file mode 100755 index 0000000..2dc7292 --- /dev/null +++ b/rss_creator.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python3 + +import argparse +import os +import os.path +import logging +import re +import json + +import feedparser +import pyjq + +import audittools.audits_usn + + +_known_feeds = {"usn" : {"url" : "https://usn.ubuntu.com/usn/atom.xml", + "subdir" : "ubuntu_usn", + "jq_obj_source_key" : ".title", + "regex_obj_source_key" : r"(USN-\d{1,4}-\d{1,2})", + "update_existing" : False, + "audit_source_obj" : audittools.audits_usn.AuditSourceUSN, + "format" : "json" + } + } + + +if __name__ == "__main__" : + parser = argparse.ArgumentParser() + #parser.add_argument("-v", "--verbose", action='store_true', help="Turn on Verbosity") + parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) + parser.add_argument("-b", "--basedir", default=os.getcwd()) + parser.add_argument("-f", "--feed", required=True) + parser.add_argument("-C", "--confirm", action="store_true", default=False) + parser.add_argument("-m", "--max", type=int, default=5) + + args = parser.parse_args() + + VERBOSE = len(args.verbose) + + if VERBOSE == 0: + logging.basicConfig(level=logging.ERROR) + elif VERBOSE == 1: + logging.basicConfig(level=logging.WARNING) + elif VERBOSE == 2: + logging.basicConfig(level=logging.INFO) + else: + logging.basicConfig(level=logging.DEBUG) + + LOGGER = logging.getLogger("audits_usn.py") + + LOGGER.debug("Welcome to Audits RSS Creator.") + + FEED = args.feed + CONFIRM = args.confirm + MAX = args.max + + BASEDIR = args.basedir + + if FEED not in _known_feeds.keys(): + LOGGER.error("Unknown Feed Definition : {}".format(FEED)) + LOGGER.info("Currently Known Feeds {}".format(",".join(_known_feeds.keys()))) + sys.exit(1) + +def feed_create(feed_name, feed_config=None, basedir=None, confirm=False, max_audit=5): + + ''' + Using the Configuration Specified, Query the RSS Feed and Create Audits for Missing + Entries. 
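+
+    Illustrative call (the base directory is hypothetical):
+
+        results = feed_create("usn", basedir="/srv/audits", confirm=True, max_audit=5)
+
+    The return value is a dict keyed by source key (e.g. "USN-4224-1") whose values are
+    the [written, reason] pairs recorded while walking the feed entries.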
+ ''' + + logger = logging.getLogger("rss_creator.py:feed_create") + + audit_source_items = dict() + + if feed_config is None: + logger.debug("Feed Config Not Given, Choosing {} from Global Config.".format(feed_name)) + feed_config = _known_feeds[feed_name] + + this_path = os.path.join(basedir, feed_config.get("subdir", feed_name)) + + if os.path.isdir(basedir) is False: + # Base Directory Exists + logger.error("Base Path of {} Doesn't Exist.") + + raise FileNotFoundError("Base Path Missing") + + if os.path.isdir(this_path) is False: + + logger.warning("Subdirectory doesn't exist attempting to Create") + + try: + os.mkdir(this_path) + except Exception as subdir_error: + logger.error("Error when creating subdirectory : {}".format(subdir_error)) + + raise subdir_error + + # I have a valid place to Put my Stuff. Let's Grab my URL + try: + feed_obj = feedparser.parse(feed_config["url"]) + except Exception as feed_read_error: + logger.error("Unable to Read RSS Feed Returning Empty") + feed_obj = {"entries" : list()} + + + if len(feed_obj["entries"]) == 0: + logger.warning("No Entries in Given URL.") + else: + # Have Entries Let's give this a whirl + current_num = 0 + + for entry in feed_obj["entries"]: + current_num = current_num + 1 + + best_source_key = None + + if "jq_obj_source_key" in feed_config.keys(): + # I have JQ to Try + jq_result = pyjq.one(feed_config["jq_obj_source_key"], entry) + + if jq_result is not None: + best_source_key = jq_result + + logger.debug("Best Source key After JQ : {}".format(best_source_key)) + + if "regex_obj_source_key" in feed_config.keys(): + + regex_result = re.search(feed_config["regex_obj_source_key"], str(best_source_key), re.I) + + if regex_result is not None: + best_source_key = regex_result.group(1) + + logger.debug("Best Source key After Regex : {}".format(best_source_key)) + + + + if best_source_key is not None and len(best_source_key) > 0: + + as_kwargs = {"source_key" : best_source_key, + "audit_filename" : "{}.{}".format(best_source_key, feed_config["format"]), + "audit_path" : this_path + } + + as_args = list() + + as_obj = feed_config["audit_source_obj"](*as_args, **as_kwargs) + + if as_obj.validate_audit_live() is True: + + # See if File Exists + if as_obj.audit_file_exists() is False: + # Add to Object + if confirm is False: + logger.info("Audit {} File Not Written to {} Confirm not Set.".format(best_source_key, as_obj.audit_filename)) + audit_source_items[best_source_key] = ["False", "Confirm not Set"] + else: + logger.info("Audit {} Writing to {}.".format(best_source_key, as_obj.audit_filename)) + + audit_source_items[best_source_key] = as_obj.write_audit(file_format=feed_config["format"]) + else: + logger.info("Audit File {} Has existing File.".format(best_source_key)) + audit_source_items[best_source_key] = [False, "Pre-Existing File."] + else: + logger.warning("Audit Finding for Source {} Not Valid.".format(best_source_key)) + audit_source_items[best_source_key] = [False, "Invalid Audit on Creation"] + + else: + logger.warning("No Source Key found for Entry : {}".format(entry["id"])) + + + + if max_audit is not None and max_audit != -1 and current_num > (max_audit - 1): + logger.info("Reached Maximum of {} Audits Processed.".format(current_num)) + break + + return audit_source_items + +if __name__ == "__main__" : + + # Run the Thing + results = feed_create(FEED, basedir=BASEDIR, confirm=CONFIRM, max_audit=MAX) + + print(json.dumps(results, indent=2)) diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4202-2.json 
b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4202-2.json new file mode 100644 index 0000000..032604b --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4202-2.json @@ -0,0 +1,72 @@ +{ + "USN-4202-2": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "thunderbird" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:68.2.2+build1-0ubuntu0.18.04.1" + ] + }, + "eoan-bucket": { + "comparison-collection-subtype": [ + "thunderbird" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:68.2.2+build1-0ubuntu0.19.10.1" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "eoan-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "eoan" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "LP : 1854150": "https://launchpad.net/bugs/1854150", + "USN-4202-2": "https://usn.ubuntu.com/4202-2/" + }, + "vuln-long-description": "USN-4202-1 fixed vulnerabilities in Thunderbird. After upgrading, Thunderbird\ncreated a new profile for some users. This update fixes the problem.\n\nWe apologize for the inconvenience.\n\nOriginal advisory details:\n\n It was discovered that a specially crafted S/MIME message with an inner\n encryption layer could be displayed as having a valid signature in some\n circumstances, even if the signer had no access to the encrypted message.\n An attacker could potentially exploit this to spoof the message author.\n (CVE-2019-11755)\n \n Multiple security issues were discovered in Thunderbird. If a user were\n tricked in to opening a specially crafted website in a browsing context,\n an attacker could potentially exploit these to cause a denial of service,\n bypass security restrictions, bypass same-origin restrictions, conduct\n cross-site scripting (XSS) attacks, or execute arbitrary code.\n (CVE-2019-11757, CVE-2019-11758, CVE-2019-11759, CVE-2019-11760,\n CVE-2019-11761, CVE-2019-11762, CVE-2019-11763, CVE-2019-11764)\n \n A heap overflow was discovered in the expat library in Thunderbird. If a\n user were tricked in to opening a specially crafted message, an attacker\n could potentially exploit this to cause a denial of service, or execute\n arbitrary code. 
(CVE-2019-15903)\n\n\nTLDR: After a standard system update you need to restart Thunderbird to make\nall the necessary changes.\n\n", + "vuln-name": "USN-4202-2", + "vuln-primary-link": "https://usn.ubuntu.com/4202-2/", + "vuln-priority": 1, + "vuln-short-description": "USN-4202-1 caused a regression in Thunderbird.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4214-2.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4214-2.json new file mode 100644 index 0000000..fb069df --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4214-2.json @@ -0,0 +1,78 @@ +{ + "USN-4214-2": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "librabbitmq4", + "amqp-tools" + ], + "comparison-collection-type": [ + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "0.8.0-1ubuntu0.18.04.2", + "0.8.0-1ubuntu0.18.04.2" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "amqp-tools", + "librabbitmq-dev" + ], + "comparison-collection-type": [ + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "0.7.1-1ubuntu0.2", + "0.7.1-1ubuntu0.2" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-18609_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-18609.html", + "USN-4214-2": "https://usn.ubuntu.com/4214-2/" + }, + "vuln-long-description": "USN-4214-1 fixed a vulnerability in RabbitMQ. 
This update provides\nthe corresponding updates for Ubuntu 16.04 LTS and Ubuntu 18.04 LTS.\n\nOriginal advisory details:\n\n It was discovered that RabbitMQ incorrectly handled certain inputs.\n An attacker could possibly use this issue to execute arbitrary code.\n\n\nTLDR: In general, a standard system update will make all the necessary changes.\n\n", + "vuln-name": "USN-4214-2", + "vuln-primary-link": "https://usn.ubuntu.com/4214-2/", + "vuln-priority": 5, + "vuln-short-description": "RabbitMQ could be made to execute arbitrary code if it received\na specially crafted input.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4216-2.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4216-2.json new file mode 100644 index 0000000..19a93d4 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4216-2.json @@ -0,0 +1,53 @@ +{ + "USN-4216-2": { + "comparisons": { + "xenial-bucket": { + "comparison-collection-subtype": [ + "firefox" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "71.0+build5-0ubuntu0.16.04.1" + ] + } + }, + "filters": { + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-11745_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-11745.html", + "CVE-2019-11756_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-11756.html", + "CVE-2019-17005_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17005.html", + "CVE-2019-17008_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17008.html", + "CVE-2019-17010_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17010.html", + "CVE-2019-17011_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17011.html", + "CVE-2019-17012_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17012.html", + "CVE-2019-17013_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17013.html", + "CVE-2019-17014_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17014.html", + "USN-4216-2": "https://usn.ubuntu.com/4216-2/" + }, + "vuln-long-description": "USN-4216-1 fixed vulnerabilities in Firefox. This update provides the\ncorresponding update for Ubuntu 16.04 LTS.\n\nOriginal advisory details:\n\n Multiple security issues were discovered in Firefox. 
If a user were\n tricked in to opening a specially crafted website, an attacker could\n potentially exploit these to cause a denial of service, obtain sensitive\n information, or execute arbitrary code.\n\n\nTLDR: After a standard system update you need to restart Firefox to make\nall the necessary changes.\n\n", + "vuln-name": "USN-4216-2", + "vuln-primary-link": "https://usn.ubuntu.com/4216-2/", + "vuln-priority": 5, + "vuln-short-description": "Firefox could be made to crash or run programs as your login if it\nopened a malicious website.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4217-2.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4217-2.json new file mode 100644 index 0000000..8b96dd8 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4217-2.json @@ -0,0 +1,49 @@ +{ + "USN-4217-2": { + "comparisons": { + "trusty-bucket": { + "comparison-collection-subtype": [ + "libsmbclient", + "samba" + ], + "comparison-collection-type": [ + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "2:4.3.11+dfsg-0ubuntu0.14.04.20+esm4", + "2:4.3.11+dfsg-0ubuntu0.14.04.20+esm4" + ] + } + }, + "filters": { + "trusty-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "trusty" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-14861_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14861.html", + "CVE-2019-14870_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14870.html", + "USN-4217-2": "https://usn.ubuntu.com/4217-2/" + }, + "vuln-long-description": "USN-4217-1 fixed several vulnerabilities in Samba. This update provides\nthe corresponding update for Ubuntu 14.04 ESM.\n\nOriginal advisory details:\n\n Andreas Oster discovered that the Samba DNS management server incorrectly\n handled certain records. An authenticated attacker could possibly use this\n issue to crash Samba, resulting in a denial of service. 
(CVE-2019-14861)\n\n Isaac Boukris discovered that Samba did not enforce the Kerberos\n DelegationNotAllowed feature restriction, contrary to expectations.\n (CVE-2019-14870)\n\n\nTLDR: In general, a standard system update will make all the necessary changes.\n\n", + "vuln-name": "USN-4217-2", + "vuln-primary-link": "https://usn.ubuntu.com/4217-2/", + "vuln-priority": 5, + "vuln-short-description": "Several security issues were fixed in Samba.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4219-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4219-1.json new file mode 100644 index 0000000..1e369f5 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4219-1.json @@ -0,0 +1,126 @@ +{ + "USN-4219-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "libssh-4" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "0.8.0~20170825.94fa1e38-1ubuntu0.5" + ] + }, + "disco-bucket": { + "comparison-collection-subtype": [ + "libssh-4" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "0.8.6-3ubuntu0.3" + ] + }, + "eoan-bucket": { + "comparison-collection-subtype": [ + "libssh-4" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "0.9.0-1ubuntu1.3" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "libssh-4" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "0.6.3-4.3ubuntu0.5" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "disco-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "disco" + ] + }, + "eoan-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "eoan" + ] + }, + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-14889_(Unknown)": null, + "USN-4219-1": "https://usn.ubuntu.com/4219-1/" + }, + "vuln-long-description": "It was discovered that libssh incorrectly handled certain scp commands. 
If\na user or automated system were tricked into using a specially-crafted scp\ncommand, a remote attacker could execute arbitrary commands on the server.\n\n\nTLDR: In general, a standard system update will make all the necessary changes.\n\n", + "vuln-name": "USN-4219-1", + "vuln-primary-link": "https://usn.ubuntu.com/4219-1/", + "vuln-priority": 1, + "vuln-short-description": "libssh could be made to run programs under certain conditions.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4220-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4220-1.json new file mode 100644 index 0000000..ddce74a --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4220-1.json @@ -0,0 +1,134 @@ +{ + "USN-4220-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "git" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:2.17.1-1ubuntu0.5" + ] + }, + "disco-bucket": { + "comparison-collection-subtype": [ + "git" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:2.20.1-2ubuntu1.19.04.1" + ] + }, + "eoan-bucket": { + "comparison-collection-subtype": [ + "git" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:2.20.1-2ubuntu1.19.10.1" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "git" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:2.7.4-0ubuntu1.7" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "disco-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "disco" + ] + }, + "eoan-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "eoan" + ] + }, + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-1348_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-1348.html", + "CVE-2019-1349_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-1349.html", + "CVE-2019-1350_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-1350.html", + "CVE-2019-1351_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-1351.html", + "CVE-2019-1352_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-1352.html", + "CVE-2019-1353_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-1353.html", + "CVE-2019-1354_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-1354.html", + "CVE-2019-1387_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-1387.html", + "CVE-2019-19604_(Medium)": 
"https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19604.html", + "USN-4220-1": "https://usn.ubuntu.com/4220-1/" + }, + "vuln-long-description": "Joern Schneeweisz and Nicolas Joly discovered that Git contained various\nsecurity flaws. An attacker could possibly use these issues to overwrite\narbitrary paths, execute arbitrary code, and overwrite files in the .git\ndirectory.\n\n\nTLDR: In general, a standard system update will make all the necessary changes.\n\n", + "vuln-name": "USN-4220-1", + "vuln-primary-link": "https://usn.ubuntu.com/4220-1/", + "vuln-priority": 5, + "vuln-short-description": "Several security issues were fixed in Git.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4221-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4221-1.json new file mode 100644 index 0000000..8e46979 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4221-1.json @@ -0,0 +1,126 @@ +{ + "USN-4221-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "libpcap0.8" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1.8.1-6ubuntu1.18.04.1" + ] + }, + "disco-bucket": { + "comparison-collection-subtype": [ + "libpcap0.8" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1.8.1-6ubuntu1.19.04.1" + ] + }, + "trusty-bucket": { + "comparison-collection-subtype": [ + "libpcap0.8" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1.5.3-2ubuntu0.1" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "libpcap0.8" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1.7.4-2ubuntu0.1" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "disco-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "disco" + ] + }, + "trusty-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "trusty" + ] + }, + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-15165_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-15165.html", + "USN-4221-1": "https://usn.ubuntu.com/4221-1/" + }, + "vuln-long-description": "It was discovered that libpcap did not properly validate PHB headers in\nsome situations. 
An attacker could use this to cause a denial of service\n(memory exhaustion).\n\n\nTLDR: In general, a standard system update will make all the necessary changes.\n\n", + "vuln-name": "USN-4221-1", + "vuln-primary-link": "https://usn.ubuntu.com/4221-1/", + "vuln-priority": 5, + "vuln-short-description": "Applications using libpcap could be made to crash if given specially\ncrafted data.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4222-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4222-1.json new file mode 100644 index 0000000..e7aabba --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4222-1.json @@ -0,0 +1,65 @@ +{ + "USN-4222-1": { + "comparisons": { + "xenial-bucket": { + "comparison-collection-subtype": [ + "graphicsmagick", + "libgraphicsmagick-q16-3", + "libgraphicsmagick++-q16-12" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1.3.23-1ubuntu0.3", + "1.3.23-1ubuntu0.3", + "1.3.23-1ubuntu0.3" + ] + } + }, + "filters": { + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2017-11638_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-11638.html", + "CVE-2017-11641_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-11641.html", + "CVE-2017-11642_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-11642.html", + "CVE-2017-11643_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-11643.html", + "CVE-2017-12935_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-12935.html", + "CVE-2017-12936_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-12936.html", + "CVE-2017-12937_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-12937.html", + "CVE-2017-13063_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-13063.html", + "CVE-2017-13064_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-13064.html", + "CVE-2017-13065_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-13065.html", + "CVE-2017-13134_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-13134.html", + "CVE-2017-13737_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-13737.html", + "CVE-2017-13775_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-13775.html", + "CVE-2017-13776_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-13776.html", + "CVE-2017-13777_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-13777.html", + "USN-4222-1": "https://usn.ubuntu.com/4222-1/" + }, + "vuln-long-description": "It was discovered that GraphicsMagick incorrectly handled certain image files.\nAn attacker could possibly use this issue to cause a denial of service or other\nunspecified impact.\n\n\nTLDR: In general, a standard system update will make all the necessary changes.\n\n", + "vuln-name": "USN-4222-1", + "vuln-primary-link": "https://usn.ubuntu.com/4222-1/", + "vuln-priority": 5, + "vuln-short-description": "Several security issues were fixed in 
GraphicsMagick.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4223-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4223-1.json new file mode 100644 index 0000000..b0ccc30 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4223-1.json @@ -0,0 +1,180 @@ +{ + "USN-4223-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "openjdk-11-jdk", + "openjdk-11-jre", + "openjdk-11-jre-zero", + "openjdk-11-jre-headless" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "11.0.5+10-0ubuntu1.1~18.04", + "11.0.5+10-0ubuntu1.1~18.04", + "11.0.5+10-0ubuntu1.1~18.04", + "11.0.5+10-0ubuntu1.1~18.04" + ] + }, + "disco-bucket": { + "comparison-collection-subtype": [ + "openjdk-11-jdk", + "openjdk-11-jre", + "openjdk-11-jre-zero", + "openjdk-11-jre-headless" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "11.0.5+10-0ubuntu1.1~19.04", + "11.0.5+10-0ubuntu1.1~19.04", + "11.0.5+10-0ubuntu1.1~19.04", + "11.0.5+10-0ubuntu1.1~19.04" + ] + }, + "eoan-bucket": { + "comparison-collection-subtype": [ + "openjdk-11-jdk", + "openjdk-11-jre", + "openjdk-11-jre-zero", + "openjdk-11-jre-headless" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "11.0.5+10-0ubuntu1.1", + "11.0.5+10-0ubuntu1.1", + "11.0.5+10-0ubuntu1.1", + "11.0.5+10-0ubuntu1.1" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "openjdk-8-jre-zero", + "openjdk-8-jre-jamvm", + "openjdk-8-jdk", + "openjdk-8-jre-headless", + "openjdk-8-jre" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "8u232-b09-0ubuntu1~16.04.1", + "8u232-b09-0ubuntu1~16.04.1", + "8u232-b09-0ubuntu1~16.04.1", + "8u232-b09-0ubuntu1~16.04.1", + "8u232-b09-0ubuntu1~16.04.1" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "disco-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "disco" + ] + }, + "eoan-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "eoan" + ] + }, + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-2894_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2894.html", + "CVE-2019-2945_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2945.html", + "CVE-2019-2949_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2949.html", + "CVE-2019-2962_(Medium)": 
"https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2962.html", + "CVE-2019-2964_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2964.html", + "CVE-2019-2973_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2973.html", + "CVE-2019-2975_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2975.html", + "CVE-2019-2977_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2977.html", + "CVE-2019-2978_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2978.html", + "CVE-2019-2981_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2981.html", + "CVE-2019-2983_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2983.html", + "CVE-2019-2987_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2987.html", + "CVE-2019-2988_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2988.html", + "CVE-2019-2989_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2989.html", + "CVE-2019-2992_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2992.html", + "CVE-2019-2999_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2999.html", + "USN-4223-1": "https://usn.ubuntu.com/4223-1/" + }, + "vuln-long-description": "Jan Jancar, Petr Svenda, and Vladimir Sedlacek discovered that a side-\nchannel vulnerability existed in the ECDSA implementation in OpenJDK. An\nAttacker could use this to expose sensitive information. (CVE-2019-2894)\n\nIt was discovered that the Socket implementation in OpenJDK did not\nproperly restrict the creation of subclasses with a custom Socket\nimplementation. An attacker could use this to specially create a Java class\nthat could possibly bypass Java sandbox restrictions. (CVE-2019-2945)\n\nRob Hamm discovered that the Kerberos implementation in OpenJDK did not\nproperly handle proxy credentials. An attacker could possibly use this to\nimpersonate another user. (CVE-2019-2949)\n\nIt was discovered that a NULL pointer dereference existed in the font\nhandling implementation in OpenJDK. An attacker could use this to cause a\ndenial of service (application crash). (CVE-2019-2962)\n\nIt was discovered that the Concurrency subsystem in OpenJDK did not\nproperly bound stack consumption when compiling regular expressions. An\nattacker could use this to cause a denial of service (application crash).\n(CVE-2019-2964)\n\nIt was discovered that the JAXP subsystem in OpenJDK did not properly\nhandle XPath expressions in some situations. An attacker could use this to\ncause a denial of service (application crash). (CVE-2019-2973,\nCVE-2019-2981)\n\nIt was discovered that the Nashorn JavaScript subcomponent in OpenJDK did\nnot properly handle regular expressions in some situations. An attacker\ncould use this to cause a denial of service (application crash).\n(CVE-2019-2975)\n\nIt was discovered that the String class in OpenJDK contained an out-of-\nbounds access vulnerability. An attacker could use this to cause a denial\nof service (application crash) or possibly expose sensitive information.\nThis issue only affected OpenJDK 11 in Ubuntu 18.04 LTS, Ubuntu 19.04,\nand Ubuntu 19.10. (CVE-2019-2977)\n\nIt was discovered that the Jar URL handler in OpenJDK did not properly\nhandled nested Jar URLs in some situations. An attacker could use this to\ncause a denial of service (application crash). 
(CVE-2019-2978)\n\nIt was discovered that the Serialization component of OpenJDK did not\nproperly handle deserialization of certain object attributes. An attacker\ncould use this to cause a denial of service (application crash).\n(CVE-2019-2983)\n\nIt was discovered that the FreetypeFontScaler class in OpenJDK did not\nproperly validate dimensions of glyph bitmap images read from font files.\nAn attacker could specially craft a font file that could cause a denial of\nservice (application crash). (CVE-2019-2987)\n\nIt was discovered that a buffer overflow existed in the SunGraphics2D class\nin OpenJDK. An attacker could possibly use this to cause a denial of\nservice (excessive memory consumption or application crash).\n(CVE-2019-2988)\n\nIt was discovered that the Networking component in OpenJDK did not properly\nhandle certain responses from HTTP proxies. An attacker controlling a\nmalicious HTTP proxy could possibly use this to inject content into a\nproxied HTTP connection. (CVE-2019-2989)\n\nIt was discovered that the font handling implementation in OpenJDK did not\nproperly validate TrueType font files in some situations. An attacker could\nspecially craft a font file that could cause a denial of service (excessive\nmemory consumption). (CVE-2019-2992)\n\nIt was discovered that the JavaDoc generator in OpenJDK did not properly\nfilter out some HTML elements properly, including documentation comments in\nJava source code. An attacker could possibly use this to craft a Cross-Site\nScripting attack. (CVE-2019-2999)\n\n\nTLDR: This update uses a new upstream release, which includes additional bug\nfixes. After a standard system update you need to restart any Java\napplications or applets to make all the necessary changes.\n\n", + "vuln-name": "USN-4223-1", + "vuln-primary-link": "https://usn.ubuntu.com/4223-1/", + "vuln-priority": 5, + "vuln-short-description": "Several security issues were fixed in OpenJDK.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4224-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4224-1.json new file mode 100644 index 0000000..40d8349 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4224-1.json @@ -0,0 +1,138 @@ +{ + "USN-4224-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "python3-django", + "python-django" + ], + "comparison-collection-type": [ + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:1.11.11-1ubuntu1.6", + "1:1.11.11-1ubuntu1.6" + ] + }, + "disco-bucket": { + "comparison-collection-subtype": [ + "python3-django", + "python-django" + ], + "comparison-collection-type": [ + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:1.11.20-1ubuntu0.3", + "1:1.11.20-1ubuntu0.3" + ] + }, + "eoan-bucket": { + "comparison-collection-subtype": [ + "python3-django", + "python-django" + ], + "comparison-collection-type": [ + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:1.11.22-1ubuntu1.1", + "1:1.11.22-1ubuntu1.1" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "python3-django", + "python-django" + ], + "comparison-collection-type": [ + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1.8.7-1ubuntu5.11", + "1.8.7-1ubuntu5.11" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + 
"default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "disco-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "disco" + ] + }, + "eoan-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "eoan" + ] + }, + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-19844_(High)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19844.html", + "USN-4224-1": "https://usn.ubuntu.com/4224-1/" + }, + "vuln-long-description": "Simon Charette discovered that the password reset functionality in\nDjango used a Unicode case insensitive query to retrieve accounts\nassociated with an email address. An attacker could possibly use this\nto obtain password reset tokens and hijack accounts.\n\n\nTLDR: In general, a standard system update will make all the necessary changes.\n\n", + "vuln-name": "USN-4224-1", + "vuln-primary-link": "https://usn.ubuntu.com/4224-1/", + "vuln-priority": 7, + "vuln-short-description": "Django accounts could be hijacked through password reset requests.\n" + } +} \ No newline at end of file diff --git a/verifyAudits.py b/verifyAudits.py deleted file mode 100755 index d75c738..0000000 --- a/verifyAudits.py +++ /dev/null @@ -1,187 +0,0 @@ -#!/usr/bin/env python3 - -''' -Copyright 2018, VDMS -Licensed under the terms of the BSD 2-clause license. See LICENSE file for terms. -''' - -# verifyAudits.py - Designed to verify that all of our audit files -# are good to go. 
- - -# Run through Analysis -import os - -import ast -import argparse -import sys -from time import time - -from configparser import ConfigParser - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("-a", "--auditdir", help="Directory that Contains the audits", required=True) - #parser.add_argument("-c", "--config", help="Main analyze.ini file", required=True) - parser._optionals.title = "DESCRIPTION " - - # Parser Args - args = parser.parse_args() - - # Massage Configdir to not include trailing / - if args.auditdir[-1] == "/" : - CONFIGDIR=args.auditdir[0:-1] - else : - CONFIGDIR=args.auditdir - - # - #CONFIG=args.config - -def verifySingleAudit(auditfile) : - - field_strings = [ "vuln-name", "vuln-short-description", "vuln-primary-link", "vuln-long-description" ] - field_ints = [ "vuln-priority", "now", "monthago", "threeyearago", "quarterago", "weekago", "yearago" ] - field_dicts = [ "vuln-additional-links", "filters", "comparisons" ] - all_fields = field_strings + field_ints + field_dicts - total_fields = len(all_fields) - - fields_checked = [] - - verified=True - - # Config Defaults - this_time=int(time()) - back_week=this_time-604800 - back_month=this_time-2628000 - back_quarter=this_time-7844000 - back_year=this_time-31540000 - back_3_year=this_time-94610000 - time_defaults={ "now" : str(this_time), "weekago" : str(back_week), "monthago" : str(back_month), "quarterago" : str(back_quarter), "yearago" : str(back_year), "threeyearago" : str(back_3_year) } - - try: - # Try to Parse - this_audit_config = ConfigParser(time_defaults) - this_audit_config.read(auditfile) - except Exception as e: - # Error if Parse - print("File ", auditfile, " not paresed because of " , format(e)) - verified=False - else: - # It's good so toss that shit in - for section in this_audit_config : - # I let "Global or Default" fly right now - if section not in ["GLOBAL", "DEFAULT"] : - - ##### Parse Check ######## - filter_object = dict() - comparison_object = dict() - - for item in this_audit_config[section]: - fields_checked.append(item) - #print(item) - onelinethisstuff = "".join(this_audit_config[section][item].splitlines()) - try: - if item == "vuln-long-description" : - parsed = ast.literal_eval("'''{}'''".format(onelinethisstuff)) - else: - parsed = ast.literal_eval(onelinethisstuff) - except Exception as e: - print("Issue with file ", auditfile, " when attempting to parse", item, "Error ", str(e), onelinethisstuff) - verified=False - - if item in field_strings : - if type(parsed) is not str: - print("Issue with file ", auditfile, " when attempting to parse", item, " Type is not String but instead " , type(parsed) ) - verified=False - elif item in field_ints : - if type(parsed) is not int : - print("Issue with file ", auditfile, " when attempting to parse", item, " Type is not Int but instead " , type(parsed) ) - verified=False - elif item in field_dicts : - if type(parsed) is not dict : - print("Issue with file ", auditfile, " when attempting to parse", item, " Type is not dict but instead " , type(parsed) ) - verified=False - if item is "filters" : - filter_object = parsed - if item is "comparisons" : - comparison_object = parsed - else : - # Auto Error unkown Field - print("Issue with file ", auditfile, " when attempting to parse", item, " Unkown Field ") - verified=False - - ## Compare buckets - comparison_okay = [ bucket for bucket in comparison_object.keys() if bucket not in filter_object.keys() ] - filter_okay = [ bucket for bucket in comparison_object.keys() if 
bucket not in filter_object.keys() ] - - if len(comparison_okay) > 0 or len(filter_okay) > 0 : - print("Issue with file ", auditfile, " Comparison and bucket items. Bad filters = ", str(filter_okay), " Bad comparisons ", str(comparison_okay) ) - verified=False - - ## Check Counts - if len(fields_checked) != total_fields : - missing_fields = [ field for field in all_fields if field not in fields_checked ] - extra_fields = [ field for field in fields_checked if field not in all_fields ] - - print("Issue with file ", auditfile, "Field Error(s). Extra Fields : " , str(extra_fields), " ; Missing Fields : ", str(missing_fields) ) - verified=False - - return verified - -def verifyAudits(CONFIGDIR, CONFIG=None): - - currently_verified=True - - # Config Defaults - this_time=int(time()) - back_week=this_time-604800 - back_month=this_time-2628000 - back_quarter=this_time-7844000 - back_year=this_time-31540000 - back_3_year=this_time-94610000 - time_defaults={ "now" : str(this_time), "weekago" : str(back_week), "monthago" : str(back_month), "quarterago" : str(back_quarter), "yearago" : str(back_year), "threeyearago" : str(back_3_year) } - - ''' - try: - # Read Our INI with our data collection rules - config = ConfigParser(time_defaults) - config.read(CONFIG) - # Debug - #for i in config : - #for key in config[i] : - #print (i, "-", key, ":", config[i][key]) - except Exception as e: # pylint: disable=broad-except, invalid-name - print("Bad configuration file") - currently_verified=False - ''' - - - # Grab all my Audits in CONFIGDIR Stuff - auditfiles = [] - for (dirpath, dirnames, filenames) in os.walk(CONFIGDIR) : - for singlefile in filenames : - onefile = dirpath + "/" + singlefile - #print(singlefile.find(".ini", -4)) - if singlefile.find(".ini", -4) > 0 : - # File ends with .ini Last 4 chars - auditfiles.append(onefile) - - # Parse the Dicts - #audits=dict() - for auditfile in auditfiles : - if verifySingleAudit(auditfile) == False: - currently_verified=True - - return currently_verified - - -if __name__ == "__main__": - okay = verifyAudits(CONFIGDIR) - - if okay == True : - print("Audits Okay") - sys.exit(0) - else : - print("Audits checks Failed") - sys.exit(1) - \ No newline at end of file From c4d9c8dfb8ec37c212bdeb86d3b93252f1b2a40b Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Mon, 6 Jan 2020 17:14:41 -0800 Subject: [PATCH 007/143] Adding these as I'm about to refactor this to make OVAL only. --- audittools/audits_rhsa.py | 338 ++++++++++++++++++++++++++++++++++++++ audittools/redhat_cve.py | 199 ++++++++++++++++++++++ 2 files changed, 537 insertions(+) create mode 100755 audittools/audits_rhsa.py create mode 100755 audittools/redhat_cve.py diff --git a/audittools/audits_rhsa.py b/audittools/audits_rhsa.py new file mode 100755 index 0000000..cd2d898 --- /dev/null +++ b/audittools/audits_rhsa.py @@ -0,0 +1,338 @@ +#!/usr/bin/env python3 + +''' +audits_rhsa.py + +Given a Single RHSA let's create an manowar audit that represents that. 
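+
+A hypothetical invocation, assuming the argparse flags defined below (the
+advisory id is only an example):
+
+    ./audits_rhsa.py -r "RHSA-2020:0027" -vv
+
+which should print the generated audit as JSON on stdout.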
+''' + +import argparse +import json +import logging +import os +import os.path +import time +import re +import packaging.version + + +from urllib.parse import urljoin + +import requests + +if __name__ == "__main__": + from audit_source import AuditSource + from redhat_cve import mowCVERedHat +else: + from audittools.audit_source import AuditSource + from audittools.redhat_cve import mowCVERedHat + + + +if __name__ == "__main__" : + parser = argparse.ArgumentParser() + #parser.add_argument("-v", "--verbose", action='store_true', help="Turn on Verbosity") + parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) + parser.add_argument("-r", "--rhsa", required=True) + + args = parser.parse_args() + + VERBOSE = len(args.verbose) + + if VERBOSE == 0: + logging.basicConfig(level=logging.ERROR) + elif VERBOSE == 1: + logging.basicConfig(level=logging.WARNING) + elif VERBOSE == 2: + logging.basicConfig(level=logging.INFO) + else: + logging.basicConfig(level=logging.DEBUG) + + RHSA = args.rhsa + + LOGGER = logging.getLogger("audits_rhsa.py") + + LOGGER.debug("Welcome to Audits RHSA.") + +class AuditSourceRHSA(AuditSource): + + ''' + Implements a Public AuditSource object for Ubuntu Security Notices + ''' + + __rhsa_regex = r"[Rr][Hh][Ss][Aa]-\d{4}\:\d{1,6}" + __redhat_security_endpoint = "https://access.redhat.com/hydra/rest/securitydata/cvrf/" + + __epoch_regex = "^(\d)\:" + __el_regex = "^(\S+)\.el(\d{0,2})" + + + def __init__(self, **kwargs): + + # Run My Parent Init Function + AuditSource.__init__(self, **kwargs) + + # Confirm I have a USN + if re.match(self.__rhsa_regex, self.source_key) is None: + raise ValueError("Source Key Doesn't look like a RHSA {}".format(str(self.source_key))) + + self.rhsa_filters = dict() + self.rhsa_comparisons = dict() + + self.rhsa_data = self.get_rhsa_data() + + self.populate_audit() + + def populate_audit(self): + + ''' + Assuming I have my USN Data as a Dictionary, Let's populate the self.audit_data & self.audit_name items from my + parent function. 
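+
+        Roughly (sketched from the assignments below, not a formal spec): the CVRF
+        document title becomes vuln-name, document_references[0] becomes
+        vuln-primary-link, the collected reference_links become vuln-additional-links,
+        the document notes feed the short and long descriptions, and the highest CVE
+        threat ordinal becomes vuln-priority.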
+ ''' + + self.audit_name = self.rhsa_data["cvrfdoc"]["document_title"] + + + self.audit_data = {**self.audit_data, + "vuln-name" : self.audit_name, + "vuln-primary-link" : self.rhsa_data["cvrfdoc"]["document_references"]["reference"][0]["url"], + "vuln-additional-links" : self.rhsa_data["reference_links"], + "vuln-short-description" : self.rhsa_data["cvrfdoc"]["document_notes"]["note"][0], + "vuln-priority" : self.rhsa_data["highest_priority"], + "filters": self.rhsa_filters, + "comparisons": self.rhsa_comparisons} + + self.audit_data["vuln-long-description"] = "\n\n".join(self.rhsa_data["cvrfdoc"]["document_notes"]["note"]) + + return + + def get_rhsa_data(self): + + ''' + Turn the RHSA to an Audit Dictionary of type {"usn-number" : } + ''' + + if self.source_key is None: + raise ValueError("Unknown USN") + + endpoint = "{}{}.json".format(self.__redhat_security_endpoint, self.source_key) + rhsa_data = dict() + + + try: + + self.logger.debug("Requesting {} URL of {}".format(self.source_key, endpoint)) + response = requests.get(endpoint) + + except Exception as get_hrsa_url_error: + self.logger.error("Error when Requesting data for RHSA {}".format(self.source_key)) + self.logger.info("Error for RHSA Request : {}".format(get_hrsa_url_error)) + else: + if response.status_code == requests.codes.ok: + # Good Data + rhsa_data = response.json() + + elif response.status_code == 404: + self.logger.warning("RHSA {} Not found on Red Hat Site.".format(self.source_key)) + else: + self.logger.error("RHSA {} unable to Query for RHSA Recieved {}".format(self.source_key, response.status_code)) + finally: + + self.logger.info(rhsa_data) + + rhsa_data["reference_links"] = dict() + rhsa_data["cves"] = list() + + # Document Links Population + for this_reference in rhsa_data["cvrfdoc"]["document_references"]["reference"]: + rhsa_data["reference_links"][this_reference["description"]] = this_reference["url"] + + # Documentation Links in Vulnerability + # Also Do the Buckets/Packages + + comparisons = dict() + filters = dict() + + highest_priority = 1 + + for this_vuln in rhsa_data["cvrfdoc"]["vulnerability"]: + + for this_reference in this_vuln["references"].get("reference", list()): + rhsa_data["reference_links"][this_reference["description"]] = this_reference["url"] + + if "cve" in this_vuln.keys(): + this_cve = mowCVERedHat(cve=this_vuln["cve"]) + + self.logger.debug(this_cve.rh_cust_package_fixed) + + if self.source_key in this_cve.rh_cust_package_fixed.keys(): + for package_string in this_cve.rh_cust_package_fixed[self.source_key]: + + try: + cve_split = self.break_package_release(package_string, extended=False) + except Exception as cve_package_parse_error: + self.logger.error("From CVE {} Found Package {} that has a parse error.".format(this_cve.cve_id, + package_string)) + self.logger.debug("From CVE {} Found Package {} that has a parse error {}.".format(this_cve.cve_id, + package_string)) + else: + self.insert_into_matrix(**cve_split) + + + for this_product_vuln in this_vuln["product_statuses"]["status"]["product_id"]: + + if this_vuln["product_statuses"]["status"]["type"] == "Fixed": + # This is a Fixed Vuln + + try: + i_split = self.break_package_release(this_product_vuln, extended=True) + except Exception as parse_error: + self.logger.error("Not parsing {} as I ran into an issue.".format(this_product_vuln)) + self.logger.info("Error {} when parsing {}".format(parse_error, this_product_vuln)) + else: + # I split it well let's put it in the buckets/keys + + self.insert_into_matrix(**i_split) + + else: 
+ self.logger.warning("Ignoring Product {} as it's not listed as fixed.".format(this_product_vuln)) + + + this_prioirty = this_vuln.get("threats", dict()).get("ordinal", 0) + if this_prioirty > highest_priority: + highest_priority = this_priority + + rhsa_data["filters"] = filters + rhsa_data["comparisons"] = comparisons + rhsa_data["highest_priority"] = highest_priority + + return rhsa_data + + def insert_into_matrix(self, bucket_name=None, release_number=None, package=None, version=None, **kwargs): + + ''' + Insert a Package into the comparisons and filters + ''' + + self.logger.debug("{}, {}, {}, {}".format(bucket_name, release_number, package, version)) + + if bucket_name not in self.rhsa_filters.keys(): + # Add my Items + self.rhsa_filters[bucket_name] = {"filter-collection-type" : ["os_family", "os_release"], + "filter-collection-subtype" : ["default", "default"], + "filter-match-value" : ["RedHat", release_number], + "filter-match" : "is"} + + self.rhsa_comparisons[bucket_name] = {"comparison-collection-type" : list(), + "comparison-collection-subtype" : list(), + "comparison-match-value" : list(), + "comparison-match" : "aptge"} + + if package in self.rhsa_comparisons[bucket_name]["comparison-collection-subtype"]: + # Duplicate Package + package_index = self.rhsa_comparisons[bucket_name]["comparison-collection-subtype"].index(package) + + + current_version = packaging.version.parse(self.rhsa_comparisons[bucket_name]["comparison-match-value"][package_index]) + new_version = packaging.version.parse(version) + + if current_version > new_version: + self.logger.debug("Replacing definition for {} in bucket {}".format(package, bucket_name)) + self.rhsa_comparisons[bucket_name]["comparison-match-value"][package_index] = version + else: + self.logger.info("Duplicate definition for {} in bucket {} ignored.".format(package, bucket_name)) + else: + # New Add + self.logger.debug("Adding definition in bucket {} for package {} & version {}".format(bucket_name, package, version)) + + self.rhsa_comparisons[bucket_name]["comparison-collection-type"].append("packages") + self.rhsa_comparisons[bucket_name]["comparison-collection-subtype"].append(package) + self.rhsa_comparisons[bucket_name]["comparison-match-value"].append(version) + + return + + def break_package_release(self, package_text, extended=True): + + ''' + Takes a package text like : 7Server-7.6.EUS:kpatch-patch-3_10_0-957_38_1-0:1-3.el7 or kernel-2.6.32-754.24.2.el6 + And breaks it down into it's component pices. 
Giving you a Bucket, Package & Version that should be "consistent" + ''' + + self.logger.debug("Package Text {}".format(package_text)) + + if extended is True: + application_stream = package_text.split(":")[0] + + self.logger.debug("Ignoring Application Stream data of {}".format(application_stream)) + + fixed_product = ":".join(package_text.split(":")[1:]) + else: + + application_stream = "ns" + fixed_product = package_text + + product_regex = re.match(self.__el_regex, str(fixed_product)) + + if product_regex is not None: + release_number = int(product_regex.group(2)) + package_n_version = product_regex.group(1) + + self.logger.debug("Found Package for Release {} : {}".format(release_number, package_n_version)) + + # Split Package Name from Version + pnv_array = package_n_version.split("-") + + for chunk_index in range(1, len(pnv_array)-1): + if pnv_array[chunk_index][0].isdigit(): + # This is the chunk that starts the version + package = "-".join(pnv_array[:chunk_index]) + full_version = "-".join(pnv_array[chunk_index:]) + best_version = full_version + + + if re.match(self.__epoch_regex, full_version) is not None: + best_version = full_version.split(":")[1] + epoch = full_version.split(":")[0] + else: + epoch = None + + if len(best_version.split("-")) == 2: + version_release = best_version.split("-")[1] + best_version = best_version.split("-")[0] + else: + version_release = None + + break + + + + bucket_name = "{}-bucket".format(release_number) + + + return_data = {"application_stream" : application_stream, + "bucket_name" : bucket_name, + "package" : package, + "version" : best_version, + "full_version" : full_version, + "release_number" : release_number, + "version_release" : version_release, + "epoch" : epoch} + + return return_data + + + +if __name__ == "__main__" : + + my_rhsa = AuditSourceRHSA(source_key=RHSA) + + #validated = my_usn.validate_audit_live() + + #LOGGER.info("validated : {}".format(validated)) + + print(json.dumps(my_rhsa.return_audit(), indent=2, sort_keys=True)) + + + + + diff --git a/audittools/redhat_cve.py b/audittools/redhat_cve.py new file mode 100755 index 0000000..c67f6e6 --- /dev/null +++ b/audittools/redhat_cve.py @@ -0,0 +1,199 @@ +#!/usr/bin/env python3 + +''' +A new Ubuntu CVE different than shuttlefish. 
Utilizes Launchpad data to grab Ubuntu +CVE Data +''' + +import time +import logging +import argparse +import re +import datetime +import json + +from urllib.parse import urljoin +from configparser import ConfigParser + +import cvss +import cpe +import requests + +# Library doesn't exist +# import capec +if __name__ in ["__main__", "ubuntu_cve", "redhat_cve"]: + from cve_class import mowCVE +else: + from audittools.cve_class import mowCVE + +if __name__ == "__main__" : + parser = argparse.ArgumentParser() + #parser.add_argument("-v", "--verbose", action='store_true', help="Turn on Verbosity") + parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) + parser.add_argument("-c", "--cve", required=True) + + args = parser.parse_args() + + VERBOSE = len(args.verbose) + + if VERBOSE == 0: + logging.basicConfig(level=logging.ERROR) + elif VERBOSE == 1: + logging.basicConfig(level=logging.WARNING) + elif VERBOSE == 2: + logging.basicConfig(level=logging.INFO) + else: + logging.basicConfig(level=logging.DEBUG) + + CVE = args.cve + + LOGGER = logging.getLogger("redhat_cve.py") + + LOGGER.debug("Welcome to RedHat CVE.") + +class mowCVERedHat(mowCVE): + + ''' + Red Hat CVE Class that Updates mowCVE with Data from CVE + ''' + + # Case Insensitive + #__okay_severities = ["unknown", "none", "low", "medium", "high", "critical"] + + __redhat_security_endpoint = "https://access.redhat.com/hydra/rest/securitydata/cve/" + __redhat_cve_hr = "https://access.redhat.com/security/cve" + + def __init__(self, cve=None, **kwargs): + + ''' + Initialze a Holder for CVE Things + ''' + + if cve is None: + raise ValueError("CVE ID Required") + + mowCVE.__init__(self, cve=cve, **kwargs) + + ''' + self.description = kwargs.get("description", None) + self.title = kwargs.get("title", None) + self.cvss2 = cvss.CVSS2(kwargs.get("cvss2", None)) + self.cvss3 = cvss.CVSS3(kwargs.get("cvss3", None)) + self.severity_override = kwargs.get("severity_override", None) + self.score_override = kwargs.get("score_override", None) + self.cpe_list = [cpe.CPE(indv_cpe) for indv_cpe in kwargs.get("cpe_list", list())] + self.capec_list = kwargs.get("capec_list", list()) + self.references = kwargs.get("references", dict()) + self.primary_reference = kwargs.get("primary_reference", None) + self.last_updated = kwargs.get("last_updated", None) + self.published = kwargs.get("published", None) + + # Updated Now! 
+ self.self_updated = int(time.time()) + + # Audit Items + self.filters = kwargs.get("bucket_def", {}) + self.comparisons = kwargs.get("comparisons", {}) + ''' + + # Default Custom Thing + self.rh_cust_package_fixed = dict() + + + # Don't Run When Testing + if kwargs.get("test", False) is False: + self.pull_rh_cve() + + def pull_rh_cve(self): + + ''' + Reach out, Grab the CVE Data and Parse it + ''' + + rh_endpoint = urljoin(self.__redhat_security_endpoint, "{}.json".format(self.cve_id)) + + self.logger.debug("RH API Call : {}".format(rh_endpoint)) + + parsed_data = None + + try: + response = requests.get(rh_endpoint) + except Exception as url_error: + self.logger.error("Unable to Query Red Hat Data for CVE : {}".format(self.cve_id)) + self.logger.debug("Query Error for CVE {} : {}".format(self.cve_id, url_error)) + else: + + if response.status_code == requests.codes.ok: + # Good Data + parsed_data = response.json() + + self.enhance_cve(parsed_cve_data=parsed_data, rh_url=rh_endpoint) + + elif response.status_code == 404: + self.logger.warning("CVE {} Not found on Red Hat Site.".format(self.cve_id)) + else: + self.logger.error("CVE {} unable to Query for CVE Recieved {}".format(self.cve_id, response.status_code)) + finally: + pass + + + + def enhance_cve(self, parsed_cve_data=None, rh_url=None): + + ''' + Takes the parsed Data and Updates all the various bits + ''' + + self.title = parsed_cve_data["name"] + + self.description = "\n\n".join(parsed_cve_data["details"]) + + if "cvss3" in parsed_cve_data.keys(): + self.cvss3 = cvss.CVSS3(parsed_cve_data["cvss3"]["cvss3_scoring_vector"]) + + if "cwe" in parsed_cve_data.keys(): + self.cwe_list = parsed_cve_data["cwe"].split("->") + + + readable_url = urljoin(self.__redhat_cve_hr, self.cve_id) + + self.references = {"Red Hat {}".format(self.cve_id) : readable_url, + "{} API".format(self.cve_id) : rh_url} + + + self.primary_reference = readable_url + + if "bugzilla" in parsed_cve_data.keys(): + self.references["RH Bugzilla {}".format(parsed_cve_data["bugzilla"]["id"])] = parsed_cve_data["bugzilla"]["url"] + + try: + updated_date = datetime.datetime.strptime(parsed_cve_data["public_date"], "%Y-%m-%dT%H:%M:%SZ") + except Exception as date_error: + self.logger.warning("Unable to Read date of {}".format(parsed_cve_data["publicdate"])) + self.logger.debug("Date Error {}".format(date_error)) + else: + self.published = int(updated_date.timestamp()) + + for package in [*parsed_cve_data.get("package_state", list()), *parsed_cve_data.get("affected_release", list())]: + try: + self.cpe_list.append(cpe.CPE(package["cpe"])) + except Exception as cpe_error: + self.logger.error("CPE Error {} with CPE {}".format(cpe_error, package["cpe"])) + else: + if "package" in package.keys() and "advisory" in package.keys(): + self.logger.debug("Found package fix for package {} and advisory {}".format(package["package"], + package["advisory"])) + + if package["advisory"] not in self.rh_cust_package_fixed.keys(): + self.rh_cust_package_fixed[package["advisory"]] = list() + + self.rh_cust_package_fixed[package["advisory"]].append(package["package"]) + + + +if __name__ == "__main__" : + + my_usn = mowCVERedHat(cve=CVE) + + print(json.dumps(my_usn.summarize(), sort_keys=True, indent=2, default=str)) + From 1111d434886fb0be5684bda1f6ed4b46cdce269e Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Tue, 7 Jan 2020 14:51:04 -0800 Subject: [PATCH 008/143] Updates to Travis Configuration * More incoming this is likely still broken. 
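For context on the dependency change in this patch: the inline "sudo pip3 install bandit pylint ..."
step below is replaced by "pip3 install -r requirements.txt". The requirements.txt added earlier in
this series is 11 lines, so a plausible guess at its contents (not the verbatim file) is the same
package list minus mkdocs, which is still installed separately:

    bandit
    pylint
    paramiko
    pymysql
    colorama
    flask
    flask_cors
    jsonschema
    lxml
    pika
    requests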
--- .travis.yml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 710c0fa..dc5fd00 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,9 @@ language: python +cache: pip +python: + - 3.7 sudo: required +dist: xenial notifications: email: recipients: @@ -7,15 +11,14 @@ notifications: on_success: always on_failure: always before_script: -- sudo apt-get -y install openssh-server - "./travis/pre_build.sh" - "./travis/ssh_setup.sh" - export DEBIAN_FRONTEND=noninteractive; sudo -E apt-get -y install python3-pip python-dev - shellcheck mariadb-server mariadb-client jq openjdk-7-jre graphviz + shellcheck mariadb-server mariadb-client jq openjdk-7-jre graphviz openssh-server - "./travis/db_setup.sh" - sudo pip3 install --upgrade setuptools -- sudo pip3 install bandit pylint paramiko pymysql colorama flask flask_cors jsonschema - lxml pika mkdocs requests +- sudo pip3 install -r requirements.txt +- sudo pip3 install mkdocs - gem install mdl script: - "./travis/testing.sh" From 0cbb9a7182cadcbedb03adfe59e49f8ea82db413 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Tue, 7 Jan 2020 14:51:29 -0800 Subject: [PATCH 009/143] Modernizing Audittools Modernizing Audittools. --- audittools/audit_source.py | 11 +- audittools/audits_rhsa.py | 192 +++++++++++----- audittools/audits_usn.py | 33 +-- audittools/cve_class.py | 29 +++ audittools/ubuntu_cve.py | 12 +- audittools/verifyAudits.py | 7 + .../redhat_rhsa/RHSA-2020:0027.json | 64 ++++++ .../redhat_rhsa/RHSA-2020:0028.json | 62 +++++ .../audittools.d/ubuntu_usn/USN-4225-1.json | 161 +++++++++++++ .../audittools.d/ubuntu_usn/USN-4226-1.json | 183 +++++++++++++++ .../audittools.d/ubuntu_usn/USN-4227-1.json | 217 ++++++++++++++++++ .../audittools.d/ubuntu_usn/USN-4227-2.json | 61 +++++ .../audittools.d/ubuntu_usn/USN-4228-1.json | 118 ++++++++++ .../audittools.d/ubuntu_usn/USN-4228-2.json | 100 ++++++++ 14 files changed, 1164 insertions(+), 86 deletions(-) create mode 100644 travis/artifacts/audits.d/audittools.d/redhat_rhsa/RHSA-2020:0027.json create mode 100644 travis/artifacts/audits.d/audittools.d/redhat_rhsa/RHSA-2020:0028.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4225-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4226-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4227-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4227-2.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4228-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4228-2.json diff --git a/audittools/audit_source.py b/audittools/audit_source.py index 81db986..8790e47 100644 --- a/audittools/audit_source.py +++ b/audittools/audit_source.py @@ -14,7 +14,10 @@ import requests import yaml -from audittools.verifyAudits import verifySingleAudit +if __name__ == "__main__" or __name__ == "audit_source": + from verifyAudits import verifySingleAudit +else: + from audittools.verifyAudits import verifySingleAudit class AuditSource: @@ -154,9 +157,3 @@ def write_audit(self, file_format="json"): written = [True, "File written to {}".format(audit_file)] return written - - - - - - diff --git a/audittools/audits_rhsa.py b/audittools/audits_rhsa.py index cd2d898..50a9cf1 100755 --- a/audittools/audits_rhsa.py +++ b/audittools/audits_rhsa.py @@ -62,6 +62,7 @@ class AuditSourceRHSA(AuditSource): __rhsa_regex = r"[Rr][Hh][Ss][Aa]-\d{4}\:\d{1,6}" 
__redhat_security_endpoint = "https://access.redhat.com/hydra/rest/securitydata/cvrf/" + __redhat_oval_endpoint = "https://access.redhat.com/hydra/rest/securitydata/oval/" __epoch_regex = "^(\d)\:" __el_regex = "^(\S+)\.el(\d{0,2})" @@ -80,6 +81,7 @@ def __init__(self, **kwargs): self.rhsa_comparisons = dict() self.rhsa_data = self.get_rhsa_data() + self.oval_data = self.get_oval_data() self.populate_audit() @@ -90,7 +92,7 @@ def populate_audit(self): parent function. ''' - self.audit_name = self.rhsa_data["cvrfdoc"]["document_title"] + self.audit_name = self.source_key self.audit_data = {**self.audit_data, @@ -162,51 +164,101 @@ def get_rhsa_data(self): if "cve" in this_vuln.keys(): this_cve = mowCVERedHat(cve=this_vuln["cve"]) + rhsa_data["RHCVE : {}".format(this_cve.cve_id)] = this_cve.primary_reference - self.logger.debug(this_cve.rh_cust_package_fixed) + for this_product_vuln in this_vuln["product_statuses"]["status"]["product_id"]: - if self.source_key in this_cve.rh_cust_package_fixed.keys(): - for package_string in this_cve.rh_cust_package_fixed[self.source_key]: + this_prioirty = this_vuln.get("threats", dict()).get("ordinal", 0) + if this_prioirty > highest_priority: + highest_priority = this_priority - try: - cve_split = self.break_package_release(package_string, extended=False) - except Exception as cve_package_parse_error: - self.logger.error("From CVE {} Found Package {} that has a parse error.".format(this_cve.cve_id, - package_string)) - self.logger.debug("From CVE {} Found Package {} that has a parse error {}.".format(this_cve.cve_id, - package_string)) - else: - self.insert_into_matrix(**cve_split) + rhsa_data["highest_priority"] = highest_priority + return rhsa_data - for this_product_vuln in this_vuln["product_statuses"]["status"]["product_id"]: + def get_oval_data(self): - if this_vuln["product_statuses"]["status"]["type"] == "Fixed": - # This is a Fixed Vuln + ''' + Gets the OVAL data for this Finding + ''' + + endpoint = "{}{}.json".format(self.__redhat_oval_endpoint, self.source_key) + + oval_data = {"has_oval" : False} + + try: + self.logger.debug("Requesting {} URL of {}".format(self.source_key, endpoint)) + response = requests.get(endpoint) + except Exception as get_oval_url_error: + self.logger.error("Error when Requesting OVAL data for RHSA {}".format(self.source_key)) + self.logger.info("Error for RHSA OVAL Request : {}".format(get_hrsa_url_error)) + else: + if response.status_code == requests.codes.ok: + # Good Data + oval_data["data"] = response.json() + if oval_data["data"].get("message", None) == "Not Found": + oval_data["has_oval"] = False + self.logger.warning("RHSA {} has no OVAL data for this valid RHSA.".format(self.source_key)) + else: + oval_data["has_oval"] = True + + elif response.status_code == 404: + self.logger.warning("RHSA {} has no OVAL data.".format(self.source_key)) + else: + self.logger.error("RHSA {} unable to Query for RHSA Recieved {}".format(self.source_key, response.status_code)) + + finally: - try: - i_split = self.break_package_release(this_product_vuln, extended=True) - except Exception as parse_error: - self.logger.error("Not parsing {} as I ran into an issue.".format(this_product_vuln)) - self.logger.info("Error {} when parsing {}".format(parse_error, this_product_vuln)) - else: - # I split it well let's put it in the buckets/keys + if oval_data["has_oval"] is True: - self.insert_into_matrix(**i_split) + self.logger.debug("RHSA {} has Oval Data.".format(self.source_key)) + # " packages -> releases -> comparisons " + 
versions_matrix = dict() + + for oval_comparison in oval_data["data"]["oval_definitions"]["states"].get("rpminfo_state", list()): + self.logger.debug("Found oval comparison thing for Comparsion named : {}".format(oval_comparison["id"])) + versions_matrix[oval_comparison["id"]] = oval_comparison + + if "evr" not in oval_comparison: + # It's really just useless to me. + versions_matrix[oval_comparison["id"]]["isversion"] = False else: - self.logger.warning("Ignoring Product {} as it's not listed as fixed.".format(this_product_vuln)) + versions_matrix[oval_comparison["id"]]["isversion"] = True + package_matrix = dict() - this_prioirty = this_vuln.get("threats", dict()).get("ordinal", 0) - if this_prioirty > highest_priority: - highest_priority = this_priority + for package_obj in oval_data["data"]["oval_definitions"]["objects"].get("rpminfo_object", list()): + self.logger.debug("Found oval for RPM Package named : {}".format(package_obj["name"])) + package_matrix[package_obj["id"]] = package_obj - rhsa_data["filters"] = filters - rhsa_data["comparisons"] = comparisons - rhsa_data["highest_priority"] = highest_priority + # Testing Comprehension + for oval_test in oval_data["data"]["oval_definitions"]["tests"].get("rpminfo_test", list()): + self.logger.debug("Found oval for Test Case : {}".format(oval_test["id"])) - return rhsa_data + test_id = oval_test["id"] + tested_thing_id = oval_test["object"]["object_ref"] + case_covered_id = oval_test["state"]["state_ref"] + + if versions_matrix[case_covered_id]["isversion"] is True: + # We Use it + self.logger.debug("Adding Comparison for OVAL ID {}".format(test_id)) + i_split = self.break_package_release(extended=False, + package_name=package_matrix[tested_thing_id]["name"], + package_version=versions_matrix[case_covered_id]["evr"]) + + self.logger.debug("ISplit Found : {}".format(i_split)) + + # I have my Split let's add it + self.insert_into_matrix(**i_split) + + else: + self.logger.debug("Oval Test {} Isn't useful to us. 
Ignoring.".format(test_id)) + + else: + self.logger.debug("RHSA {} has no Oval Data but is a valid RHSA".format(self.source_key)) + + return oval_data def insert_into_matrix(self, bucket_name=None, release_number=None, package=None, version=None, **kwargs): @@ -251,7 +303,7 @@ def insert_into_matrix(self, bucket_name=None, release_number=None, package=None return - def break_package_release(self, package_text, extended=True): + def break_package_release(self, package_text=None, extended=True, **kwargs): ''' Takes a package text like : 7Server-7.6.EUS:kpatch-patch-3_10_0-957_38_1-0:1-3.el7 or kernel-2.6.32-754.24.2.el6 @@ -267,48 +319,60 @@ def break_package_release(self, package_text, extended=True): fixed_product = ":".join(package_text.split(":")[1:]) else: - application_stream = "ns" + # This will be none if I've given explicit package name and package version information fixed_product = package_text - product_regex = re.match(self.__el_regex, str(fixed_product)) + if kwargs.get("package_name", None) is None and kwargs.get("package_version", None) is None: + # I have a package_text with package-version + product_regex = re.match(self.__el_regex, str(fixed_product)) - if product_regex is not None: - release_number = int(product_regex.group(2)) - package_n_version = product_regex.group(1) + if product_regex is not None: + release_number = int(product_regex.group(2)) + package_n_version = product_regex.group(1) - self.logger.debug("Found Package for Release {} : {}".format(release_number, package_n_version)) + self.logger.debug("Found Package for Release {} : {}".format(release_number, package_n_version)) - # Split Package Name from Version - pnv_array = package_n_version.split("-") + # Split Package Name from Version + pnv_array = package_n_version.split("-") - for chunk_index in range(1, len(pnv_array)-1): - if pnv_array[chunk_index][0].isdigit(): - # This is the chunk that starts the version - package = "-".join(pnv_array[:chunk_index]) - full_version = "-".join(pnv_array[chunk_index:]) - best_version = full_version + for chunk_index in range(1, len(pnv_array)-1): + if pnv_array[chunk_index][0].isdigit(): + # This is the chunk that starts the version + package = "-".join(pnv_array[:chunk_index]) + full_version = "-".join(pnv_array[chunk_index:]) + else: + # I was given package and version split + package = kwargs["package_name"] + product_regex = re.match(self.__el_regex, str(kwargs["package_version"])) - if re.match(self.__epoch_regex, full_version) is not None: - best_version = full_version.split(":")[1] - epoch = full_version.split(":")[0] - else: - epoch = None + if product_regex is not None: + release_number = int(product_regex.group(2)) + # Since I was given it split, I don't have to worry about understanding the package + # vs. Version split. I can go straight to full_version + full_version = product_regex.group(1) - if len(best_version.split("-")) == 2: - version_release = best_version.split("-")[1] - best_version = best_version.split("-")[0] - else: - version_release = None - break + # Okay Now let's handle Version stuff. 
+ best_version = full_version + # Let's see if I have an epoch, if so let's handle that + if re.match(self.__epoch_regex, full_version) is not None: + best_version = full_version.split(":")[1] + epoch = full_version.split(":")[0] + else: + epoch = None + # Strip out Release Information if it exists + if len(best_version.split("-")) == 2: + version_release = best_version.split("-")[1] + best_version = best_version.split("-")[0] + else: + version_release = None bucket_name = "{}-bucket".format(release_number) - return_data = {"application_stream" : application_stream, "bucket_name" : bucket_name, "package" : package, @@ -326,11 +390,15 @@ def break_package_release(self, package_text, extended=True): my_rhsa = AuditSourceRHSA(source_key=RHSA) - #validated = my_usn.validate_audit_live() + if my_rhsa.oval_data["has_oval"] is True: + + validated = my_rhsa.validate_audit_live() - #LOGGER.info("validated : {}".format(validated)) + LOGGER.info("validated : {}".format(validated)) - print(json.dumps(my_rhsa.return_audit(), indent=2, sort_keys=True)) + print(json.dumps(my_rhsa.return_audit(), indent=2, sort_keys=True)) + else: + print(json.dumps({"no_audit" : True}, indent=2, sort_keys=True)) diff --git a/audittools/audits_usn.py b/audittools/audits_usn.py index 228896e..c14321b 100755 --- a/audittools/audits_usn.py +++ b/audittools/audits_usn.py @@ -16,8 +16,12 @@ import requests -import audittools.audit_source -import audittools.ubuntu_cve +if __name__ == "__main__": + from audit_source import AuditSource + from ubuntu_cve import mowCVEUbuntu +else: + from audittools.audit_source import AuditSource + from audittools.ubuntu_cve import mowCVEUbuntu @@ -54,7 +58,7 @@ LOGGER.debug("Welcome to Audits USN.") -class AuditSourceUSN(audittools.audit_source.AuditSource): +class AuditSourceUSN(AuditSource): ''' Implements a Public AuditSource object for Ubuntu Security Notices @@ -69,7 +73,7 @@ class AuditSourceUSN(audittools.audit_source.AuditSource): def __init__(self, **kwargs): # Run My Parent Init Function - audittools.audit_source.AuditSource.__init__(self, **kwargs) + AuditSource.__init__(self, **kwargs) self.cachefile = kwargs.get("cachefile", self.__default_cachefile) self.cacheage = kwargs.get("cacheage", self.__default_cacheage) @@ -150,7 +154,7 @@ def get_usn_data(self): for cve_string in usn_data["cves"]: try: - this_cve_obj = audittools.ubuntu_cve.mowCVEUbuntu(cve=cve_string) + this_cve_obj = mowCVEUbuntu(cve=cve_string) except ValueError as cve_parse_error: self.logger.warning("Ignoring CVE of : {}".format(cve_string)) @@ -193,15 +197,18 @@ def cal_bucket_defs(self): bucket_name = "{}-bucket".format(this_release) - filters[bucket_name] = {"filter-collection-type" : ["os", "release"], - "filter-collection-subtype" : ["default", "default"], - "filter-match-value" : ["Ubuntu", this_release], - "filter-match" : "is"} + if bucket_name not in filters.keys(): - comparisons[bucket_name] = {"comparison-collection-type" : list(), - "comparison-collection-subtype" : list(), - "comparison-match-value" : list(), - "comparison-match" : "aptge"} + # Populate Blank Buckets/Comparisons + filters[bucket_name] = {"filter-collection-type" : ["os", "release"], + "filter-collection-subtype" : ["default", "default"], + "filter-match-value" : ["Ubuntu", this_release], + "filter-match" : "is"} + + comparisons[bucket_name] = {"comparison-collection-type" : list(), + "comparison-collection-subtype" : list(), + "comparison-match-value" : list(), + "comparison-match" : "aptge"} for package in 
self.usn_data["releases"][this_release]["binaries"].keys(): # For Each Package populate it's relevant comparison diff --git a/audittools/cve_class.py b/audittools/cve_class.py index 70ce374..fc31a26 100644 --- a/audittools/cve_class.py +++ b/audittools/cve_class.py @@ -72,6 +72,7 @@ def __init__(self, cve=None, **kwargs): self.score_override = kwargs.get("score_override", None) self.cpe_list = [cpe.CPE(indv_cpe) for indv_cpe in kwargs.get("cpe_list", list())] self.capec_list = kwargs.get("capec_list", list()) + self.cwe_list = kwargs.get("cwe_list", list()) self.references = kwargs.get("references", dict()) self.primary_reference = kwargs.get("primary_reference", None) self.last_updated = kwargs.get("last_updated", None) @@ -84,6 +85,34 @@ def __init__(self, cve=None, **kwargs): self.filters = kwargs.get("bucket_def", {}) self.comparisons = kwargs.get("comparisons", {}) + def summarize(self): + + ''' + Give a Dict Summarization + ''' + + the_thing = {"cve_id" : self.cve_id, + "title" : self.title, + "description" : self.description, + "severity_override" : self.severity_override, + "score_override" : self.score_override, + "cpe_list" : [plat.as_uri_2_3() for plat in self.cpe_list], + "capec_list" : self.capec_list, + "cwe_list" : self.cwe_list, + "references" : self.references, + "primary_reference" : self.primary_reference, + "last_updated" : self.last_updated, + "published" : self.published, + "self_updated" : self.self_updated + } + + if self.cvss2 is not None: + the_thing["cvss2"] = self.cvss2.clean_vector() + if self.cvss3 is not None: + the_thing["cvss3"] = self.cvss3.clean_vector() + + return the_thing + def get_severity(self): ''' diff --git a/audittools/ubuntu_cve.py b/audittools/ubuntu_cve.py index 0175325..2ed58f6 100755 --- a/audittools/ubuntu_cve.py +++ b/audittools/ubuntu_cve.py @@ -10,6 +10,7 @@ import argparse import re import datetime +import json from urllib.parse import urljoin from configparser import ConfigParser @@ -20,7 +21,10 @@ # Library doesn't exist # import capec -import audittools.cve_class +if __name__ in ["__main__", "ubuntu_cve", "redhat_cve"]: + from cve_class import mowCVE +else: + from audittools.cve_class import mowCVE if __name__ == "__main__" : parser = argparse.ArgumentParser() @@ -47,7 +51,7 @@ LOGGER.debug("Welcome to Ubuntu CVE.") -class mowCVEUbuntu(audittools.cve_class.mowCVE): +class mowCVEUbuntu(mowCVE): ''' Ubuntu CVE Class that Updates mowCVE with Data from CVE @@ -67,7 +71,7 @@ def __init__(self, cve=None, **kwargs): if cve is None: raise ValueError("CVE ID Required") - audittools.cve_class.mowCVE.__init__(self, cve=cve, **kwargs) + mowCVE.__init__(self, cve=cve, **kwargs) ''' self.description = kwargs.get("description", None) @@ -241,7 +245,7 @@ def enhance_cve(self, parsed_cve_data=None, ubuntu_url=None): my_usn = mowCVEUbuntu(cve=CVE) - print(my_usn.comparisons) + print(json.dumps(my_usn.summarize(), sort_keys=True, indent=2)) #print(my_usn.get_severity()) #print(my_usn.best_numeric_score()) diff --git a/audittools/verifyAudits.py b/audittools/verifyAudits.py index 871ef81..8a776a4 100755 --- a/audittools/verifyAudits.py +++ b/audittools/verifyAudits.py @@ -246,6 +246,13 @@ def verifySingleAudit(auditfile): verified = False ## Compare buckets + total_buckets_check = [bucket for bucket in comparison_object.keys()] + + if len(total_buckets_check) == 0: + logger.error("Issue with file {} audit {} 0 Buckets Defined.".format(audit_file_name, section)) + logger.debug("No Buckets defined in Comparisons.") + verified = False + comparison_okay = 
[bucket for bucket in comparison_object.keys() if bucket not in filter_object.keys()] filter_okay = [bucket for bucket in comparison_object.keys() if bucket not in filter_object.keys()] diff --git a/travis/artifacts/audits.d/audittools.d/redhat_rhsa/RHSA-2020:0027.json b/travis/artifacts/audits.d/audittools.d/redhat_rhsa/RHSA-2020:0027.json new file mode 100644 index 0000000..2a0cb9a --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/redhat_rhsa/RHSA-2020:0027.json @@ -0,0 +1,64 @@ +{ + "RHSA-2020:0027": { + "comparisons": { + "7-bucket": { + "comparison-collection-subtype": [ + "kpatch-patch-3_10_0-1062_1_1", + "kpatch-patch-3_10_0-1062_4_1", + "kpatch-patch-3_10_0-1062_1_2", + "kpatch-patch-3_10_0-1062", + "kpatch-patch-3_10_0-1062_4_2", + "kpatch-patch-3_10_0-1062_4_3" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1", + "1", + "1", + "1", + "1", + "1" + ] + } + }, + "filters": { + "7-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os_family", + "os_release" + ], + "filter-match": "is", + "filter-match-value": [ + "RedHat", + 7 + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-14821": "https://access.redhat.com/security/cve/CVE-2019-14821", + "CVE-2019-15239": "https://access.redhat.com/security/cve/CVE-2019-15239", + "bz#1746708: CVE-2019-14821 Kernel: KVM: OOB memory access via mmio ring buffer": "https://bugzilla.redhat.com/show_bug.cgi?id=1746708", + "bz#1747353: CVE-2019-15239 kernel: local attacker can trigger multiple use-after-free conditions results in privilege escalation": "https://bugzilla.redhat.com/show_bug.cgi?id=1747353", + "https://access.redhat.com/errata/RHSA-2020:0027": "https://access.redhat.com/errata/RHSA-2020:0027", + "https://access.redhat.com/security/updates/classification/#important": "https://access.redhat.com/security/updates/classification/#important" + }, + "vuln-long-description": "An update for kpatch-patch is now available for Red Hat Enterprise Linux 7.\nRed Hat Product Security has rated this update as having a security impact of Important. A Common Vulnerability Scoring System (CVSS) base score, which gives a detailed severity rating, is available for each vulnerability from the CVE link(s) in the References section.\n\nThis is a kernel live patch module which is automatically loaded by the RPM post-install script to modify the code of a running kernel.\nSecurity fix(es):\n* Kernel: KVM: OOB memory access via mmio ring buffer (CVE-2019-14821)\n* kernel: local attacker can trigger multiple use-after-free conditions results in privilege escalation (CVE-2019-15239)\nFor more details about the security issue(s), including the impact, a CVSS score, acknowledgements, and other related information, refer to the CVE page(s) listed in the References section.\n\nPlease see https://www.redhat.com/footer/terms-of-use.html", + "vuln-name": "RHSA-2020:0027", + "vuln-primary-link": "https://access.redhat.com/errata/RHSA-2020:0027", + "vuln-priority": 1, + "vuln-short-description": "An update for kpatch-patch is now available for Red Hat Enterprise Linux 7.\nRed Hat Product Security has rated this update as having a security impact of Important. A Common Vulnerability Scoring System (CVSS) base score, which gives a detailed severity rating, is available for each vulnerability from the CVE link(s) in the References section." 
+ } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/redhat_rhsa/RHSA-2020:0028.json b/travis/artifacts/audits.d/audittools.d/redhat_rhsa/RHSA-2020:0028.json new file mode 100644 index 0000000..63d2a59 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/redhat_rhsa/RHSA-2020:0028.json @@ -0,0 +1,62 @@ +{ + "RHSA-2020:0028": { + "comparisons": { + "7-bucket": { + "comparison-collection-subtype": [ + "kpatch-patch-3_10_0-1062", + "kpatch-patch-3_10_0-1062_1_2", + "kpatch-patch-3_10_0-1062_4_1", + "kpatch-patch-3_10_0-1062_1_1" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1", + "1", + "1", + "1" + ] + } + }, + "filters": { + "7-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os_family", + "os_release" + ], + "filter-match": "is", + "filter-match-value": [ + "RedHat", + 7 + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2018-12207": "https://access.redhat.com/security/cve/CVE-2018-12207", + "CVE-2019-11135": "https://access.redhat.com/security/cve/CVE-2019-11135", + "bz#1646768: CVE-2018-12207 hw: Machine Check Error on Page Size Change (IFU)": "https://bugzilla.redhat.com/show_bug.cgi?id=1646768", + "bz#1753062: CVE-2019-11135 hw: TSX Transaction Asynchronous Abort (TAA)": "https://bugzilla.redhat.com/show_bug.cgi?id=1753062", + "https://access.redhat.com/errata/RHSA-2020:0028": "https://access.redhat.com/errata/RHSA-2020:0028", + "https://access.redhat.com/security/updates/classification/#important": "https://access.redhat.com/security/updates/classification/#important", + "https://access.redhat.com/security/vulnerabilities/ifu-page-mce": "https://access.redhat.com/security/vulnerabilities/ifu-page-mce", + "https://access.redhat.com/solutions/tsx-asynchronousabort": "https://access.redhat.com/solutions/tsx-asynchronousabort", + "https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00210.html": "https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00210.html", + "https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00270.html": "https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00270.html" + }, + "vuln-long-description": "An update for kpatch-patch is now available for Red Hat Enterprise Linux 7.\nRed Hat Product Security has rated this update as having a security impact of Important. 
A Common Vulnerability Scoring System (CVSS) base score, which gives a detailed severity rating, is available for each vulnerability from the CVE link(s) in the References section.\n\nThis is a kernel live patch module which is automatically loaded by the RPM post-install script to modify the code of a running kernel.\nSecurity Fix(es):\n* hw: Machine Check Error on Page Size Change (IFU) (CVE-2018-12207)\n* hw: TSX Transaction Asynchronous Abort (TAA) (CVE-2019-11135)\nFor more details about the security issue(s), including the impact, a CVSS score, acknowledgments, and other related information, refer to the CVE page(s) listed in the References section.\n\nPlease see https://www.redhat.com/footer/terms-of-use.html", + "vuln-name": "RHSA-2020:0028", + "vuln-primary-link": "https://access.redhat.com/errata/RHSA-2020:0028", + "vuln-priority": 1, + "vuln-short-description": "An update for kpatch-patch is now available for Red Hat Enterprise Linux 7.\nRed Hat Product Security has rated this update as having a security impact of Important. A Common Vulnerability Scoring System (CVSS) base score, which gives a detailed severity rating, is available for each vulnerability from the CVE link(s) in the References section." + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4225-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4225-1.json new file mode 100644 index 0000000..57e222e --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4225-1.json @@ -0,0 +1,161 @@ +{ + "USN-4225-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "linux-image-5.3.0-1009-azure", + "linux-image-5.3.0-1010-gcp", + "linux-image-azure-edge", + "linux-image-gcp-edge" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "5.3.0-1009.10~18.04.1", + "5.3.0-1010.11~18.04.1", + "5.3.0.1009.9", + "5.3.0.1010.10" + ] + }, + "eoan-bucket": { + "comparison-collection-subtype": [ + "linux-image-5.3.0-26-generic", + "linux-image-5.3.0-1009-kvm", + "linux-image-5.3.0-1011-gcp", + "linux-image-oracle", + "linux-image-azure", + "linux-image-5.3.0-26-lowlatency", + "linux-image-5.3.0-1009-aws", + "linux-image-5.3.0-1015-raspi2", + "linux-image-virtual", + "linux-image-5.3.0-1009-azure", + "linux-image-snapdragon", + "linux-image-gke", + "linux-image-5.3.0-26-snapdragon", + "linux-image-generic", + "linux-image-5.3.0-1008-oracle", + "linux-image-aws", + "linux-image-kvm", + "linux-image-raspi2", + "linux-image-generic-lpae", + "linux-image-gcp", + "linux-image-5.3.0-26-generic-lpae", + "linux-image-lowlatency" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "5.3.0-26.28", + "5.3.0-1009.10", + "5.3.0-1011.12", + "5.3.0.1008.9", + "5.3.0.1009.27", + "5.3.0-26.28", + "5.3.0-1009.10", + "5.3.0-1015.17", + "5.3.0.26.30", + "5.3.0-1009.10", + "5.3.0.26.30", + "5.3.0.1011.12", + "5.3.0-26.28", + "5.3.0.26.30", + "5.3.0-1008.9", + "5.3.0.1009.11", + "5.3.0.1009.11", + "5.3.0.1015.12", + "5.3.0.26.30", + "5.3.0.1011.12", + "5.3.0-26.28", + "5.3.0.26.30" + ] + } + }, 
+ "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "eoan-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "eoan" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-14895_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14895.html", + "CVE-2019-14896_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14896.html", + "CVE-2019-14897_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14897.html", + "CVE-2019-14901_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14901.html", + "CVE-2019-16231_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-16231.html", + "CVE-2019-18660_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-18660.html", + "CVE-2019-18813_(Unknown)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-18813.html", + "CVE-2019-19044_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19044.html", + "CVE-2019-19045_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19045.html", + "CVE-2019-19047_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19047.html", + "CVE-2019-19051_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19051.html", + "CVE-2019-19052_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19052.html", + "CVE-2019-19055_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19055.html", + "CVE-2019-19072_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19072.html", + "CVE-2019-19524_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19524.html", + "CVE-2019-19529_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19529.html", + "CVE-2019-19534_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19534.html", + "CVE-2019-19807_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19807.html", + "USN-4225-1": "https://usn.ubuntu.com/4225-1/" + }, + "vuln-long-description": "It was discovered that a heap-based buffer overflow existed in the Marvell\nWiFi-Ex Driver for the Linux kernel. A physically proximate attacker could\nuse this to cause a denial of service (system crash) or possibly execute\narbitrary code. (CVE-2019-14895, CVE-2019-14901)\n\nIt was discovered that a heap-based buffer overflow existed in the Marvell\nLibertas WLAN Driver for the Linux kernel. A physically proximate attacker\ncould use this to cause a denial of service (system crash) or possibly\nexecute arbitrary code. (CVE-2019-14896, CVE-2019-14897)\n\nIt was discovered that the Fujitsu ES network device driver for the Linux\nkernel did not properly check for errors in some situations, leading to a\nNULL pointer dereference. A local attacker could use this to cause a denial\nof service. (CVE-2019-16231)\n\nAnthony Steinhauser discovered that the Linux kernel did not properly\nperform Spectre_RSB mitigations to all processors for PowerPC architecture\nsystems in some situations. A local attacker could use this to expose\nsensitive information. 
(CVE-2019-18660)\n\nIt was discovered that the Broadcom V3D DRI driver in the Linux kernel did\nnot properly deallocate memory in certain error conditions. A local\nattacker could possibly use this to cause a denial of service (kernel\nmemory exhaustion). (CVE-2019-19044)\n\nIt was discovered that the Mellanox Technologies Innova driver in the Linux\nkernel did not properly deallocate memory in certain failure conditions. A\nlocal attacker could use this to cause a denial of service (kernel memory\nexhaustion). (CVE-2019-19045)\n\nIt was discovered that the Mellanox Technologies ConnectX driver in the\nLinux kernel did not properly deallocate memory in certain failure\nconditions. A local attacker could use this to cause a denial of service\n(kernel memory exhaustion). (CVE-2019-19047)\n\nIt was discovered that the Intel WiMAX 2400 driver in the Linux kernel did\nnot properly deallocate memory in certain situations. A local attacker\ncould use this to cause a denial of service (kernel memory exhaustion).\n(CVE-2019-19051)\n\nIt was discovered that Geschwister Schneider USB CAN interface driver in\nthe Linux kernel did not properly deallocate memory in certain failure\nconditions. A physically proximate attacker could use this to cause a\ndenial of service (kernel memory exhaustion). (CVE-2019-19052)\n\nIt was discovered that the netlink-based 802.11 configuration interface in\nthe Linux kernel did not deallocate memory in certain error conditions. A\nlocal attacker could possibly use this to cause a denial of service (kernel\nmemory exhaustion). (CVE-2019-19055)\n\nIt was discovered that the event tracing subsystem of the Linux kernel did\nnot properly deallocate memory in certain error conditions. A local\nattacker could use this to cause a denial of service (kernel memory\nexhaustion). (CVE-2019-19072)\n\nIt was discovered that the driver for memoryless force-feedback input\ndevices in the Linux kernel contained a use-after-free vulnerability. A\nphysically proximate attacker could possibly use this to cause a denial of\nservice (system crash) or execute arbitrary code. (CVE-2019-19524)\n\nIt was discovered that the Microchip CAN BUS Analyzer driver in the Linux\nkernel contained a use-after-free vulnerability on device disconnect. A\nphysically proximate attacker could use this to cause a denial of service\n(system crash) or possibly execute arbitrary code. (CVE-2019-19529)\n\nIt was discovered that the PEAK-System Technik USB driver in the Linux\nkernel did not properly sanitize memory before sending it to the device. A\nphysically proximate attacker could use this to expose sensitive\ninformation (kernel memory). (CVE-2019-19534)\n\nTristan Madani discovered that the ALSA timer implementation in the Linux\nkernel contained a use-after-free vulnerability. A local attacker could use\nthis to cause a denial of service (system crash) or possibly execute\narbitrary code. (CVE-2019-19807)\n\nIt was discovered that the DesignWare USB3 controller driver in the Linux\nkernel did not properly deallocate memory in some error conditions. A local\nattacker could possibly use this to cause a denial of service (memory\nexhaustion). 
(CVE-2019-18813)\n\n\nTLDR: After a standard system update you need to reboot your computer to make\nall the necessary changes.\n\nATTENTION: Due to an unavoidable ABI change the kernel updates have\nbeen given a new version number, which requires you to recompile and\nreinstall all third party kernel modules you might have installed.\nUnless you manually uninstalled the standard kernel metapackages\n(e.g. linux-generic, linux-generic-lts-RELEASE, linux-virtual,\nlinux-powerpc), a standard system upgrade will automatically perform\nthis as well.\n\n", + "vuln-name": "USN-4225-1", + "vuln-primary-link": "https://usn.ubuntu.com/4225-1/", + "vuln-priority": 5, + "vuln-short-description": "Several security issues were fixed in the Linux kernel.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4226-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4226-1.json new file mode 100644 index 0000000..d5a3364 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4226-1.json @@ -0,0 +1,183 @@ +{ + "USN-4226-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "linux-image-5.0.0-1033-oem-osp1", + "linux-image-aws-edge", + "linux-image-5.0.0-1023-aws", + "linux-image-5.0.0-1028-azure", + "linux-image-5.0.0-1027-gke", + "linux-image-5.0.0-1009-oracle", + "linux-image-oracle-edge", + "linux-image-oem-osp1", + "linux-image-gke-5.0", + "linux-image-azure" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "5.0.0-1033.38", + "5.0.0.1023.37", + "5.0.0-1023.26~18.04.1", + "5.0.0-1028.30~18.04.1", + "5.0.0-1027.28~18.04.1", + "5.0.0-1009.14~18.04.1", + "5.0.0.1009.8", + "5.0.0.1033.37", + "5.0.0.1027.16", + "5.0.0.1028.39" + ] + }, + "disco-bucket": { + "comparison-collection-subtype": [ + "linux-image-kvm", + "linux-image-gke", + "linux-image-5.0.0-38-generic-lpae", + "linux-image-5.0.0-1024-raspi2", + "linux-image-5.0.0-1023-aws", + "linux-image-5.0.0-38-generic", + "linux-image-5.0.0-1024-kvm", + "linux-image-5.0.0-1028-azure", + "linux-image-generic-lpae", + "linux-image-virtual", + "linux-image-5.0.0-1009-oracle", + "linux-image-azure", + "linux-image-aws", + "linux-image-generic", + "linux-image-gcp", + "linux-image-5.0.0-38-lowlatency", + "linux-image-5.0.0-1028-gcp", + "linux-image-oracle", + "linux-image-lowlatency", + "linux-image-raspi2" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "5.0.0.1024.25", + "5.0.0.1028.53", + "5.0.0-38.41", + "5.0.0-1024.25", + "5.0.0-1023.26", + "5.0.0-38.41", + "5.0.0-1024.26", + "5.0.0-1028.30", + "5.0.0.38.40", + "5.0.0.38.40", + "5.0.0-1009.14", + "5.0.0.1028.28", + "5.0.0.1023.25", + "5.0.0.38.40", + "5.0.0.1028.53", + "5.0.0-38.41", + "5.0.0-1028.29", + "5.0.0.1009.35", + "5.0.0.38.40", + "5.0.0.1024.22" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + 
"Ubuntu", + "bionic" + ] + }, + "disco-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "disco" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-10220_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-10220.html", + "CVE-2019-14895_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14895.html", + "CVE-2019-14896_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14896.html", + "CVE-2019-14897_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14897.html", + "CVE-2019-14901_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14901.html", + "CVE-2019-16231_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-16231.html", + "CVE-2019-16233_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-16233.html", + "CVE-2019-17075_(Unknown)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17075.html", + "CVE-2019-17133_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17133.html", + "CVE-2019-18660_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-18660.html", + "CVE-2019-18813_(Unknown)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-18813.html", + "CVE-2019-19045_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19045.html", + "CVE-2019-19048_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19048.html", + "CVE-2019-19052_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19052.html", + "CVE-2019-19055_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19055.html", + "CVE-2019-19060_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19060.html", + "CVE-2019-19065_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19065.html", + "CVE-2019-19067_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19067.html", + "CVE-2019-19072_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19072.html", + "CVE-2019-19075_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19075.html", + "CVE-2019-19083_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19083.html", + "CVE-2019-19524_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19524.html", + "CVE-2019-19526_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19526.html", + "CVE-2019-19529_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19529.html", + "CVE-2019-19532_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19532.html", + "CVE-2019-19534_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19534.html", + "CVE-2019-19922_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19922.html", + "CVE-2019-2214_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-2214.html", + "USN-4226-1": "https://usn.ubuntu.com/4226-1/" + }, + "vuln-long-description": "Michael Hanselmann discovered that the CIFS implementation in the Linux\nkernel did not sanitize paths returned by an SMB server. 
An attacker\ncontrolling an SMB server could use this to overwrite arbitrary files.\n(CVE-2019-10220)\n\nIt was discovered that a heap-based buffer overflow existed in the Marvell\nWiFi-Ex Driver for the Linux kernel. A physically proximate attacker could\nuse this to cause a denial of service (system crash) or possibly execute\narbitrary code. (CVE-2019-14895, CVE-2019-14901)\n\nIt was discovered that a heap-based buffer overflow existed in the Marvell\nLibertas WLAN Driver for the Linux kernel. A physically proximate attacker\ncould use this to cause a denial of service (system crash) or possibly\nexecute arbitrary code. (CVE-2019-14896, CVE-2019-14897)\n\nIt was discovered that the Fujitsu ES network device driver for the Linux\nkernel did not properly check for errors in some situations, leading to a\nNULL pointer dereference. A local attacker could use this to cause a denial\nof service. (CVE-2019-16231)\n\nIt was discovered that the QLogic Fibre Channel driver in the Linux kernel\ndid not properly check for error, leading to a NULL pointer dereference. A\nlocal attacker could possibly use this to cause a denial of service (system\ncrash). (CVE-2019-16233)\n\nNicolas Waisman discovered that the WiFi driver stack in the Linux kernel\ndid not properly validate SSID lengths. A physically proximate attacker\ncould use this to cause a denial of service (system crash).\n(CVE-2019-17133)\n\nAnthony Steinhauser discovered that the Linux kernel did not properly\nperform Spectre_RSB mitigations to all processors for PowerPC architecture\nsystems in some situations. A local attacker could use this to expose\nsensitive information. (CVE-2019-18660)\n\nIt was discovered that the Mellanox Technologies Innova driver in the Linux\nkernel did not properly deallocate memory in certain failure conditions. A\nlocal attacker could use this to cause a denial of service (kernel memory\nexhaustion). (CVE-2019-19045)\n\nIt was discovered that the VirtualBox guest driver implementation in the\nLinux kernel did not properly deallocate memory in certain error\nconditions. A local attacker could use this to cause a denial of service\n(memory exhaustion). (CVE-2019-19048)\n\nIt was discovered that Geschwister Schneider USB CAN interface driver in\nthe Linux kernel did not properly deallocate memory in certain failure\nconditions. A physically proximate attacker could use this to cause a\ndenial of service (kernel memory exhaustion). (CVE-2019-19052)\n\nIt was discovered that the netlink-based 802.11 configuration interface in\nthe Linux kernel did not deallocate memory in certain error conditions. A\nlocal attacker could possibly use this to cause a denial of service (kernel\nmemory exhaustion). (CVE-2019-19055)\n\nIt was discovered that the ADIS16400 IIO IMU Driver for the Linux kernel\ndid not properly deallocate memory in certain error conditions. A local\nattacker could use this to cause a denial of service (memory exhaustion).\n(CVE-2019-19060)\n\nIt was discovered that the Intel OPA Gen1 Infiniband Driver for the Linux\nkernel did not properly deallocate memory in certain error conditions. A\nlocal attacker could use this to cause a denial of service (memory\nexhaustion). (CVE-2019-19065)\n\nIt was discovered that the AMD Audio CoProcessor Driver for the Linux\nkernel did not properly deallocate memory in certain error conditions. A\nlocal attacker with the ability to load modules could use this to cause a\ndenial of service (memory exhaustion). 
(CVE-2019-19067)\n\nIt was discovered that the event tracing subsystem of the Linux kernel did\nnot properly deallocate memory in certain error conditions. A local\nattacker could use this to cause a denial of service (kernel memory\nexhaustion). (CVE-2019-19072)\n\nIt was discovered that the Cascoda CA8210 SPI 802.15.4 wireless controller\ndriver for the Linux kernel did not properly deallocate memory in certain\nerror conditions. A local attacker could use this to cause a denial of\nservice (memory exhaustion). (CVE-2019-19075)\n\nIt was discovered that the AMD Display Engine Driver in the Linux kernel\ndid not properly deallocate memory in certain error conditions. A local\nattack could use this to cause a denial of service (memory exhaustion).\n(CVE-2019-19083)\n\nIt was discovered that the driver for memoryless force-feedback input\ndevices in the Linux kernel contained a use-after-free vulnerability. A\nphysically proximate attacker could possibly use this to cause a denial of\nservice (system crash) or execute arbitrary code. (CVE-2019-19524)\n\nIt was discovered that the NXP PN533 NFC USB driver in the Linux kernel did\nnot properly free resources after a late probe error, leading to a use-\nafter-free vulnerability. A physically proximate attacker could use this to\ncause a denial of service (system crash) or possibly execute arbitrary\ncode. (CVE-2019-19526)\n\nIt was discovered that the Microchip CAN BUS Analyzer driver in the Linux\nkernel contained a use-after-free vulnerability on device disconnect. A\nphysically proximate attacker could use this to cause a denial of service\n(system crash) or possibly execute arbitrary code. (CVE-2019-19529)\n\nIt was discovered that multiple USB HID device drivers in the Linux kernel\ndid not properly validate device metadata on attachment, leading to out-of-\nbounds writes. A physically proximate attacker could use this to cause a\ndenial of service (system crash) or possibly execute arbitrary code.\n(CVE-2019-19532)\n\nIt was discovered that the PEAK-System Technik USB driver in the Linux\nkernel did not properly sanitize memory before sending it to the device. A\nphysically proximate attacker could use this to expose sensitive\ninformation (kernel memory). (CVE-2019-19534)\n\nIt was discovered that in some situations the fair scheduler in the Linux\nkernel did not permit a process to use its full quota time slice. A local\nattacker could use this to cause a denial of service. (CVE-2019-19922)\n\nIt was discovered that the binder IPC implementation in the Linux kernel\ndid not properly perform bounds checking in some situations, leading to an\nout-of-bounds write. A local attacker could use this to cause a denial of\nservice (system crash) or possibly execute arbitrary code. (CVE-2019-2214)\n\nNicolas Waisman discovered that the Chelsio T4/T5 RDMA Driver for the Linux\nkernel performed DMA from a kernel stack. A local attacker could use this\nto cause a denial of service (system crash). (CVE-2019-17075)\n\nIt was discovered that the DesignWare USB3 controller driver in the Linux\nkernel did not properly deallocate memory in some error conditions. A local\nattacker could possibly use this to cause a denial of service (memory\nexhaustion). 
(CVE-2019-18813)\n\n\nTLDR: After a standard system update you need to reboot your computer to make\nall the necessary changes.\n\nATTENTION: Due to an unavoidable ABI change the kernel updates have\nbeen given a new version number, which requires you to recompile and\nreinstall all third party kernel modules you might have installed.\nUnless you manually uninstalled the standard kernel metapackages\n(e.g. linux-generic, linux-generic-lts-RELEASE, linux-virtual,\nlinux-powerpc), a standard system upgrade will automatically perform\nthis as well.\n\n", + "vuln-name": "USN-4226-1", + "vuln-primary-link": "https://usn.ubuntu.com/4226-1/", + "vuln-priority": 5, + "vuln-short-description": "Several security issues were fixed in the Linux kernel.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4227-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4227-1.json new file mode 100644 index 0000000..2c4c3cd --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4227-1.json @@ -0,0 +1,217 @@ +{ + "USN-4227-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "linux-image-powerpc-e500mc", + "linux-image-aws", + "linux-image-oracle", + "linux-image-4.15.0-74-generic", + "linux-image-gke-4.15", + "linux-image-4.15.0-1066-oem", + "linux-image-oracle-lts-18.04", + "linux-image-4.15.0-1031-oracle", + "linux-image-virtual", + "linux-image-snapdragon", + "linux-image-powerpc64-emb", + "linux-image-gke", + "linux-image-4.15.0-1070-snapdragon", + "linux-image-aws-lts-18.04", + "linux-image-4.15.0-74-lowlatency", + "linux-image-generic", + "linux-image-oem", + "linux-image-4.15.0-1053-raspi2", + "linux-image-4.15.0-1050-gke", + "linux-image-kvm", + "linux-image-raspi2", + "linux-image-powerpc-smp", + "linux-image-4.15.0-1057-aws", + "linux-image-generic-lpae", + "linux-image-4.15.0-1052-kvm", + "linux-image-powerpc64-smp", + "linux-image-4.15.0-74-generic-lpae", + "linux-image-lowlatency" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "4.15.0.74.76", + "4.15.0.1057.58", + "4.15.0.1031.36", + "4.15.0-74.84", + "4.15.0.1050.53", + "4.15.0-1066.76", + "4.15.0.1031.36", + "4.15.0-1031.34", + "4.15.0.74.76", + "4.15.0.1070.73", + "4.15.0.74.76", + "4.15.0.1050.53", + "4.15.0-1070.77", + "4.15.0.1057.58", + "4.15.0-74.84", + "4.15.0.74.76", + "4.15.0.1066.70", + "4.15.0-1053.57", + "4.15.0-1050.53", + "4.15.0.1052.52", + "4.15.0.1053.51", + "4.15.0.74.76", + "4.15.0-1057.59", + "4.15.0.74.76", + "4.15.0-1052.52", + "4.15.0.74.76", + "4.15.0-74.84", + "4.15.0.74.76" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "linux-image-gke", + "linux-image-4.15.0-1057-aws", + "linux-image-azure-edge", + "linux-image-aws-hwe", + "linux-image-4.15.0-74-lowlatency", + "linux-image-gcp", + "linux-image-generic-hwe-16.04", + "linux-image-oem", + "linux-image-4.15.0-74-generic", + "linux-image-lowlatency-hwe-16.04", + "linux-image-4.15.0-1066-azure", + "linux-image-4.15.0-74-generic-lpae", + "linux-image-virtual-hwe-16.04", + 
"linux-image-4.15.0-1031-oracle", + "linux-image-oracle", + "linux-image-4.15.0-1052-gcp", + "linux-image-azure", + "linux-image-generic-lpae-hwe-16.04" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "4.15.0.1052.66", + "4.15.0-1057.59~16.04.1", + "4.15.0.1066.69", + "4.15.0.1057.57", + "4.15.0-74.83~16.04.1", + "4.15.0.1052.66", + "4.15.0.74.94", + "4.15.0.74.94", + "4.15.0-74.83~16.04.1", + "4.15.0.74.94", + "4.15.0-1066.71", + "4.15.0-74.83~16.04.1", + "4.15.0.74.94", + "4.15.0-1031.34~16.04.1", + "4.15.0.1031.24", + "4.15.0-1052.56", + "4.15.0.1066.69", + "4.15.0.74.94" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-14895_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14895.html", + "CVE-2019-14896_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14896.html", + "CVE-2019-14897_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14897.html", + "CVE-2019-14901_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14901.html", + "CVE-2019-16231_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-16231.html", + "CVE-2019-16233_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-16233.html", + "CVE-2019-18660_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-18660.html", + "CVE-2019-19045_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19045.html", + "CVE-2019-19052_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19052.html", + "CVE-2019-19083_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19083.html", + "CVE-2019-19524_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19524.html", + "CVE-2019-19529_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19529.html", + "CVE-2019-19534_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19534.html", + "CVE-2019-19807_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19807.html", + "USN-4227-1": "https://usn.ubuntu.com/4227-1/" + }, + "vuln-long-description": "It was discovered that a heap-based buffer overflow existed in the Marvell\nWiFi-Ex Driver for the Linux kernel. A physically proximate attacker could\nuse this to cause a denial of service (system crash) or possibly execute\narbitrary code. (CVE-2019-14895, CVE-2019-14901)\n\nIt was discovered that a heap-based buffer overflow existed in the Marvell\nLibertas WLAN Driver for the Linux kernel. A physically proximate attacker\ncould use this to cause a denial of service (system crash) or possibly\nexecute arbitrary code. 
(CVE-2019-14896, CVE-2019-14897)\n\nIt was discovered that the Fujitsu ES network device driver for the Linux\nkernel did not properly check for errors in some situations, leading to a\nNULL pointer dereference. A local attacker could use this to cause a denial\nof service. (CVE-2019-16231)\n\nIt was discovered that the QLogic Fibre Channel driver in the Linux kernel\ndid not properly check for error, leading to a NULL pointer dereference. A\nlocal attacker could possibly use this to cause a denial of service (system\ncrash). (CVE-2019-16233)\n\nAnthony Steinhauser discovered that the Linux kernel did not properly\nperform Spectre_RSB mitigations to all processors for PowerPC architecture\nsystems in some situations. A local attacker could use this to expose\nsensitive information. (CVE-2019-18660)\n\nIt was discovered that the Mellanox Technologies Innova driver in the Linux\nkernel did not properly deallocate memory in certain failure conditions. A\nlocal attacker could use this to cause a denial of service (kernel memory\nexhaustion). (CVE-2019-19045)\n\nIt was discovered that Geschwister Schneider USB CAN interface driver in\nthe Linux kernel did not properly deallocate memory in certain failure\nconditions. A physically proximate attacker could use this to cause a\ndenial of service (kernel memory exhaustion). (CVE-2019-19052)\n\nIt was discovered that the AMD Display Engine Driver in the Linux kernel\ndid not properly deallocate memory in certain error conditions. A local\nattack could use this to cause a denial of service (memory exhaustion).\n(CVE-2019-19083)\n\nIt was discovered that the driver for memoryless force-feedback input\ndevices in the Linux kernel contained a use-after-free vulnerability. A\nphysically proximate attacker could possibly use this to cause a denial of\nservice (system crash) or execute arbitrary code. (CVE-2019-19524)\n\nIt was discovered that the Microchip CAN BUS Analyzer driver in the Linux\nkernel contained a use-after-free vulnerability on device disconnect. A\nphysically proximate attacker could use this to cause a denial of service\n(system crash) or possibly execute arbitrary code. (CVE-2019-19529)\n\nIt was discovered that the PEAK-System Technik USB driver in the Linux\nkernel did not properly sanitize memory before sending it to the device. A\nphysically proximate attacker could use this to expose sensitive\ninformation (kernel memory). (CVE-2019-19534)\n\nTristan Madani discovered that the ALSA timer implementation in the Linux\nkernel contained a use-after-free vulnerability. A local attacker could use\nthis to cause a denial of service (system crash) or possibly execute\narbitrary code. (CVE-2019-19807)\n\n\nTLDR: After a standard system update you need to reboot your computer to make\nall the necessary changes.\n\nATTENTION: Due to an unavoidable ABI change the kernel updates have\nbeen given a new version number, which requires you to recompile and\nreinstall all third party kernel modules you might have installed.\nUnless you manually uninstalled the standard kernel metapackages\n(e.g. 
linux-generic, linux-generic-lts-RELEASE, linux-virtual,\nlinux-powerpc), a standard system upgrade will automatically perform\nthis as well.\n\n", + "vuln-name": "USN-4227-1", + "vuln-primary-link": "https://usn.ubuntu.com/4227-1/", + "vuln-priority": 5, + "vuln-short-description": "Several security issues were fixed in the Linux kernel.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4227-2.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4227-2.json new file mode 100644 index 0000000..013c9a0 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4227-2.json @@ -0,0 +1,61 @@ +{ + "USN-4227-2": { + "comparisons": { + "trusty-bucket": { + "comparison-collection-subtype": [ + "linux-image-azure", + "linux-image-4.15.0-1066-azure" + ], + "comparison-collection-type": [ + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "4.15.0.1066.52", + "4.15.0-1066.71~14.04.1" + ] + } + }, + "filters": { + "trusty-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "trusty" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-14895_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14895.html", + "CVE-2019-14896_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14896.html", + "CVE-2019-14897_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14897.html", + "CVE-2019-14901_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14901.html", + "CVE-2019-16231_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-16231.html", + "CVE-2019-16233_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-16233.html", + "CVE-2019-18660_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-18660.html", + "CVE-2019-19045_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19045.html", + "CVE-2019-19052_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19052.html", + "CVE-2019-19083_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19083.html", + "CVE-2019-19524_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19524.html", + "CVE-2019-19529_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19529.html", + "CVE-2019-19534_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19534.html", + "CVE-2019-19807_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19807.html", + "USN-4227-2": "https://usn.ubuntu.com/4227-2/" + }, + "vuln-long-description": "USN-4227-1 fixed vulnerabilities in the Linux kernel for Ubuntu\n18.04 LTS. This update provides the corresponding updates for the\nLinux kernel for Microsoft Azure Cloud systems for Ubuntu 14.04 ESM.\n\nIt was discovered that a heap-based buffer overflow existed in the Marvell\nWiFi-Ex Driver for the Linux kernel. A physically proximate attacker could\nuse this to cause a denial of service (system crash) or possibly execute\narbitrary code. (CVE-2019-14895, CVE-2019-14901)\n\nIt was discovered that a heap-based buffer overflow existed in the Marvell\nLibertas WLAN Driver for the Linux kernel. 
A physically proximate attacker\ncould use this to cause a denial of service (system crash) or possibly\nexecute arbitrary code. (CVE-2019-14896, CVE-2019-14897)\n\nIt was discovered that the Fujitsu ES network device driver for the Linux\nkernel did not properly check for errors in some situations, leading to a\nNULL pointer dereference. A local attacker could use this to cause a denial\nof service. (CVE-2019-16231)\n\nIt was discovered that the QLogic Fibre Channel driver in the Linux kernel\ndid not properly check for error, leading to a NULL pointer dereference. A\nlocal attacker could possibly use this to cause a denial of service (system\ncrash). (CVE-2019-16233)\n\nAnthony Steinhauser discovered that the Linux kernel did not properly\nperform Spectre_RSB mitigations to all processors for PowerPC architecture\nsystems in some situations. A local attacker could use this to expose\nsensitive information. (CVE-2019-18660)\n\nIt was discovered that the Mellanox Technologies Innova driver in the Linux\nkernel did not properly deallocate memory in certain failure conditions. A\nlocal attacker could use this to cause a denial of service (kernel memory\nexhaustion). (CVE-2019-19045)\n\nIt was discovered that Geschwister Schneider USB CAN interface driver in\nthe Linux kernel did not properly deallocate memory in certain failure\nconditions. A physically proximate attacker could use this to cause a\ndenial of service (kernel memory exhaustion). (CVE-2019-19052)\n\nIt was discovered that the AMD Display Engine Driver in the Linux kernel\ndid not properly deallocate memory in certain error conditions. A local\nattack could use this to cause a denial of service (memory exhaustion).\n(CVE-2019-19083)\n\nIt was discovered that the driver for memoryless force-feedback input\ndevices in the Linux kernel contained a use-after-free vulnerability. A\nphysically proximate attacker could possibly use this to cause a denial of\nservice (system crash) or execute arbitrary code. (CVE-2019-19524)\n\nIt was discovered that the Microchip CAN BUS Analyzer driver in the Linux\nkernel contained a use-after-free vulnerability on device disconnect. A\nphysically proximate attacker could use this to cause a denial of service\n(system crash) or possibly execute arbitrary code. (CVE-2019-19529)\n\nIt was discovered that the PEAK-System Technik USB driver in the Linux\nkernel did not properly sanitize memory before sending it to the device. A\nphysically proximate attacker could use this to expose sensitive\ninformation (kernel memory). (CVE-2019-19534)\n\nTristan Madani discovered that the ALSA timer implementation in the Linux\nkernel contained a use-after-free vulnerability. A local attacker could use\nthis to cause a denial of service (system crash) or possibly execute\narbitrary code. (CVE-2019-19807)\n\n\nTLDR: After a standard system update you need to reboot your computer to make\nall the necessary changes.\n\nATTENTION: Due to an unavoidable ABI change the kernel updates have\nbeen given a new version number, which requires you to recompile and\nreinstall all third party kernel modules you might have installed.\nUnless you manually uninstalled the standard kernel metapackages\n(e.g. 
linux-generic, linux-generic-lts-RELEASE, linux-virtual,\nlinux-powerpc), a standard system upgrade will automatically perform\nthis as well.\n\n", + "vuln-name": "USN-4227-2", + "vuln-primary-link": "https://usn.ubuntu.com/4227-2/", + "vuln-priority": 5, + "vuln-short-description": "Several security issues were fixed in the Linux kernel.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4228-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4228-1.json new file mode 100644 index 0000000..9cb2051 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4228-1.json @@ -0,0 +1,118 @@ +{ + "USN-4228-1": { + "comparisons": { + "xenial-bucket": { + "comparison-collection-subtype": [ + "linux-image-powerpc-e500mc", + "linux-image-4.4.0-1100-aws", + "linux-image-4.4.0-171-generic-lpae", + "linux-image-4.4.0-171-generic", + "linux-image-4.4.0-171-lowlatency", + "linux-image-4.4.0-171-powerpc-e500mc", + "linux-image-4.4.0-171-powerpc64-smp", + "linux-image-virtual", + "linux-image-snapdragon", + "linux-image-powerpc64-emb", + "linux-image-4.4.0-1064-kvm", + "linux-image-generic", + "linux-image-4.4.0-171-powerpc64-emb", + "linux-image-aws", + "linux-image-kvm", + "linux-image-4.4.0-1127-raspi2", + "linux-image-raspi2", + "linux-image-powerpc-smp", + "linux-image-generic-lpae", + "linux-image-4.4.0-1131-snapdragon", + "linux-image-powerpc64-smp", + "linux-image-lowlatency", + "linux-image-4.4.0-171-powerpc-smp" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "4.4.0.171.179", + "4.4.0-1100.111", + "4.4.0-171.200", + "4.4.0-171.200", + "4.4.0-171.200", + "4.4.0-171.200", + "4.4.0-171.200", + "4.4.0.171.179", + "4.4.0.1131.123", + "4.4.0.171.179", + "4.4.0-1064.71", + "4.4.0.171.179", + "4.4.0-171.200", + "4.4.0.1100.104", + "4.4.0.1064.64", + "4.4.0-1127.136", + "4.4.0.1127.127", + "4.4.0.171.179", + "4.4.0.171.179", + "4.4.0-1131.139", + "4.4.0.171.179", + "4.4.0.171.179", + "4.4.0-171.200" + ] + } + }, + "filters": { + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-14895_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14895.html", + "CVE-2019-14896_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14896.html", + "CVE-2019-14897_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14897.html", + "CVE-2019-14901_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14901.html", + "CVE-2019-18660_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-18660.html", + "CVE-2019-19052_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19052.html", + "CVE-2019-19524_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19524.html", + "CVE-2019-19534_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19534.html", + "USN-4228-1": 
"https://usn.ubuntu.com/4228-1/" + }, + "vuln-long-description": "It was discovered that a heap-based buffer overflow existed in the Marvell\nWiFi-Ex Driver for the Linux kernel. A physically proximate attacker could\nuse this to cause a denial of service (system crash) or possibly execute\narbitrary code. (CVE-2019-14895, CVE-2019-14901)\n\nIt was discovered that a heap-based buffer overflow existed in the Marvell\nLibertas WLAN Driver for the Linux kernel. A physically proximate attacker\ncould use this to cause a denial of service (system crash) or possibly\nexecute arbitrary code. (CVE-2019-14896, CVE-2019-14897)\n\nAnthony Steinhauser discovered that the Linux kernel did not properly\nperform Spectre_RSB mitigations to all processors for PowerPC architecture\nsystems in some situations. A local attacker could use this to expose\nsensitive information. (CVE-2019-18660)\n\nIt was discovered that Geschwister Schneider USB CAN interface driver in\nthe Linux kernel did not properly deallocate memory in certain failure\nconditions. A physically proximate attacker could use this to cause a\ndenial of service (kernel memory exhaustion). (CVE-2019-19052)\n\nIt was discovered that the driver for memoryless force-feedback input\ndevices in the Linux kernel contained a use-after-free vulnerability. A\nphysically proximate attacker could possibly use this to cause a denial of\nservice (system crash) or execute arbitrary code. (CVE-2019-19524)\n\nIt was discovered that the PEAK-System Technik USB driver in the Linux\nkernel did not properly sanitize memory before sending it to the device. A\nphysically proximate attacker could use this to expose sensitive\ninformation (kernel memory). (CVE-2019-19534)\n\n\nTLDR: After a standard system update you need to reboot your computer to make\nall the necessary changes.\n\nATTENTION: Due to an unavoidable ABI change the kernel updates have\nbeen given a new version number, which requires you to recompile and\nreinstall all third party kernel modules you might have installed.\nUnless you manually uninstalled the standard kernel metapackages\n(e.g. 
linux-generic, linux-generic-lts-RELEASE, linux-virtual,\nlinux-powerpc), a standard system upgrade will automatically perform\nthis as well.\n\n", + "vuln-name": "USN-4228-1", + "vuln-primary-link": "https://usn.ubuntu.com/4228-1/", + "vuln-priority": 5, + "vuln-short-description": "Several security issues were fixed in the Linux kernel.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4228-2.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4228-2.json new file mode 100644 index 0000000..39ac3ce --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4228-2.json @@ -0,0 +1,100 @@ +{ + "USN-4228-2": { + "comparisons": { + "trusty-bucket": { + "comparison-collection-subtype": [ + "linux-image-powerpc-smp-lts-xenial", + "linux-image-generic-lpae-lts-xenial", + "linux-image-virtual-lts-xenial", + "linux-image-powerpc64-emb-lts-xenial", + "linux-image-4.4.0-171-generic-lpae", + "linux-image-4.4.0-171-powerpc64-smp", + "linux-image-lowlatency-lts-xenial", + "linux-image-powerpc64-smp-lts-xenial", + "linux-image-4.4.0-171-powerpc64-emb", + "linux-image-4.4.0-1060-aws", + "linux-image-4.4.0-171-generic", + "linux-image-generic-lts-xenial", + "linux-image-aws", + "linux-image-4.4.0-171-lowlatency", + "linux-image-powerpc-e500mc-lts-xenial", + "linux-image-4.4.0-171-powerpc-smp", + "linux-image-4.4.0-171-powerpc-e500mc" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "4.4.0.171.150", + "4.4.0.171.150", + "4.4.0.171.150", + "4.4.0.171.150", + "4.4.0-171.200~14.04.1", + "4.4.0-171.200~14.04.1", + "4.4.0.171.150", + "4.4.0.171.150", + "4.4.0-171.200~14.04.1", + "4.4.0-1060.64", + "4.4.0-171.200~14.04.1", + "4.4.0.171.150", + "4.4.0.1060.61", + "4.4.0-171.200~14.04.1", + "4.4.0.171.150", + "4.4.0-171.200~14.04.1", + "4.4.0-171.200~14.04.1" + ] + } + }, + "filters": { + "trusty-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "trusty" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-14895_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14895.html", + "CVE-2019-14896_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14896.html", + "CVE-2019-14897_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14897.html", + "CVE-2019-14901_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-14901.html", + "CVE-2019-18660_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-18660.html", + "CVE-2019-19052_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19052.html", + "CVE-2019-19524_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19524.html", + "CVE-2019-19534_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-19534.html", + "USN-4228-2": "https://usn.ubuntu.com/4228-2/" + }, + "vuln-long-description": "USN-4228-1 fixed vulnerabilities in the Linux kernel for Ubuntu 16.04\nLTS. 
This update provides the corresponding updates for the Linux\nHardware Enablement (HWE) kernel from Ubuntu 16.04 LTS for Ubuntu\n14.04 ESM.\n\nIt was discovered that a heap-based buffer overflow existed in the Marvell\nWiFi-Ex Driver for the Linux kernel. A physically proximate attacker could\nuse this to cause a denial of service (system crash) or possibly execute\narbitrary code. (CVE-2019-14895, CVE-2019-14901)\n\nIt was discovered that a heap-based buffer overflow existed in the Marvell\nLibertas WLAN Driver for the Linux kernel. A physically proximate attacker\ncould use this to cause a denial of service (system crash) or possibly\nexecute arbitrary code. (CVE-2019-14896, CVE-2019-14897)\n\nAnthony Steinhauser discovered that the Linux kernel did not properly\nperform Spectre_RSB mitigations to all processors for PowerPC architecture\nsystems in some situations. A local attacker could use this to expose\nsensitive information. (CVE-2019-18660)\n\nIt was discovered that Geschwister Schneider USB CAN interface driver in\nthe Linux kernel did not properly deallocate memory in certain failure\nconditions. A physically proximate attacker could use this to cause a\ndenial of service (kernel memory exhaustion). (CVE-2019-19052)\n\nIt was discovered that the driver for memoryless force-feedback input\ndevices in the Linux kernel contained a use-after-free vulnerability. A\nphysically proximate attacker could possibly use this to cause a denial of\nservice (system crash) or execute arbitrary code. (CVE-2019-19524)\n\nIt was discovered that the PEAK-System Technik USB driver in the Linux\nkernel did not properly sanitize memory before sending it to the device. A\nphysically proximate attacker could use this to expose sensitive\ninformation (kernel memory). (CVE-2019-19534)\n\n\nTLDR: After a standard system update you need to reboot your computer to make\nall the necessary changes.\n\nATTENTION: Due to an unavoidable ABI change the kernel updates have\nbeen given a new version number, which requires you to recompile and\nreinstall all third party kernel modules you might have installed.\nUnless you manually uninstalled the standard kernel metapackages\n(e.g. linux-generic, linux-generic-lts-RELEASE, linux-virtual,\nlinux-powerpc), a standard system upgrade will automatically perform\nthis as well.\n\n", + "vuln-name": "USN-4228-2", + "vuln-primary-link": "https://usn.ubuntu.com/4228-2/", + "vuln-priority": 5, + "vuln-short-description": "Several security issues were fixed in the Linux kernel.\n" + } +} \ No newline at end of file From 08af924718414ffd6a249d2e85c25bfb4445ede5 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Tue, 7 Jan 2020 14:51:40 -0800 Subject: [PATCH 010/143] RSS Feed Creator for Audittools. 
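
A minimal sketch of the new source-key handling this change wires in for the RHSA
feed. The regex values are taken from the `_known_feeds["rhsa"]` entry in the diff
below; the feed entry title here is a made-up example, not a captured one:

```python
import re

# Hypothetical entry title, shaped like the linuxsecurity.com RHSA RSS feed.
title = "RedHat: RHSA-2020-0028 Important: kpatch-patch security update"

# regex_obj_source_key: pull the advisory id out of the entry title.
match = re.search(r"^RedHat: (RHSA-\d{4}-\d{1,4})", title)
source_key = match.group(1) if match else None       # "RHSA-2020-0028"

# regex_obj_replace: rewrite the id into the colon form Red Hat uses for errata.
if source_key is not None:
    source_key = re.sub(r"(RHSA-\d{4})-(\d{1,4})", r"\1:\2", source_key)

print(source_key)                                     # RHSA-2020:0028
```

The rewrite step keeps the key in the same `RHSA-YYYY:NNNN` style the RHSA audit
artifacts already use (e.g. `RHSA-2020:0028` earlier in this series).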
--- rss_creator.py | 66 ++++++++++++++++++++++++++++++++++---------------- 1 file changed, 45 insertions(+), 21 deletions(-) diff --git a/rss_creator.py b/rss_creator.py index 2dc7292..ee5ad2e 100755 --- a/rss_creator.py +++ b/rss_creator.py @@ -11,6 +11,7 @@ import pyjq import audittools.audits_usn +import audittools.audits_rhsa _known_feeds = {"usn" : {"url" : "https://usn.ubuntu.com/usn/atom.xml", @@ -20,7 +21,16 @@ "update_existing" : False, "audit_source_obj" : audittools.audits_usn.AuditSourceUSN, "format" : "json" - } + }, + "rhsa" : {"url" : "https://linuxsecurity.com/advisories/red-hat?format=feed&type=rss", + "subdir": "redhat_rhsa", + "jq_obj_source_key" : ".title", + "regex_obj_source_key" : r"^RedHat: (RHSA-\d{4}-\d{1,4})", + "regex_obj_replace" : [r"(RHSA-\d{4})-(\d{1,4})", r"\1:\2"], + "update_existing" : False, + "audit_source_obj" : audittools.audits_rhsa.AuditSourceRHSA, + "format" : "json" + } } @@ -102,7 +112,6 @@ def feed_create(feed_name, feed_config=None, basedir=None, confirm=False, max_au logger.error("Unable to Read RSS Feed Returning Empty") feed_obj = {"entries" : list()} - if len(feed_obj["entries"]) == 0: logger.warning("No Entries in Given URL.") else: @@ -110,6 +119,7 @@ def feed_create(feed_name, feed_config=None, basedir=None, confirm=False, max_au current_num = 0 for entry in feed_obj["entries"]: + logger.debug("Entry : {}".format(entry)) current_num = current_num + 1 best_source_key = None @@ -132,6 +142,16 @@ def feed_create(feed_name, feed_config=None, basedir=None, confirm=False, max_au logger.debug("Best Source key After Regex : {}".format(best_source_key)) + if "regex_obj_replace" in feed_config.keys(): + + regex_replace = re.sub(*[*feed_config["regex_obj_replace"], str(best_source_key)]) + + if regex_replace is not None: + best_source_key = regex_replace + + logger.debug("Best Source key After Replace : {}".format(best_source_key)) + + if best_source_key is not None and len(best_source_key) > 0: @@ -143,32 +163,36 @@ def feed_create(feed_name, feed_config=None, basedir=None, confirm=False, max_au as_args = list() - as_obj = feed_config["audit_source_obj"](*as_args, **as_kwargs) + try: - if as_obj.validate_audit_live() is True: - - # See if File Exists - if as_obj.audit_file_exists() is False: - # Add to Object - if confirm is False: - logger.info("Audit {} File Not Written to {} Confirm not Set.".format(best_source_key, as_obj.audit_filename)) - audit_source_items[best_source_key] = ["False", "Confirm not Set"] + as_obj = feed_config["audit_source_obj"](*as_args, **as_kwargs) + except Exception as audit_source_error: + logger.error("Unable to Pull Audit {}.".format(best_source_key)) + logger.debug("Pull Error : {}".format(audit_source_error)) + audit_source_items[best_source_key] = [False, "Error on Creation."] + else: + if as_obj.validate_audit_live() is True: + + # See if File Exists + if as_obj.audit_file_exists() is False: + # Add to Object + if confirm is False: + logger.info("Audit {} File Not Written to {} Confirm not Set.".format(best_source_key, as_obj.audit_filename)) + audit_source_items[best_source_key] = ["False", "Confirm not Set"] + else: + logger.info("Audit {} Writing to {}.".format(best_source_key, as_obj.audit_filename)) + + audit_source_items[best_source_key] = as_obj.write_audit(file_format=feed_config["format"]) else: - logger.info("Audit {} Writing to {}.".format(best_source_key, as_obj.audit_filename)) - - audit_source_items[best_source_key] = as_obj.write_audit(file_format=feed_config["format"]) + logger.info("Audit File {} 
Has existing File.".format(best_source_key)) + audit_source_items[best_source_key] = [False, "Pre-Existing File."] else: - logger.info("Audit File {} Has existing File.".format(best_source_key)) - audit_source_items[best_source_key] = [False, "Pre-Existing File."] - else: - logger.warning("Audit Finding for Source {} Not Valid.".format(best_source_key)) - audit_source_items[best_source_key] = [False, "Invalid Audit on Creation"] + logger.warning("Audit Finding for Source {} Not Valid.".format(best_source_key)) + audit_source_items[best_source_key] = [False, "Invalid Audit on Creation"] else: logger.warning("No Source Key found for Entry : {}".format(entry["id"])) - - if max_audit is not None and max_audit != -1 and current_num > (max_audit - 1): logger.info("Reached Maximum of {} Audits Processed.".format(current_num)) break From f9b0ee96a713248d599126cf61c65d53d9b8c877 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Tue, 7 Jan 2020 14:51:51 -0800 Subject: [PATCH 011/143] Bumping MDB to 10.4 --- travis/pre_build.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/travis/pre_build.sh b/travis/pre_build.sh index 5a73f8a..81f18f9 100755 --- a/travis/pre_build.sh +++ b/travis/pre_build.sh @@ -12,9 +12,9 @@ echo -e "Enabling Mariadb 10.2 Repo" sudo apt-get install software-properties-common -sudo apt-key adv --recv-keys --keyserver hkp://keyserver.ubuntu.com:80 0xcbcb082a1bb943db +sudo apt-key adv --recv-keys --keyserver hkp://keyserver.ubuntu.com:80 0xF1656F24C74CD1D8 -sudo add-apt-repository 'deb [arch=amd64] http://sfo1.mirrors.digitalocean.com/mariadb/repo/10.2/ubuntu trusty main' +sudo add-apt-repository 'deb [arch=amd64] http://sfo1.mirrors.digitalocean.com/mariadb/repo/10.4/ubuntu xenial main' sudo apt-get update From 94739af4ae7fdf49712e51e157bdb73d75815a6a Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 12:20:32 -0800 Subject: [PATCH 012/143] Yoyo Usage Using Yoyo to manage changes in schema in a definable fashion. --- analyze.py | 2 +- requirements.txt | 1 + source_docs/db_setup.md | 31 +++ .../20200107_01_Gv0ql-initialize.py | 208 ++++++++++++++++++ .../20200107_02_xxxxx-credentials.py | 128 +++++++++++ yoyo_steps/yoyo.ini.sample | 46 ++++ 6 files changed, 415 insertions(+), 1 deletion(-) create mode 100644 source_docs/db_setup.md create mode 100644 yoyo_steps/migrations/20200107_01_Gv0ql-initialize.py create mode 100644 yoyo_steps/migrations/20200107_02_xxxxx-credentials.py create mode 100644 yoyo_steps/yoyo.ini.sample diff --git a/analyze.py b/analyze.py index bfb8afa..cb188d7 100755 --- a/analyze.py +++ b/analyze.py @@ -88,7 +88,7 @@ def analyze(CONFIGDIR, CONFIG): sys.exit('Bad configuration file {} {}'.format(CONFIG, general_config_error)) else: # DB Config Items - db_config_items=dict() + db_config_items = dict() for section in config: if section in ["database"] : for item in config[section]: diff --git a/requirements.txt b/requirements.txt index 1e9b3c3..a5a0887 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,3 +14,4 @@ cvss cpe feedparser pyjq +yoyo-database-migrations diff --git a/source_docs/db_setup.md b/source_docs/db_setup.md new file mode 100644 index 0000000..ed29684 --- /dev/null +++ b/source_docs/db_setup.md @@ -0,0 +1,31 @@ +# DB Setup + +## Yoyo + +Setting up the database has been reworked to utilize +[yoyo](https://marcosschroh.github.io/yoyo-database-migrations/). You need a +modern mariadb Database (10.x and higher) available to you named `manowar2` and +and administraive user. 
+ +Then in the `yoyo_steps` directory you need to create a `yoyo.ini` file (see the +`yoyo.ini.sample` as an example). In it you need to edit the target database with +your administrator username and password. Additionally you'll need to populate +credentials for your big 3 users, the api user, the storage user and the analyze +user. + +I'd encourage managing this file with a change management system so that you can +better manage these secrets contained here. + +## Application + +Inside the `yoyo_steps` directory do the following: + +``` +yoyo showmigrations +``` + +This should show you the migrations that are available and haven't been applied yet. +If you're downgrading, you'll want to utilize the rollback options. But generally a +simple `apply` should get you the latest database version for your version of manowar. + + diff --git a/yoyo_steps/migrations/20200107_01_Gv0ql-initialize.py b/yoyo_steps/migrations/20200107_01_Gv0ql-initialize.py new file mode 100644 index 0000000..9150af1 --- /dev/null +++ b/yoyo_steps/migrations/20200107_01_Gv0ql-initialize.py @@ -0,0 +1,208 @@ +""" +Initialize + +Initial Value to Create Database +""" + +from yoyo import step + +__depends__ = {} + +steps = [ + # Create Hosts Table + step('''create table hosts ( + host_id INT UNSIGNED NOT NULL AUTO_INCREMENT, + host_uber_id INT UNSIGNED, + hostname VARCHAR(25) NOT NULL, + pop VARCHAR(10), + srvtype VARCHAR(25), + hoststatus VARCHAR(25), + last_update TIMESTAMP, + PRIMARY KEY ( host_id ), + CONSTRAINT host_uber_id UNIQUE (host_uber_id) +)''', "drop table if exists hosts"), + step('''create or replace index hosts_last_update_index + on hosts (last_update)''', + "drop index if exists hosts_last_update_index on hosts"), + step('''create table collection ( + collection_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + fk_host_id INT UNSIGNED NOT NULL REFERENCES hosts(host_id), + initial_update TIMESTAMP NOT NULL default CURRENT_TIMESTAMP, + last_update TIMESTAMP NOT NULL default '0000-00-00 00:00:00' on UPDATE CURRENT_TIMESTAMP, + collection_type VARCHAR(256) NOT NULL, + collection_subtype VARCHAR(256) NOT NULL, + collection_value VARCHAR(256), + PRIMARY KEY (collection_id) +)''', "drop table if exists collection"), + step("create or replace index fk_host_id on collection (fk_host_id)", + "drop index if exists fk_host_id on collection"), + step('''create or replace index generic_collection_index on collection (fk_host_id, collection_type, collection_subtype, last_update)''', + "drop index if exists generic_collection_index on collection"), + step('''create or replace index type_subtype_index + on collection (collection_type, collection_subtype)''', + "drop index if exists type_subtype_index on collection"), + step('''create or replace index collection_type_index + on collection (last_update, collection_type, collection_subtype);''', + "drop index if exists collection_type_index on collection"), + step('''create or replace index host_index + on collection (fk_host_id);''', + "drop index if exists host_index on collection"), + step('''create table audits ( + audit_id INT UNSIGNED NOT NULL AUTO_INCREMENT, + audit_name VARCHAR(64) NOT NULL, + /* Priorities should be between 0-10 but it will accept up to 255 */ + audit_priority TINYINT UNSIGNED NOT NULL DEFAULT 5, + audit_short_description VARCHAR(64) NOT NULL, + audit_long_description VARCHAR(512), + /* HTTP Link */ + audit_primary_link VARCHAR(64) NOT NULL, + /* Secondary Links stored as a BLOB */ + audit_secondary_links BLOB, + /* Collection Columns */ + audit_filters 
VARCHAR(512), + audit_comparison VARCHAR(512), + filename VARCHAR(512), + CONSTRAINT audit_name_unique UNIQUE (audit_name), + PRIMARY KEY (audit_id) )''', + "drop table if exists audits"), + step('''create table audits_by_host ( + audit_result_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + fk_host_id INT UNSIGNED NOT NULL REFERENCES hosts(host_id), + fk_audits_id INT UNSIGNED NOT NULL REFERENCES audits(audit_id), + initial_audit TIMESTAMP NOT NULL default CURRENT_TIMESTAMP, + last_audit TIMESTAMP NOT NULL default '0000-00-00 00:00:00' on UPDATE CURRENT_TIMESTAMP, + bucket VARCHAR(64), + audit_result ENUM('pass','fail','notafflicted'), + audit_result_text VARCHAR(256), + PRIMARY KEY (audit_result_id))''', + "drop table if exists audits_by_host"), + step('''create or replace index audits_by_host_vector_index + on audits_by_host (fk_audits_id, fk_host_id, audit_result, bucket)''', + "drop index if exists audits_by_host_vector_index on audits_by_host"), + step('''create table audits_by_pop ( + pop_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + pop_text VARCHAR(10) NOT NULL, + fk_audits_id INT UNSIGNED NOT NULL REFERENCES audits(audit_id), + pop_initial_audit TIMESTAMP NOT NULL default CURRENT_TIMESTAMP, + pop_last_audit TIMESTAMP NOT NULL default '0000-00-00 00:00:00' on UPDATE CURRENT_TIMESTAMP, + pop_passed BIGINT UNSIGNED NOT NULL, + pop_failed BIGINT UNSIGNED NOT NULL, + pop_exempt BIGINT UNSIGNED NOT NULL, + PRIMARY KEY(pop_id))''', + "drop table if exists audits_by_pop"), + step('''create or replace index pop_time_index + on audits_by_pop (fk_audits_id, pop_last_audit)''', + "drop index if exists pop_time_index on audits_by_pop"), + step('''create table audits_by_srvtype ( + srvtype_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + srvtype_text VARCHAR(64) NOT NULL, + fk_audits_id INT UNSIGNED NOT NULL REFERENCES audits(audit_id), + srvtype_initial_audit TIMESTAMP NOT NULL default CURRENT_TIMESTAMP, + srvtype_last_audit TIMESTAMP NOT NULL default '0000-00-00 00:00:00' on UPDATE CURRENT_TIMESTAMP, + srvtype_passed BIGINT UNSIGNED NOT NULL, + srvtype_failed BIGINT UNSIGNED NOT NULL, + srvtype_exempt BIGINT UNSIGNED NOT NULL, + PRIMARY KEY(srvtype_id) )''', + "drop table if exists audits_by_srvtype"), + step('''create or replace index srvtype_time_index + on audits_by_srvtype (fk_audits_id, srvtype_last_audit)''', + "drop index if exists srvtype_time_index on audits_by_srvtype"), + step('''create table audits_by_acoll ( + acoll_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + acoll_text varchar(64), + fk_audits_id INT UNSIGNED NOT NULL REFERENCES audits(audit_id), + acoll_initial_audit TIMESTAMP NOT NULL default CURRENT_TIMESTAMP, + acoll_last_audit TIMESTAMP NOT NULL default '0000-00-00 00:00:00' on UPDATE CURRENT_TIMESTAMP, + acoll_passed BIGINT UNSIGNED NOT NULL, + acoll_failed BIGINT UNSIGNED NOT NULL, + acoll_exempt BIGINT UNSIGNED NOT NULL, + PRIMARY KEY(acoll_id) )''', + "drop table if exists audits_by_acoll"), + step('''create or replace index acoll_time_index + on audits_by_acoll (fk_audits_id, acoll_last_audit);''', + "drop index if exists acoll_time_index on audits_by_acoll"), + step('''create table custdashboard ( + custdashboardid INT UNSIGNED NOT NULL AUTO_INCREMENT, + owner VARCHAR(64) NOT NULL, + dashboard_name VARCHAR(16), + dashboard_description MEDIUMTEXT NOT NULL, + CONSTRAINT dashboard_name UNIQUE(dashboard_name), + PRIMARY KEY (custdashboardid) )''', + "drop table if exists custdashboard"), + step('''create table custdashboardmembers ( + membershipid INT UNSIGNED NOT NULL 
AUTO_INCREMENT, + fk_custdashboardid INT UNSIGNED NOT NULL REFERENCES audits(audit_id), + fk_audits_id INT UNSIGNED NOT NULL REFERENCES audits(audit_id), + PRIMARY KEY (membershipid), + CONSTRAINT unique_combo UNIQUE(fk_custdashboardid, fk_audits_id))''', + "drop table if exists custdashboardmembers"), + step('''create table apiUsers ( + apiuid INT UNSIGNED NOT NULL AUTO_INCREMENT, + apiusername VARCHAR(64) NOT NULL, + apiuser_purpose TEXT, + CONSTRAINT apiusername UNIQUE (apiusername), + PRIMARY KEY (apiuid) )''', + "drop table if exists apiUsers"), + step('''create or replace index apiusername_index + on apiUsers (apiusername)''', + "drop index if exists apiusername_index on apiUsers"), + step('''create table apiActiveTokens ( + tokenid INT UNSIGNED NOT NULL AUTO_INCREMENT, + token VARCHAR(512) NOT NULL, + tokentype VARCHAR(12) NOT NULL, + fk_apikeyid VARCHAR(64) NOT NULL REFERENCES apiUsers(apikeyid), + token_issue_date TIMESTAMP NOT NULL default CURRENT_TIMESTAMP, + token_expire_date TIMESTAMP NOT NULL default '0000-00-00 00:00:00' on UPDATE CURRENT_TIMESTAMP, + salt INT UNSIGNED NOT NULL, + activated BOOL NOT NULL default true, + PRIMARY KEY (tokenid) )''', + "drop table if exists apiActiveTokens"), + step('''create or replace index token_by_userid + on apiActiveTokens(fk_apikeyid, token)''', + "drop index if exists token_by_userid on apiActiveTokens"), + step('''create table sapiActiveHosts ( + sapihost_record INT UNSIGNED NOT NULL AUTO_INCREMENT, + fk_host_id INT UNSIGNED NOT NULL REFERENCES hosts(host_id), + hostname VARCHAR(25) NOT NULL, + first_seen TIMESTAMP NOT NULL default CURRENT_TIMESTAMP, + last_updated TIMESTAMP NOT NULL default '0000-00-00 00:00:00' on UPDATE CURRENT_TIMESTAMP, + PRIMARY KEY (sapihost_record) )''', + "drop table if exists sapiActiveHosts"), + step('''create table collection_archive ( + collection_id BIGINT UNSIGNED NOT NULL, + fk_host_id INT UNSIGNED NOT NULL, + initial_update TIMESTAMP NOT NULL, + last_update TIMESTAMP NOT NULL, + collection_type VARCHAR(256) NOT NULL, + collection_subtype VARCHAR(256) NOT NULL, + collection_value VARCHAR(256), + PRIMARY KEY (collection_id) )''', + "drop table if exists collection_archive"), + step('''create table audits_by_acoll_archive ( + acoll_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + acoll_text varchar(64), + fk_audits_id INT UNSIGNED NOT NULL REFERENCES audits(audit_id), + acoll_initial_audit TIMESTAMP NOT NULL default CURRENT_TIMESTAMP, + acoll_last_audit TIMESTAMP NOT NULL default '0000-00-00 00:00:00' on UPDATE CURRENT_TIMESTAMP, + acoll_passed BIGINT UNSIGNED NOT NULL, + acoll_failed BIGINT UNSIGNED NOT NULL, + acoll_exempt BIGINT UNSIGNED NOT NULL, + PRIMARY KEY(acoll_id) )''', + "drop table if exists audits_by_acoll_archive"), + step('''create table ip_intel( + ip_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, + ip_hex VARBINARY(16) NOT NULL, + fk_host_id INT UNSIGNED NOT NULL REFERENCES hosts(host_id), + guessed_type ENUM('vips4','vips6','host4','host6','drac4','drac6','netdev4','netdev6','unknown') NOT NULL, + first_seen TIMESTAMP NOT NULL default CURRENT_TIMESTAMP, + last_seen TIMESTAMP NOT NULL default CURRENT_TIMESTAMP on UPDATE CURRENT_TIMESTAMP, + CONSTRAINT ip_hex_unique UNIQUE (ip_hex, fk_host_id, guessed_type), + PRIMARY KEY (ip_id) )''', + "drop table if exists ip_intel"), + step('''create or replace index ip_intel_ip_hex + on ip_intel (ip_hex)''', + "drop index if exists ip_intel_ip_hex on ip_intel"), + step('''create or replace index ip_intel_host_to_iphex + on ip_intel (fk_host_id, ip_hex)''', 
+ "drop index if exists ip_intel_host_to_iphex on ip_intel") +] diff --git a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py new file mode 100644 index 0000000..28236c7 --- /dev/null +++ b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python3 + +''' +Configuration of API Users +''' + +import os +import os.path +import logging + +from configparser import ConfigParser + +from yoyo import step + +__depends__ = {"20200107_01_Gv0ql-initialize"} + +# Permissions as they "should" be +_table_defs = {"manowar2.*" : {"api" : ["select"]}, + "manowar2.apiUsers": {"api" : ["insert", "update", "select", "delete"]}, + "manowar2.apiActiveTokens": {"api" : ["insert", "update", "select", "delete"]}, + "manowar2.custdashboard": {"api" : ["insert", "update", "select", "delete"]}, + "manowar2.custdashboardmembers": {"api" : ["insert", "update", "select", "delete"]}, + "manowar2.hosts": {"api" : ["insert", "update", "select", "delete"], + "store" : ["insert", "update", "select", "delete"], + "analyze" : ["select"] + }, + "manowar2.collection": {"api" : ["insert", "update", "select", "delete"], + "store" : ["insert", "update", "select", "delete"], + "analyze" : ["select"] + }, + "manowar2.collection_archive": {"analyze" : ["insert", "update", "select", "delete"], + "store" : ["insert", "update", "select", "delete"] + }, + "manowar2.audits_by_acoll_archive": {"analyze" : ["insert", "update", "select", "delete"], + "store" : ["insert", "update", "select", "delete"] + }, + "manowar2.sapiActiveHosts": {"api" : ["insert", "update", "select", "delete"], + "store" : ["insert", "update", "select", "delete"] + }, + "manowar2.ip_intel": {"api" : ["insert", "update", "select", "delete"]}, + "manowar2.audits": {"analyze" : ["insert", "update", "select", "delete"]}, + "manowar2.audits_by_host": {"analyze" : ["insert", "update", "select", "delete"]}, + "manowar2.audits_by_pop": {"analyze" : ["insert", "update", "select", "delete"]}, + "manowar2.audits_by_srvtype": {"analyze" : ["insert", "update", "select", "delete"]}, + "manowar2.audits_by_acoll": {"analyze" : ["insert", "update", "select", "delete"]} + } + + +# Manowar API User manowar_api +# Lets find the yoyo.ini file + +logger = logging.getLogger("yoyo-credentials Step") + +logger.info("Finding Configuration File yoyo.ini") + +do_api_attempt = True +config_file = None + +# User Data +username = None +hostmask = None +req_enc = None +password = None + +steps = list() + +for default_file in ("../yoyo.ini", "./yoyo.ini"): + if os.path.isfile(default_file) and os.access(default_file, os.R_OK): + logger.debug("Using Default File : {}".format(default_file)) + config_file = default_file + break + +if config_file is None: + logger.warning("Yoyo applied without a configuration file not attempting to update credential stuff.") + raise ValueError("No yoyo configuration file given.") +else: + # Load Configuration + try: + # Read Our INI with our data collection rules + config = ConfigParser() + config.read(config_file) + except Exception as general_config_error: + logger.warning("Couldn't read configuration file {} properly. 
Not doing api credential stuff.") + raise general_config_error + else: + # DB Config Items + yoyo_config = dict(config._sections) + + for user_type in ["api", "store", "analyze"]: + + if yoyo_config.get("manowar", dict()).get("{}_password".format(user_type), None) is None: + # We not doing it + do_api_attempt = False + else: + username = yoyo_config.get("manowar", dict()).get("{}_username".format(user_type), None) + hostmask = yoyo_config.get("manowar", dict()).get("{}_hostmask".format(user_type), None) + password = yoyo_config.get("manowar", dict()).get("{}_password".format(user_type), None) + + req_enc = yoyo_config.get("manowar", dict()).get("req_enc", "SSL") + + + this_u = '"{0}"@"{1}"'.format(username, hostmask) + + steps.append(step("create or replace user {0} identified by \"{1}\"".format(this_u, + password), + "drop user {}".format(this_u))) + + if req_enc in ("SSL", "NONE", "X509"): + steps.append(step("alter user {0} REQUIRE {1}".format(this_u, + req_enc))) + else: + raise ValueError("req_enc not set to valid entry") + + # Now Add Permissions + for this_table in _table_defs.keys(): + if user_type in _table_defs[this_table].keys(): + logger.info("Adding rights to {} for {}".format(this_table, user_type)) + + for right in _table_defs[this_table][user_type]: + + steps.append(step("grant {} on {} to {}".format(right, + this_table, + this_u))) + else: + logger.debug("User {} Not granted access to {}".format(user_type, this_table)) + + finally: + pass diff --git a/yoyo_steps/yoyo.ini.sample b/yoyo_steps/yoyo.ini.sample new file mode 100644 index 0000000..4d03b78 --- /dev/null +++ b/yoyo_steps/yoyo.ini.sample @@ -0,0 +1,46 @@ +[DEFAULT] + +# +# Sample Yoyo Configuration file +# Big thing is to replace the database parameter with +# the location of your databasredentials +# + +sources = %(here)s/migrations + +# Target Database +database = mysql://administrator:administrator@localhost/manowar2 + +# Verbosity +verbosity = 3 + +# Batch Mode +batch_mode = on + +[manowar] +# Require an Encrypted connection. Useful if using TLS encryption in transit +# with an RDS instance as you can force an encrypted connection +# Options are NONE, SSL or X509 you'll have to do custom work to make +# Certificate auth work (or just request it and I'll hack on it) +req_enc = SSL + +# User for API that does most of everything nowadays. +# In the future almost everything should go through this +# and the anayze user +api_username = manowar +#api_password = replaceme +api_hostmask = % + +# This is for the old SSH collector system. In the future +# This should get replaced with either an SSH collector that +# uses the api or manowar_agent directly +store_username = manowar_store +#store_password = replaceme +store_hostmask = % + +# User for the analyze audit processes +# Seperates out the two for better splitting of things. 
+# Mainly read +analyze_username = manowar_store +#analyze_password = replaceme +analyze_hostmask = % From 22710a85aaafe68ab34471681e11aadeee2030ec Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 12:20:44 -0800 Subject: [PATCH 013/143] Travis upgrade to mdb 10.4 --- travis/pre_build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/travis/pre_build.sh b/travis/pre_build.sh index 81f18f9..b211a1c 100755 --- a/travis/pre_build.sh +++ b/travis/pre_build.sh @@ -8,7 +8,7 @@ echo -e "Enabling Trusty Backports for Shellcheck" echo -e "deb http://archive.ubuntu.com/ubuntu trusty-backports main restricted universe multiverse" | sudo tee /etc/apt/sources.list.d/backports.list -echo -e "Enabling Mariadb 10.2 Repo" +echo -e "Enabling Mariadb 10.4 Repo" sudo apt-get install software-properties-common From 11988dc13ec508041175a7d0f282cd1e04363225 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 12:21:47 -0800 Subject: [PATCH 014/143] New Audits ui chagnes New Audits changing ui.py to use a .yaml file and not an .ini file. --- .../audittools.d/ubuntu_usn/USN-4230-1.json | 126 ++++++++++++ .../audittools.d/ubuntu_usn/USN-4231-1.json | 180 ++++++++++++++++++ .../audittools.d/ubuntu_usn/USN-4232-1.json | 61 ++++++ ui.py | 28 +-- 4 files changed, 373 insertions(+), 22 deletions(-) create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4230-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4231-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4232-1.json diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4230-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4230-1.json new file mode 100644 index 0000000..f304a72 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4230-1.json @@ -0,0 +1,126 @@ +{ + "USN-4230-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "clamav" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "0.102.1+dfsg-0ubuntu0.18.04.2" + ] + }, + "disco-bucket": { + "comparison-collection-subtype": [ + "clamav" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "0.102.1+dfsg-0ubuntu0.19.04.2" + ] + }, + "eoan-bucket": { + "comparison-collection-subtype": [ + "clamav" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "0.102.1+dfsg-0ubuntu0.19.10.2" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "clamav" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "0.102.1+dfsg-0ubuntu0.16.04.2" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "disco-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "disco" + ] + }, + "eoan-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "eoan" + ] + }, + "xenial-bucket": { + 
"filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-15961_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-15961.html", + "USN-4230-1": "https://usn.ubuntu.com/4230-1/" + }, + "vuln-long-description": "It was discovered that ClamAV incorrectly handled certain MIME messages. A\nremote attacker could possibly use this issue to cause ClamAV to crash,\nresulting in a denial of service.\n\n\nTLDR: This update uses a new upstream release, which includes additional bug\nfixes. In general, a standard system update will make all the necessary\nchanges.\n\n", + "vuln-name": "USN-4230-1", + "vuln-primary-link": "https://usn.ubuntu.com/4230-1/", + "vuln-priority": 5, + "vuln-short-description": "ClamAV could be made to crash if it opened a specially crafted file.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4231-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4231-1.json new file mode 100644 index 0000000..5033ebf --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4231-1.json @@ -0,0 +1,180 @@ +{ + "USN-4231-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "libnss3" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "2:3.35-2ubuntu2.7" + ] + }, + "disco-bucket": { + "comparison-collection-subtype": [ + "libnss3" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "2:3.42-1ubuntu2.5" + ] + }, + "eoan-bucket": { + "comparison-collection-subtype": [ + "libnss3" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "2:3.45-1ubuntu2.2" + ] + }, + "precise-bucket": { + "comparison-collection-subtype": [ + "libnss3" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "2:3.28.4-0ubuntu0.12.04.7" + ] + }, + "trusty-bucket": { + "comparison-collection-subtype": [ + "libnss3" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "2:3.28.4-0ubuntu0.14.04.5+esm4" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "libnss3" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "2:3.28.4-0ubuntu0.16.04.10" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "disco-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "disco" + ] + }, + "eoan-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "eoan" + ] + }, + "precise-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": 
"is", + "filter-match-value": [ + "Ubuntu", + "precise" + ] + }, + "trusty-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "trusty" + ] + }, + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-17006_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17006.html", + "USN-4231-1": "https://usn.ubuntu.com/4231-1/" + }, + "vuln-long-description": "It was discovered that NSS incorrectly handled certain inputs. An\nattacker could possibly use this issue to execute arbitrary code.\n\n\nTLDR: After a standard system update you need to reboot your computer to make\nall the necessary changes.\n\n", + "vuln-name": "USN-4231-1", + "vuln-primary-link": "https://usn.ubuntu.com/4231-1/", + "vuln-priority": 5, + "vuln-short-description": "NSS could be made to execute arbitrary code if it received a specially\ncrafted input.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4232-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4232-1.json new file mode 100644 index 0000000..dd843a8 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4232-1.json @@ -0,0 +1,61 @@ +{ + "USN-4232-1": { + "comparisons": { + "xenial-bucket": { + "comparison-collection-subtype": [ + "graphicsmagick", + "libgraphicsmagick-q16-3", + "libgraphicsmagick++-q16-12" + ], + "comparison-collection-type": [ + "packages", + "packages", + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1.3.23-1ubuntu0.4", + "1.3.23-1ubuntu0.4", + "1.3.23-1ubuntu0.4" + ] + } + }, + "filters": { + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2017-14165_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-14165.html", + "CVE-2017-14314_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-14314.html", + "CVE-2017-14504_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-14504.html", + "CVE-2017-14649_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-14649.html", + "CVE-2017-14733_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-14733.html", + "CVE-2017-14994_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-14994.html", + "CVE-2017-14997_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-14997.html", + "CVE-2017-15277_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-15277.html", + "CVE-2017-15930_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-15930.html", + "CVE-2017-16352_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-16352.html", + "CVE-2017-16353_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2017/CVE-2017-16353.html", + "USN-4232-1": "https://usn.ubuntu.com/4232-1/" + }, + "vuln-long-description": "It was discovered that GraphicsMagick 
incorrectly handled certain image files.\nAn attacker could possibly use this issue to cause a denial of service or other\nunspecified impact.\n\n\nTLDR: In general, a standard system update will make all the necessary changes.\n\n", + "vuln-name": "USN-4232-1", + "vuln-primary-link": "https://usn.ubuntu.com/4232-1/", + "vuln-priority": 5, + "vuln-short-description": "Several security issues were fixed in GraphicsMagick.\n" + } +} \ No newline at end of file diff --git a/ui.py b/ui.py index cb5fdc3..07b537a 100755 --- a/ui.py +++ b/ui.py @@ -19,6 +19,7 @@ from flask import Flask, current_app, g, request, render_template, abort from flask_cors import CORS, cross_origin import pymysql +import yaml from tokenmgmt import validate_key from canonical_cve import shuttlefish @@ -64,28 +65,11 @@ def ui(CONFIG, FDEBUG): logger = logging.getLogger("ui.ui") try: - # Read Our INI with our data collection rules - config = ConfigParser() - config.read(CONFIG) - # Debug - #for i in config : - #for key in config[i] : - #print (i, "-", key, ":", config[i][key]) - except Exception as e: # pylint: disable=broad-except, invalid-name - sys.exit('Bad configuration file {}'.format(e)) - - - - # Grab me Collections Items Turn them into a Dictionary - config_items = dict() - - # Collection Items - for section in config: - config_items[section] = dict() - for item in config[section]: - config_items[section][item] = config[section][item] - - + with open(CONFIG) as yaml_config: + config_items = yaml.safe_load(yaml_config) + except Exception as yaml_error: # pylint: disable=broad-except, invalid-name + logger.debug("Error when reading yaml config {} ".format(yaml_error)) + sys.exit("Bad configuration file {}".format(CONFIG)) logger.debug("Configuration Items: {}".format(config_items)) From 362b1bf5b8da7656f7d6f3784ce4e216d6fb8cce Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 12:27:11 -0800 Subject: [PATCH 015/143] Moving from ui.ini to ui.yaml --- travis/artifacts/ui.ini | 26 -------------------------- travis/artifacts/ui.yaml | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 26 deletions(-) delete mode 100644 travis/artifacts/ui.ini create mode 100644 travis/artifacts/ui.yaml diff --git a/travis/artifacts/ui.ini b/travis/artifacts/ui.ini deleted file mode 100644 index 8b0f3df..0000000 --- a/travis/artifacts/ui.ini +++ /dev/null @@ -1,26 +0,0 @@ -[GLOBAL] - -; Default -jellyfish_version=2 - -[webserver] -bindaddress=127.0.0.1 -accesslink=http://localhost:5000 -port=5000 - -[database] -dbhostname=localhost -dbusername=jellyfish_ui -dbpassword=travis_ui -dbport=3306 -dbdb=jellyfish2 - -[v2api] -root=/v2 -preroot=/jellyfish -cachelocation=/home/travis/build/chalbersma/manowar/travis/cache - -[v2ui] -root=/display/v2 -preroot=/jellyfish - diff --git a/travis/artifacts/ui.yaml b/travis/artifacts/ui.yaml new file mode 100644 index 0000000..181d974 --- /dev/null +++ b/travis/artifacts/ui.yaml @@ -0,0 +1,33 @@ +--- +global: + jellyfish_version: 4 +webserver: + bindaddress: 127.0.0.1 + accesslink: http://localhost:5000 + port: 5000 +database: + dbhostname: localhost + dbusername: manowar + dbpassword: longshittypassword_api + dbport: 3306 + dbdb: manowar2 +v2api: + root: /v2 + preroot: /jellyfish + cachelocation: /opt/manowar/cache + cachelocation: /home/travis/build/chalbersma/manowar/travis/cache +v2ui: + root: /display/v2 + preroot: /jellyfish +#tokenmgmt: +# vt_types: +# - sapi +# - robot +# - ipintel +#sapi: +# collection_config: /etc/manowar/collector.ini +# puthost_schema: 
/etc/manowar/jellyfish_storage.json.schema +# extendhost_schema: /etc/manowar/jellyfish_extension_schema.json.schema +# storageconfig: /etc/manowar/storage.ini +# storagemaxchars: 255 + From 8976867de1d1f07afcf838fc9db42898e1215891 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 12:34:08 -0800 Subject: [PATCH 016/143] Travis Using Yoyo --- travis/db_setup.sh | 24 ++++++------------------ 1 file changed, 6 insertions(+), 18 deletions(-) diff --git a/travis/db_setup.sh b/travis/db_setup.sh index 532ca61..c21f548 100755 --- a/travis/db_setup.sh +++ b/travis/db_setup.sh @@ -6,26 +6,14 @@ set -x sudo mysql -u root -e "show full processlist;" -sudo bash -c "mysql -u root < setup/jellyfish2_db_schema.sql" - +sudo bash -c "mysql -u root < echo create database manowar2" schema_success=$? -if [[ "${schema_success}" -eq 0 ]] ; then - echo "DB Schema Successfully setup. ${schema_success}" -else - echo "DB Schema has an issue, please investigate. ${schema_success}" - exit 1 -fi - -## Setup DB Users with Sample Passwords +echo -e "Copying Yoyo Travis Config" +cp -v travis/artifacts/yoyo.ini yoyo_migrations/ -sudo bash -c "mysql -u root < travis/artifacts/travis_sql_users.sql" +echo -e "Using Yoyo Travis Configs" -users_success=$? +cd yoyo_migrations -if [[ "${users_success}" -eq 0 ]] ; then - echo "DB USers Setup Successfully" -else - echo "DB User Setup failed." - exit 1 -fi +yoyo apply From 0749a1ca7f6330e1fc967eb928dcab68608e4122 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 12:36:45 -0800 Subject: [PATCH 017/143] Various Travis Fixes. --- .travis.yml | 2 +- travis/api_test.sh | 2 +- travis/sapi_test.sh | 16 +--------------- 3 files changed, 3 insertions(+), 17 deletions(-) diff --git a/.travis.yml b/.travis.yml index dc5fd00..dd10b16 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,9 +12,9 @@ notifications: on_failure: always before_script: - "./travis/pre_build.sh" -- "./travis/ssh_setup.sh" - export DEBIAN_FRONTEND=noninteractive; sudo -E apt-get -y install python3-pip python-dev shellcheck mariadb-server mariadb-client jq openjdk-7-jre graphviz openssh-server +- "./travis/ssh_setup.sh" - "./travis/db_setup.sh" - sudo pip3 install --upgrade setuptools - sudo pip3 install -r requirements.txt diff --git a/travis/api_test.sh b/travis/api_test.sh index 0f782f8..5064a34 100755 --- a/travis/api_test.sh +++ b/travis/api_test.sh @@ -3,7 +3,7 @@ set -x # Run Scheduler Test -./ui.py -d -c ./travis/artifacts/ui.ini > /home/travis/ui.log & +./ui.py -d -c ./travis/artifacts/ui.yaml > /home/travis/ui.log & uipid=$! diff --git a/travis/sapi_test.sh b/travis/sapi_test.sh index e855021..bae5857 100755 --- a/travis/sapi_test.sh +++ b/travis/sapi_test.sh @@ -2,18 +2,4 @@ set -x -# Run Scheduler Test -./storageAPI.py -d -c ./travis/artifacts/storageAPI.ini > /home/travis/sapi.log & - -sapipid=$! - -ps "${sapipid}" &> /dev/null - -running=$? - -if [[ ${running} -eq 0 ]] ; then - echo "Jellyfish SAPI 2 - UI running with pid ${sapipid}" -else - echo "Jellyfish SAPI 2 - UI Not Running. Test Failed" - exit 1 -fi +echo -e "Storage API is Depreciated" From dea19897bf45620359572db43ffcb48599cec496 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 12:37:50 -0800 Subject: [PATCH 018/143] Bumping jre to 8 for xenail build. 
--- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index dd10b16..dd0b2d3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,7 +13,7 @@ notifications: before_script: - "./travis/pre_build.sh" - export DEBIAN_FRONTEND=noninteractive; sudo -E apt-get -y install python3-pip python-dev - shellcheck mariadb-server mariadb-client jq openjdk-7-jre graphviz openssh-server + shellcheck mariadb-server mariadb-client jq openjdk-8-jre graphviz openssh-server - "./travis/ssh_setup.sh" - "./travis/db_setup.sh" - sudo pip3 install --upgrade setuptools From f4cd2ee7151817a514d242d1907554698a5b28f9 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 13:21:58 -0800 Subject: [PATCH 019/143] travis checks --- travis/db_setup.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/travis/db_setup.sh b/travis/db_setup.sh index c21f548..1c8b1e9 100755 --- a/travis/db_setup.sh +++ b/travis/db_setup.sh @@ -10,7 +10,10 @@ sudo bash -c "mysql -u root < echo create database manowar2" schema_success=$? echo -e "Copying Yoyo Travis Config" -cp -v travis/artifacts/yoyo.ini yoyo_migrations/ + +ls -l + +cp -v ./travis/artifacts/yoyo.ini ./yoyo_migrations/ echo -e "Using Yoyo Travis Configs" From f436c0bbb03895d66da3f6f0a4b8a1f20e57591d Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 13:26:01 -0800 Subject: [PATCH 020/143] yoyo_migrations != yoyo_steps --- travis/db_setup.sh | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/travis/db_setup.sh b/travis/db_setup.sh index 1c8b1e9..0874ce2 100755 --- a/travis/db_setup.sh +++ b/travis/db_setup.sh @@ -11,12 +11,10 @@ schema_success=$? echo -e "Copying Yoyo Travis Config" -ls -l - -cp -v ./travis/artifacts/yoyo.ini ./yoyo_migrations/ +cp -v ./travis/artifacts/yoyo.ini ./yoyo_steps/ echo -e "Using Yoyo Travis Configs" -cd yoyo_migrations +cd ./yoyo_steps yoyo apply From 38688eae40d7ea65ab8ba22978257400de5d2449 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 13:30:37 -0800 Subject: [PATCH 021/143] more travis tshooting --- travis/db_setup.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/travis/db_setup.sh b/travis/db_setup.sh index 0874ce2..d3c2cf8 100755 --- a/travis/db_setup.sh +++ b/travis/db_setup.sh @@ -11,6 +11,8 @@ schema_success=$? echo -e "Copying Yoyo Travis Config" +file ./travis/artifacts/yoyo.ini + cp -v ./travis/artifacts/yoyo.ini ./yoyo_steps/ echo -e "Using Yoyo Travis Configs" From e93aa008c75221dc949822957c5b6a472df31221 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 13:35:25 -0800 Subject: [PATCH 022/143] adding travis yoyo configuration --- travis/artifacts/yoyo.ini | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 travis/artifacts/yoyo.ini diff --git a/travis/artifacts/yoyo.ini b/travis/artifacts/yoyo.ini new file mode 100644 index 0000000..ab5d628 --- /dev/null +++ b/travis/artifacts/yoyo.ini @@ -0,0 +1,37 @@ +[DEFAULT] + +sources = %(here)s/migrations + +# Target Database +# Database needs to be created +database = mysql://root@localhost/manowar2?unix_socket=/tmp/mysql.sock + +# Verbosity +verbosity = 3 + +# Batch Mode +batch_mode = on + +[manowar] +req_enc = NONE + +# User for API that does most of everything nowadays. +# In the future almost everything should go through this +# and the anayze user +api_username = manowar +api_password = longshittypassword_api +api_hostmask = % + +# This is for the old SSH collector system. 
In the future +# This should get replaced with either an SSH collector that +# uses the api or manowar_agent directly +store_username = manowar_store +store_password = longshittypassword_store +store_hostmask = % + +# User for the analyze audit processes +# Seperates out the two for better splitting of things. +# Mainly read +analyze_username = manowar_analyze +analyze_password = longshittypassword_analyze +analyze_hostmask = % From ccef5af6c71a78c710cf3e330752abab683780c7 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 13:41:00 -0800 Subject: [PATCH 023/143] yoyo mysql.sock --- travis/artifacts/yoyo.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/travis/artifacts/yoyo.ini b/travis/artifacts/yoyo.ini index ab5d628..074524b 100644 --- a/travis/artifacts/yoyo.ini +++ b/travis/artifacts/yoyo.ini @@ -4,7 +4,7 @@ sources = %(here)s/migrations # Target Database # Database needs to be created -database = mysql://root@localhost/manowar2?unix_socket=/tmp/mysql.sock +database = mysql://root@localhost/manowar2?unix_socket=/var/run/mysqld/mysql.sock # Verbosity verbosity = 3 From 2c84affc462f26f30a2bfd8c2616663090fd011f Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 13:48:11 -0800 Subject: [PATCH 024/143] db_setup.sh --- collector.py | 2 +- travis/artifacts/yoyo.ini | 2 +- travis/db_setup.sh | 4 +++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/collector.py b/collector.py index 41d21ef..3ced31b 100755 --- a/collector.py +++ b/collector.py @@ -85,7 +85,7 @@ # Set paramiko_host -def collector(HOST, CONFIG, USERNAME, KEYFILE, POP, SRVTYPE, UBERID, STATUS, IPV4, IPV6): +def collector(HOST, CONFIG, USERNAME, KEYFILE, POP, SRVTYPE, UBERID, STATUS, IPV4, IPV6, **kwargs): ''' Main Function for Collector. Calling this with the right variable should allow you to utilize diff --git a/travis/artifacts/yoyo.ini b/travis/artifacts/yoyo.ini index 074524b..1661aeb 100644 --- a/travis/artifacts/yoyo.ini +++ b/travis/artifacts/yoyo.ini @@ -4,7 +4,7 @@ sources = %(here)s/migrations # Target Database # Database needs to be created -database = mysql://root@localhost/manowar2?unix_socket=/var/run/mysqld/mysql.sock +database = mysql://root@localhost/manowar2?unix_socket=/var/run/mysqld/mysqld.sock # Verbosity verbosity = 3 diff --git a/travis/db_setup.sh b/travis/db_setup.sh index d3c2cf8..b02a425 100755 --- a/travis/db_setup.sh +++ b/travis/db_setup.sh @@ -3,14 +3,16 @@ set -x ## Setup DB Schema - sudo mysql -u root -e "show full processlist;" +sudo mysql -u root -e "shwo variables;" sudo bash -c "mysql -u root < echo create database manowar2" schema_success=$? echo -e "Copying Yoyo Travis Config" +ls -l /var/run/mysqld/ + file ./travis/artifacts/yoyo.ini cp -v ./travis/artifacts/yoyo.ini ./yoyo_steps/ From 79f2d2e8f9a050a57443650596a515117894c105 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 13:51:15 -0800 Subject: [PATCH 025/143] db_setup.sh --- travis/db_setup.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/travis/db_setup.sh b/travis/db_setup.sh index b02a425..bf8f8b1 100755 --- a/travis/db_setup.sh +++ b/travis/db_setup.sh @@ -4,7 +4,7 @@ set -x ## Setup DB Schema sudo mysql -u root -e "show full processlist;" -sudo mysql -u root -e "shwo variables;" +sudo mysql -u root -e "show variables;" sudo bash -c "mysql -u root < echo create database manowar2" schema_success=$? 
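For reference, with the Travis `yoyo.ini` above (api user `manowar`, hostmask `%`, `req_enc = NONE`), the credentials migration shown earlier in this series ends up issuing statements roughly like the following for the api user, with the same pattern repeated for the store and analyze users (a sketch only; the exact grant list and ordering come from `_table_defs`):

```
create or replace user "manowar"@"%" identified by "longshittypassword_api";
alter user "manowar"@"%" REQUIRE NONE;
grant select on manowar2.* to "manowar"@"%";
grant select on manowar2.hosts to "manowar"@"%";
grant insert on manowar2.apiActiveTokens to "manowar"@"%";
```
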
From 26423ed2bb0a3fcaa7e00ed5f14d48698cc276de Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 14:02:14 -0800 Subject: [PATCH 026/143] have to have a db to work with. --- travis/db_setup.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/travis/db_setup.sh b/travis/db_setup.sh index bf8f8b1..73b127e 100755 --- a/travis/db_setup.sh +++ b/travis/db_setup.sh @@ -3,10 +3,8 @@ set -x ## Setup DB Schema -sudo mysql -u root -e "show full processlist;" -sudo mysql -u root -e "show variables;" +sudo mysql -u root -e "create database manowar2;" -sudo bash -c "mysql -u root < echo create database manowar2" schema_success=$? echo -e "Copying Yoyo Travis Config" From c2d536c0f9a6e9a8305d30127650d88fa53e8463 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 14:20:03 -0800 Subject: [PATCH 027/143] Updating Build Definitions --- .travis.yml | 2 +- travis/artifacts/analyze.ini | 4 ++-- travis/artifacts/storage.ini | 6 +++--- travis/db_setup.sh | 2 ++ 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index dd0b2d3..36c4b2f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,7 +18,7 @@ before_script: - "./travis/db_setup.sh" - sudo pip3 install --upgrade setuptools - sudo pip3 install -r requirements.txt -- sudo pip3 install mkdocs +- sudo pip3 install mkdocs bandit - gem install mdl script: - "./travis/testing.sh" diff --git a/travis/artifacts/analyze.ini b/travis/artifacts/analyze.ini index 6e6584c..80b092e 100644 --- a/travis/artifacts/analyze.ini +++ b/travis/artifacts/analyze.ini @@ -10,8 +10,8 @@ jellyfishversion = 2 ; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ; Be sure to change the default db password before going to production! dbhostname=localhost -dbusername=jellyfish_analyze -dbpassword=travis_analyze +dbusername=manowar_analyze +dbpassword=longshittypassword_analyze dbport=3306 dbdb=jellyfish2 # How Many Chars the collection_[type,subtype,value] should store. By default first 64 (0:63 diff --git a/travis/artifacts/storage.ini b/travis/artifacts/storage.ini index 8fe88d6..4c04623 100644 --- a/travis/artifacts/storage.ini +++ b/travis/artifacts/storage.ini @@ -1,6 +1,6 @@ [GLOBAL] -; Default +; Default jellyfish_version=2 [database] @@ -8,8 +8,8 @@ jellyfish_version=2 ; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ; Be sure to change the default db password before going to production! dbhostname=localhost -dbusername=jellyfish_store -dbpassword=travis_store +dbusername=manowar_store +dbpassword=longshittypassword_store dbport=3306 dbdb=jellyfish2 # How Many Chars the collection_[type,subtype,value] should store. By default first 64 (0:63 diff --git a/travis/db_setup.sh b/travis/db_setup.sh index 73b127e..b198079 100755 --- a/travis/db_setup.sh +++ b/travis/db_setup.sh @@ -20,3 +20,5 @@ echo -e "Using Yoyo Travis Configs" cd ./yoyo_steps yoyo apply + +yoyo showmigrations From a3850c65ad68ed99ab0169aaa651f0b018b2bf72 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 15:21:20 -0800 Subject: [PATCH 028/143] Updateing to fix some badnit errros related to default cache files. 
--- audittools/audits_usn.py | 79 ++++++++++++++++++++++++---------------- rss_creator.py | 19 ++++++++-- 2 files changed, 63 insertions(+), 35 deletions(-) diff --git a/audittools/audits_usn.py b/audittools/audits_usn.py index c14321b..ea2d4fc 100755 --- a/audittools/audits_usn.py +++ b/audittools/audits_usn.py @@ -13,6 +13,7 @@ import os.path import time import re +import tempfile import requests @@ -31,7 +32,7 @@ parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) parser.add_argument("--nocache", action="store_true", help="Don't use local usn_db.json") parser.add_argument("--cacheage", default=21600, help="How long (in seconds) to accept local usn_db.json file default 6 hours 21600 seconds") - parser.add_argument("--cachefile", default="/tmp/usn_db.json", help="Location of Cachefile default /tmp/usn_db.json") + parser.add_argument("--cachefile", default=None, help="Use this if you want to cache the db.json between runs") parser.add_argument("-o", "--output", default=False, help="File to output to") parser.add_argument("-p", "--print", action="store_true", help="Print Audit to Screen") parser.add_argument("-u", "--usn", required=True) @@ -67,7 +68,7 @@ class AuditSourceUSN(AuditSource): __usn_regex = "[Uu][Ss][Nn]-\d{4}-\d{1}" __usn_url = "https://usn.ubuntu.com/usn-db/database.json" - __default_cachefile = "/tmp/usn_db.json" + __default_cachefile = None __default_cacheage = 21600 def __init__(self, **kwargs): @@ -77,6 +78,7 @@ def __init__(self, **kwargs): self.cachefile = kwargs.get("cachefile", self.__default_cachefile) self.cacheage = kwargs.get("cacheage", self.__default_cacheage) + self.cachedata = None # Confirm I have a USN if re.match(self.__usn_regex, self.source_key) is None: @@ -124,11 +126,15 @@ def get_usn_data(self): usn_num = "-".join(self.source_key.split("-")[1:]) - with open(self.cachefile) as cachefile_obj: - try: - all_data = json.load(cachefile_obj) - except Exception as json_fomat_error: - self.logger.error("JSON Formatting Error, Try removing Cache file.") + if self.cachedata is None: + with open(self.cachefile) as cachefile_obj: + try: + all_data = json.load(cachefile_obj) + except Exception as json_fomat_error: + self.logger.error("JSON Formatting Error, Try removing Cache file.") + else: + self.logger.debug("Cachedata not loaded from file, already have in memory.") + all_data = self.cachedata try: usn_data = all_data[usn_num] @@ -230,47 +236,58 @@ def handle_local_cache(self): get_file = False - if os.path.isfile(self.cachefile): - file_create_time = os.path.getmtime(self.cachefile) + if self.cachefile is not None: - time_left = file_create_time - (now - self.cacheage) + if os.path.isfile(self.cachefile): + file_create_time = os.path.getmtime(self.cachefile) - self.logger.info("File has {} Seconds before expiration.".format(time_left)) + time_left = file_create_time - (now - self.cacheage) - if time_left <= 0: - self.logger.info("File {} seconds {} too old. Pulling New Version.".format(abs(time_left), self.cachefile)) + self.logger.info("File has {} Seconds before expiration.".format(time_left)) - get_file = True + if time_left <= 0: + self.logger.info("File {} seconds {} too old. Pulling New Version.".format(abs(time_left), self.cachefile)) + get_file = True + + else: + self.logger.debug("File {} new enough. {} seconds left.".format(self.cachefile, time_left)) else: - self.logger.debug("File {} new enough. {} seconds left.".format(self.cachefile, time_left)) + self.logger.debug("File {} missing. 
Pulling it.".format(self.cachefile)) + get_file = True else: - self.logger.debug("File {} missing. Pulling it.".format(self.cachefile)) + self.logger.debug("Not cacheing results to disk.") get_file = True if get_file is True: - with open(self.cachefile, "wb") as new_cachefile: - try: - response = requests.get(self.__usn_url) - except Exception as get_json_error: - self.logger.error("Unable to Get usn db with error : {}".format(get_json_error)) - raise get_json_error + try: + response = requests.get(self.__usn_url) + except Exception as get_json_error: + self.logger.error("Unable to Get usn db with error : {}".format(get_json_error)) + raise get_json_error + else: + if response.status_code == requests.codes.ok: + self.logger.info("Writing new Cache File.") + + if self.cachefile is not None: + self.logger.info("Persistent Cache File Requested, utilizing") + with open(self.cachefile, "wb") as new_cachefile: + new_cachefile.write(response.content) + + self.cachedata = response.json() + else: - if response.status_code == requests.codes.ok: - self.logger.info("Writing new Cache File.") - new_cachefile.write(response.content) - else: - self.logger.error("Error getting DB. HTTP Response Code {}".format(response.status_code)) - raise ValueError("Response {} Recieved".format(respone.status_code)) - finally: - self.logger.debug("New Cache File Written.") + self.logger.error("Error getting DB. HTTP Response Code {}".format(response.status_code)) + raise ValueError("Response {} Recieved".format(respone.status_code)) + finally: + self.logger.debug("Upstream Data Acquired.") if __name__ == "__main__" : - my_usn = AuditSourceUSN(source_key=USN) + my_usn = AuditSourceUSN(source_key=USN, cachefile=CACHEFILE, cacheage=CACHEAGE) validated = my_usn.validate_audit_live() diff --git a/rss_creator.py b/rss_creator.py index ee5ad2e..f3d1ea0 100755 --- a/rss_creator.py +++ b/rss_creator.py @@ -1,11 +1,18 @@ #!/usr/bin/env python3 +''' +Given an RSS Feed of Vulnerability Intelligence +Utilize an audit source object and some generic regex/jq rules +to build and store an audit for that bit of intel. 
+''' + import argparse import os import os.path import logging import re import json +import sys import feedparser import pyjq @@ -20,6 +27,8 @@ "regex_obj_source_key" : r"(USN-\d{1,4}-\d{1,2})", "update_existing" : False, "audit_source_obj" : audittools.audits_usn.AuditSourceUSN, + "audit_source_kwargs" : {"cachefile" : "/tmp/usn_db.json", #nosec + "cacheage" : 21600}, "format" : "json" }, "rhsa" : {"url" : "https://linuxsecurity.com/advisories/red-hat?format=feed&type=rss", @@ -34,7 +43,7 @@ } -if __name__ == "__main__" : +if __name__ == "__main__": parser = argparse.ArgumentParser() #parser.add_argument("-v", "--verbose", action='store_true', help="Turn on Verbosity") parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) @@ -110,6 +119,7 @@ def feed_create(feed_name, feed_config=None, basedir=None, confirm=False, max_au feed_obj = feedparser.parse(feed_config["url"]) except Exception as feed_read_error: logger.error("Unable to Read RSS Feed Returning Empty") + logger.debug("Feed Read Error : {}".format(feed_read_error)) feed_obj = {"entries" : list()} if len(feed_obj["entries"]) == 0: @@ -158,10 +168,11 @@ def feed_create(feed_name, feed_config=None, basedir=None, confirm=False, max_au as_kwargs = {"source_key" : best_source_key, "audit_filename" : "{}.{}".format(best_source_key, feed_config["format"]), - "audit_path" : this_path + "audit_path" : this_path, + **feed_config.get("audit_source_kwargs", dict()) } - as_args = list() + as_args = [*feed_config.get("audit_source_args", list())] try: @@ -199,7 +210,7 @@ def feed_create(feed_name, feed_config=None, basedir=None, confirm=False, max_au return audit_source_items -if __name__ == "__main__" : +if __name__ == "__main__": # Run the Thing results = feed_create(FEED, basedir=BASEDIR, confirm=CONFIRM, max_audit=MAX) From fe4ddd828642566af62c2b47dc45a2243fca6aae Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 15:36:18 -0800 Subject: [PATCH 029/143] More Bandit Fixes --- jelly_api_2/sapi_addtoken.py | 8 +++++--- travis/testing.sh | 10 +++++++++- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/jelly_api_2/sapi_addtoken.py b/jelly_api_2/sapi_addtoken.py index 4e954f8..945cd30 100644 --- a/jelly_api_2/sapi_addtoken.py +++ b/jelly_api_2/sapi_addtoken.py @@ -118,7 +118,7 @@ def api2_sapi_addtoken(user=None, validfor=7, tokentype="sapi"): pass else : argument_error = True - error_dict["bad_token_type"] = "Bad token type, either zero lenght, not a string or not in the validated list" + error_dict["bad_token_type"] = "Bad token type, either zero lenght, not a string or not in the validated list" #nosec if argument_error : do_query=False @@ -173,7 +173,9 @@ def api2_sapi_addtoken(user=None, validfor=7, tokentype="sapi"): # No Users Found new_token_args=[ tokentype, salt_value, key_value, retrieved_uid, validfor, salt_value ] - new_token_query="insert into apiActiveTokens (tokentype, token, fk_apikeyid, token_expire_date, salt) VALUES ( %s, SHA2(CONCAT(%s,%s),512) , %s, (NOW() + INTERVAL %s DAY), %s ) " + new_token_query='''insert into apiActiveTokens + (tokentype, token, fk_apikeyid, token_expire_date, salt) + VALUES( %s, SHA2(CONCAT(%s,%s),512) , %s, (NOW() + INTERVAL %s DAY), %s ) ''' #nosec # In the Future add Ticket Integration via ecbot (or ecbot like system) here. 
g.cur.execute(new_token_query, new_token_args) @@ -207,4 +209,4 @@ def api2_sapi_addtoken(user=None, validfor=7, tokentype="sapi"): return jsonify(**response_dict) - + diff --git a/travis/testing.sh b/travis/testing.sh index 2cc4952..44d2693 100755 --- a/travis/testing.sh +++ b/travis/testing.sh @@ -13,6 +13,7 @@ python_files=$(find . -type d -wholename ./jelly_api -prune -o \ -type d -wholename ./lib -prune -o \ -type f -regex ".*\.py$") +bandit_failure="pass" for file in ${python_files} ; do this_temp=$(mktemp /tmp/banditout.XXXXX) bandit "${file}" > "${this_temp}" @@ -20,7 +21,7 @@ for file in ${python_files} ; do if [[ ${this_file_good} -gt 0 ]] ; then echo -e "BANDIT: ${file} had issues please investigate." cat "${this_temp}" - exit 1 + failure="fail" else echo -e "BANDIT: ${file} good." fi @@ -35,6 +36,13 @@ for file in ${python_files} ; do done +if [[ $bandit_failure == "fail" ]] ; then + echo -e "Bandit Failures Detected" + exit 1 +else + echo -e "Bandit Checks Passed" +fi + bash_files=$(find . -type d -wholename ./lib -prune -o \ -type d -wholename ./setup -prune -o \ -type d -prune -o \ From 6fbacd30be0f2902be9a148a1cbabefcc067e637 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 15:44:01 -0800 Subject: [PATCH 030/143] Updating db names in travis --- travis/artifacts/analyze.ini | 2 +- travis/artifacts/collate.ini | 6 +++--- travis/artifacts/storage.ini | 2 +- travis/artifacts/storageAPI.ini | 26 -------------------------- 4 files changed, 5 insertions(+), 31 deletions(-) delete mode 100644 travis/artifacts/storageAPI.ini diff --git a/travis/artifacts/analyze.ini b/travis/artifacts/analyze.ini index 80b092e..7271f5c 100644 --- a/travis/artifacts/analyze.ini +++ b/travis/artifacts/analyze.ini @@ -13,7 +13,7 @@ dbhostname=localhost dbusername=manowar_analyze dbpassword=longshittypassword_analyze dbport=3306 -dbdb=jellyfish2 +dbdb=manowar2 # How Many Chars the collection_[type,subtype,value] should store. By default first 64 (0:63 collectionmaxchars=255 diff --git a/travis/artifacts/collate.ini b/travis/artifacts/collate.ini index a4f1fc4..78fb8a1 100644 --- a/travis/artifacts/collate.ini +++ b/travis/artifacts/collate.ini @@ -10,10 +10,10 @@ jellyfishversion = 2 ; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ; Be sure to change the default db password before going to production! dbhostname=localhost -dbusername=jellyfish_analyze -dbpassword=travis_analyze +dbusername=manowar_analze +dbpassword=longshittypassword_analyze dbport=3306 -dbdb=jellyfish2 +dbdb=manowar2 # How Many Chars the collection_[type,subtype,value] should store. By default first 64 (0:63 collectionmaxchars=255 diff --git a/travis/artifacts/storage.ini b/travis/artifacts/storage.ini index 4c04623..74fdd84 100644 --- a/travis/artifacts/storage.ini +++ b/travis/artifacts/storage.ini @@ -11,7 +11,7 @@ dbhostname=localhost dbusername=manowar_store dbpassword=longshittypassword_store dbport=3306 -dbdb=jellyfish2 +dbdb=manowar2 # How Many Chars the collection_[type,subtype,value] should store. 
By default first 64 (0:63 collectionmaxchars=255 diff --git a/travis/artifacts/storageAPI.ini b/travis/artifacts/storageAPI.ini deleted file mode 100644 index 4965acf..0000000 --- a/travis/artifacts/storageAPI.ini +++ /dev/null @@ -1,26 +0,0 @@ -[GLOBAL] - -; Default -jellyfish_version=2 - -[webserver] -accesslink=http://localhost:5001 -port=5001 - -[database] -dbhostname=localhost -dbusername=jellyfish_storeapi -dbpassword=travis_storeapi -dbport=3306 -dbdb=jellyfish2 - -[verification] -# Json Schema Verification -json_schema_file=/home/travis/build/chalbersma/manowar/travis/artifacts/jellyfish_storage.json.schema - -[storage] -# Storage Configuration File -storeconfig=/home/travis/build/chalbersma/manowar/travis/artifacts/storage.ini - -[collections] -collection_config=/home/travis/build/chalbersma/manowar/travis/artifacts/collector.ini From 1987e121a1e45f202313ba1f4050349700c2dcca Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 15:57:11 -0800 Subject: [PATCH 031/143] Analyze and Scheudle test updates --- travis/analyze_test.sh | 2 +- travis/schedule_test.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/travis/analyze_test.sh b/travis/analyze_test.sh index 82d24b0..81d9300 100755 --- a/travis/analyze_test.sh +++ b/travis/analyze_test.sh @@ -3,7 +3,7 @@ set -x # Run Scheduler Test -./analyze.py -a travis/artifacts/audits.d -c travis/artifacts/analyze.ini -V +./analyze.py -a travis/artifacts/audits.d -c travis/artifacts/analyze.ini -vvv analyze_good=$? diff --git a/travis/schedule_test.sh b/travis/schedule_test.sh index 044770d..7603fb2 100755 --- a/travis/schedule_test.sh +++ b/travis/schedule_test.sh @@ -25,7 +25,7 @@ fi hostcollection_test="$(mktemp /tmp/hctest.XXXXX)" # Test that it landed properly. Grab the results for host #1 -sudo bash -c "mysql jellyfish2 -u root < travis/artifacts/test_hostcollection.sql > ${hostcollection_test} " +sudo bash -c "mysql manowar2 -u root < travis/artifacts/test_hostcollection.sql > ${hostcollection_test} " didit=$? @@ -50,7 +50,7 @@ fi ip_intel_test="$(mktemp /tmp/ipintel.XXXXX)" # Test that it landed properly. Grab the results for host #1 -sudo bash -c "mysql jellyfish2 -u root < travis/artifacts/test_ipintel.sql > ${ip_intel_test} " +sudo bash -c "mysql manowar2 -u root < travis/artifacts/test_ipintel.sql > ${ip_intel_test} " didit_ipintel=$? From a5ef6409a01224d99e38b815d4137cb4ac0da384 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 16:17:50 -0800 Subject: [PATCH 032/143] Fixing bug in collate.py where it included a retired module. --- collate.py | 2 +- source_docs/db_setup.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/collate.py b/collate.py index 3a721e4..827ed36 100755 --- a/collate.py +++ b/collate.py @@ -15,7 +15,7 @@ import pymysql import json import re -import apt_pkg +#import apt_pkg from copy import deepcopy from time import time from time import sleep diff --git a/source_docs/db_setup.md b/source_docs/db_setup.md index ed29684..cf6876a 100644 --- a/source_docs/db_setup.md +++ b/source_docs/db_setup.md @@ -4,7 +4,7 @@ Setting up the database has been reworked to utilize [yoyo](https://marcosschroh.github.io/yoyo-database-migrations/). You need a -modern mariadb Database (10.x and higher) available to you named `manowar2` and +modern mariadb Database (10.2 and higher) available to you named `manowar2` and and administraive user. 
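
For example, a minimal sketch of that prerequisite, run as a MariaDB superuser (the `administrator` name mirrors `yoyo.ini.sample`; pick your own password, and note the account needs global rights because the migrations also create users and grants):

```
create database manowar2;
create or replace user 'administrator'@'localhost' identified by 'replaceme';
grant all privileges on *.* to 'administrator'@'localhost' with grant option;
```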
Then in the `yoyo_steps` directory you need to create a `yoyo.ini` file (see the From 9d956b1d0d7933a8a812bf5e5fa5c20157c8b727 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Wed, 8 Jan 2020 16:30:48 -0800 Subject: [PATCH 033/143] Spelling man --- travis/artifacts/collate.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/travis/artifacts/collate.ini b/travis/artifacts/collate.ini index 78fb8a1..4363e90 100644 --- a/travis/artifacts/collate.ini +++ b/travis/artifacts/collate.ini @@ -10,7 +10,7 @@ jellyfishversion = 2 ; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ; Be sure to change the default db password before going to production! dbhostname=localhost -dbusername=manowar_analze +dbusername=manowar_analyze dbpassword=longshittypassword_analyze dbport=3306 dbdb=manowar2 From 3b729b8df9ac607b00c1394ee4942675e49a5e60 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Thu, 9 Jan 2020 16:37:26 -0800 Subject: [PATCH 034/143] DB_Helper Function * Designed to give a DB connection that will be able to support all the things. --- db_helper.py | 94 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 94 insertions(+) create mode 100644 db_helper.py diff --git a/db_helper.py b/db_helper.py new file mode 100644 index 0000000..ae2c993 --- /dev/null +++ b/db_helper.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 + +''' +db_helper.py + +Provides common db functions for the project to standardize capabilities. +Idea is that you get a config that looks like (with an optional prefix) + +dbhostname: hostname +dbport: port +dbuser: user +dbpassword: password +dbname: db +autocommit: bool +charset: string (default: utf8) + + +''' + +import pymysql +import pyjq +import logging + +def get_conn(config, prefix=None, tojq=None, **kwargs): + + # Given a Config Dictionary with an optional prefix and tojq + # Pull the Data Out and Connect to the Database + + logger = logging.getLogger("db_helper.py") + + if isinstance(config, dict) is False: + raise TypeError("config is not a dictionary") + + dbconfig = None + + if tojq is not None and isinstance(tojq, str): + + try: + dbconfig = pyjq.first(tojq, config) + except Exception as jq_error: + logger.error("Unable to find config at jq rule : {}".format(tojq)) + logger.info("Error : {}".format(jq_error)) + + raise jq_error + else: + logger.debug("Successfully tojq'ed this configuration.") + else: + dbconfig = config + + if isinstance(dbconfig, dict) is False: + logger.error("DBConfig isn't here!") + raise TypeError("Problem reading Database Information") + + pymysql_args = {"host" : dbconfig["{}dbhostname".format(prefix)], + "port" : int(dbconfig["{}dbport".format(prefix)]), + "user" : dbconfig["{}dbusername".format(prefix)], + "password" : dbconfig["{}dbpassword".format(prefix)], + "database" : dbconfig["{}dbdb".format(prefix)], + "autocommit" : dbconfig.get("{}autocommit".format(prefix), kwargs.get("ac_def", True)), + "charset" : dbconfig.get("{}charset".format(prefix), kwargs.get("char_def", "utf8mb4"))} + + if dbconfig.get("{}ssl", False) is True: + pymysql_args["ssl"] = dict() + + ca = dbconfig.get("{}dbsslca".format(prefix), None) + capath = dbconfig.get("{}dbsslcapath".format(prefix), None) + cert = dbconfig.get("{}dbsslcert".format(prefix), None) + key = dbconfig.get("{}dbsslkey".format(prefix), None) + cipher = dbconfig.get("{}dbsslcipher".format(prefix), None) + + if ca is not None: + pymysql_args["ssl"]["ca"] = ca + if capath is not None: + pymysql_args["ssl"]["capath"] = capath + if cert is not None: + 
pymysql_args["ssl"]["capath"] = cert + if key is not None: + pymysql_args["ssl"]["capath"] = key + if cipher is not None: + pymysql_args["ssl"]["capath"] = cipher + + try: + + db_conn = pymysql.connect(**pymysql_args) + + logger.info("Connected to {user}@{host}:{port}/{database}".format(**pymysql_args)) + + except Exception as connection_error: + logger.warning("Connection Failed to {user}@{host}:{port}/{database}".format(**pymysql_args)) + logger.debug("Error {}".format(connection_error)) + + raise connection_error + + return db_conn From 3a66d6f713f1be97eac3b9fa22231e1b78988006 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Thu, 9 Jan 2020 16:38:38 -0800 Subject: [PATCH 035/143] Analyze Refactoring * Semirafactored analyze to utilize the same config file that ui.py does. * Removed a bunch of unused code * Using the new shared db function --- analyze.py | 585 ++++++++++++++++++++--------------------------------- 1 file changed, 222 insertions(+), 363 deletions(-) diff --git a/analyze.py b/analyze.py index cb188d7..a42b74b 100755 --- a/analyze.py +++ b/analyze.py @@ -7,7 +7,6 @@ # Run through Analysis import os - import ast import argparse from configparser import ConfigParser @@ -20,23 +19,27 @@ from time import time from time import sleep import logging - +import sys import threading import multiprocessing # Yes I use both! from queue import Queue +import yaml + # Analyze Specific from generic_large_compare import generic_large_compare from generic_large_analysis_store import generic_large_analysis_store from subtype_large_compare import subtype_large_compare import audittools +import db_helper + if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument("-a", "--auditdir", help="Directory that Contains the audits", required=True, action='append') - parser.add_argument("-c", "--config", help="Main analyze.ini file", required=True) + parser.add_argument("-a", "--auditdir", help="Directory that Contains the audits", required=False, action='append') + parser.add_argument("-c", "--config", help="Main analyze.ini file", required=False, default=False) parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) parser._optionals.title = "DESCRIPTION " @@ -53,8 +56,10 @@ else : CONFIGDIR.append(thisdir) - # - CONFIG=args.config + if list(CONFIGDIR) == 0: + for this_path in ["/etc/manowar/audits.d", "./etc/manowar/audits.d"]: + if os.isdir(this_path) is True: + CONFIGDIR.append(this_path) VERBOSE = len(args.verbose) @@ -69,6 +74,20 @@ LOGGER = logging.getLogger("analyze.py") + CONFIG = args.config + + if CONFIG is False: + # Let's Look for a Default File. 
+ LOGGER.debug("No Config File Given Let's Look in Default Locations.") + for default_file in ("/etc/manowar/manoward.yaml", + "./etc/manowar/manoward.yaml", + "/usr/local/etc/manowar/manoward.yaml"): + + if os.path.isfile(default_file) and os.access(default_file, os.R_OK): + LOGGER.debug("Using Default File : {}".format(default_file)) + CONFIG = default_file + break + def analyze(CONFIGDIR, CONFIG): @@ -80,77 +99,91 @@ def analyze(CONFIGDIR, CONFIG): ANALYZE_TIME = int(time()) # Parse my General Configuration - try: - # Read Our INI with our data collection rules - config = ConfigParser() - config.read(CONFIG) - except Exception as general_config_error: - sys.exit('Bad configuration file {} {}'.format(CONFIG, general_config_error)) + if isinstance(CONFIG, dict): + config_items = CONFIG else: - # DB Config Items - db_config_items = dict() - for section in config: - if section in ["database"] : - for item in config[section]: - db_config_items[item] = config[section][item] + try: + with open(CONFIG) as yaml_config: + config_items = yaml.safe_load(yaml_config) + except Exception as yaml_error: + logger.debug("Error when reading yaml config {} ".format(yaml_error)) + sys.exit("Bad configuration file {}".format(CONFIG)) + logger.debug("Configuration Items: {}".format(config_items)) - # Grab all my Audits in CONFIGDIR Stuff - auditfiles = audittools.walk_auditd_dir(CONFIGDIR) - # Read all my Audits - audits = dict() - for auditfile in auditfiles : + if isinstance(CONFIGDIR, dict): + loggger.debug("CONFIGDIR is given from external process.") + audits = CONFIGDIR + elif isinstance(CONFIGDIR, list): + # Grab all my Audits in CONFIGDIR Stuff + auditfiles = audittools.walk_auditd_dir(CONFIGDIR) - these_audits = audittools.load_auditfile(auditfile) + # Read all my Audits + audits = dict() + for auditfile in auditfiles : - for found_audit_name in these_audits.keys(): - if found_audit_name in audits.keys(): - logger.warning("Duplicate definition for {} found. Ignoring definition in file {}".format(found_audit_name, auditfile)) - else: - # Add that audit - audits[found_audit_name] = these_audits[found_audit_name] + these_audits = audittools.load_auditfile(auditfile) + + for found_audit_name in these_audits.keys(): + if found_audit_name in audits.keys(): + logger.warning("Duplicate definition for {} found. 
Ignoring definition in file {}".format(found_audit_name, auditfile)) + else: + # Add that audit + audits[found_audit_name] = these_audits[found_audit_name] + ''' def null_or_value(data_to_check): + # TODO Remove this Eventually if data_to_check == None : data = "NULL" return data else : data = "'" + str(data_to_check) + "'" return data + ''' - def grab_host_list(db_conn, FRESH): - - cur = db_conn.cursor(pymysql.cursors.DictCursor) - - host_query = '''select - host_id, pop, srvtype, last_update - from - hosts - where - last_update >= now() - INTERVAL %s SECOND''' - - host_query_args = [FRESH] + def grab_host_list(db_conn, FRESH=172800): + # Grab a Host List + logger = logging.getLogger("grab_host_list") - cur.execute(host_query, host_query_args) + db_cur = db_conn.cursor(pymysql.cursors.DictCursor) - all_hosts = cur.fetchall() + host_query = '''select host_id, pop, srvtype, last_update + from hosts + where last_update >= now() - INTERVAL %s SECOND''' - amount_of_hosts = len(all_hosts) + host_query_args = [FRESH] - if amount_of_hosts > 0 : - host_good = True - else : + try: + host_list_debug_query = db_cur.mogrify(host_query, host_query_args) + logger.debug("hostslist Query : {}".format(host_list_debug_query)) + db_cur.execute(host_query, host_query_args) + except Exception as hlq_error: + logger.error("Unable to Query for Hostslist.") + all_hosts = list() + amount_of_hosts = 0 host_good = False + else: + all_hosts = db_cur.fetchall() - cur.close() + amount_of_hosts = len(all_hosts) + + if amount_of_hosts > 0 : + host_good = True + else : + host_good = False + finally: + db_cur.close() return host_good, amount_of_hosts, all_hosts + ''' # Match Type, Collection Type, Collection Subtype, MValue def generic_compare(db_conn, host_id, mtype, ctype, csubtype, mvalue, FRESH): + # TODO Can remove? cur = db_conn.cursor() @@ -231,9 +264,11 @@ def generic_compare(db_conn, host_id, mtype, ctype, csubtype, mvalue, FRESH): cur.close() return pfe_value, this_collected_value + ''' - + ''' def subtype_compare(db_conn, host_id, mtype, ctype, csubtype, mvalue, FRESH) : + # TODO Can Remove? # mtype needs to be "subnonhere", "suballhere", "subknowall" # csubtype Needs to be an array of subtypes. @@ -464,17 +499,20 @@ def subtype_compare(db_conn, host_id, mtype, ctype, csubtype, mvalue, FRESH) : # print("Debug", pfe_value, comparison_result) return pfe_value, comparison_result + ''' def analyze_one_audit(db_config_items, list_of_hosts, oneAudit, auditName, return_dict, audit_id) : + # Note that db config items is the same as config_itesm + + logger = logging.getLogger("analyze_one_audit") + try: # I multithread like a boss now. :) JK But I need to give each audit it's own conn to the DB: - db_conn, db_message, analyze_stats = giveMeDB(db_config_items) - # oneAudit = audits[audit] - # list_of_hosts = list_of_host - # pop_results, srvtype_results, audit_result | Need a global lock for updating. 
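        # For orientation, a trimmed sketch of what oneAudit carries at this point;
        # the keys mirror the USN audit artifacts added later in this series and the
        # values here are only illustrative:
        #
        #   {"vuln-name": "USN-4233-1",
        #    "vuln-priority": 1,
        #    "filters": {"xenial-bucket": {"filter-match": "is",
        #                                  "filter-collection-type": ["os", "release"],
        #                                  "filter-collection-subtype": ["default", "default"],
        #                                  "filter-match-value": ["Ubuntu", "xenial"]}},
        #    "comparisons": {"xenial-bucket": {"comparison-match": "aptge",
        #                                      "comparison-collection-type": ["packages"],
        #                                      "comparison-collection-subtype": ["libgnutls30"],
        #                                      "comparison-match-value": ["3.4.10-4ubuntu1.6"]}}}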
+ db_conn = db_helper.get_conn(db_config_items, prefix="analyze_", tojq=".database", ac_def=True) + # host_buckets = dict() host_comparison = dict() @@ -500,20 +538,20 @@ def analyze_one_audit(db_config_items, list_of_hosts, oneAudit, auditName, retur for bucket in oneAudit["filters"] : + this_mtype = oneAudit["filters"][bucket]["filter-match"] + this_ctype = oneAudit["filters"][bucket]["filter-collection-type"] + this_csubtype = oneAudit["filters"][bucket]["filter-collection-subtype"] + this_mvalue = oneAudit["filters"][bucket]["filter-match-value"] - #print("in bucket", bucket) - this_mtype = oneAudit["filters"][bucket]["filter-match"] - this_ctype = oneAudit["filters"][bucket]["filter-collection-type"] - this_csubtype = oneAudit["filters"][bucket]["filter-collection-subtype"] - this_mvalue = oneAudit["filters"][bucket]["filter-match-value"] - - - #print(this_mtype, this_ctype, this_csubtype, this_mvalue) - try: - bucket_results = generic_large_compare(db_conn, items_left_to_bucket, this_mtype, this_ctype, this_csubtype, this_mvalue, FRESH, exemptfail=True) - except Exception as e: - print("Error on Generic Large Compare on bucket", bucket, " For audit ", auditName) + #print(this_mtype, this_ctype, this_csubtype, this_mvalue) + try: + bucket_results = generic_large_compare(db_conn, items_left_to_bucket, this_mtype, this_ctype, this_csubtype, this_mvalue, FRESH, exemptfail=True) + except Exception as glc_bucket_results_error: + logger.error("Error on Generic Large Compare on bucket {} : audit {}".format(bucket, auditName)) + logger.warning("Maybe no Hosts for Bucket {} on audit {}".format(bucket, auditName)) + logger.debug("Error : {}".format(glc_bucket_results_error)) + else: # Grab just the items that passed for result in bucket_results : if "pfe" in result.keys() : @@ -535,10 +573,7 @@ def analyze_one_audit(db_config_items, list_of_hosts, oneAudit, auditName, retur items_left_to_bucket = [ host_id for host_id in list_of_hosts if host_id not in this_bucket_ids ] - #print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!") - #print(host_buckets) - #print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!") - + # Host Bucketing for comparison in host_buckets.keys() : #print(comparison) try: @@ -547,26 +582,34 @@ def analyze_one_audit(db_config_items, list_of_hosts, oneAudit, auditName, retur this_csubtype = oneAudit["comparisons"][comparison]["comparison-collection-subtype"] this_mvalue = oneAudit["comparisons"][comparison]["comparison-match-value"] #print(this_mtype, this_ctype, this_csubtype, this_mvalue) - except Exception as e: - print("Error grabbing comparisons for audit ", auditName, " : " , e) - - # Check What Type - if this_mtype in [ "subnonhere", "suballhere", "subknowall" ] : - # Add Massive Subtype - try: - comparison_results = subtype_large_compare(db_conn, host_buckets[comparison], this_mtype, this_ctype, this_csubtype, this_mvalue, FRESH) - except Exception as e: - print("Error on Subtype Large Compare on Comparison for bucket", comparison, " For audit ", auditName, " : ", e) + except Exception as comparison_error: + logger.error("Error grabbing comparisons for audit {} : {}".format(auditName, comparison_error)) else: - # Generic Comparison - try: - comparison_results = generic_large_compare(db_conn, host_buckets[comparison], this_mtype, this_ctype, this_csubtype, this_mvalue, FRESH) - #print(comparison_results) - except Exception as e: - print("Error on Generic Large Compare on Comparison for bucket", comparison, " For audit ", auditName, " : " , e) + # Check What Type + if this_mtype in ["subnonhere", 
"suballhere", "subknowall"] : + # Add Massive Subtype + try: + comparison_results = subtype_large_compare(db_conn, host_buckets[comparison], this_mtype, this_ctype, this_csubtype, this_mvalue, FRESH) + except Exception as subtype_large_compare_error: + logger.error("{} Error on Subtype Large Compare on Comparison for bucket {}".format(auditName, + comparison)) + + logger.debug("Error : {}".format(subtype_large_compare_error)) + else: + host_comparison[comparison] = comparison_results - # And Doo it! (But for either collection type - host_comparison[comparison] = comparison_results + else: + # Generic Comparison + try: + comparison_results = generic_large_compare(db_conn, host_buckets[comparison], this_mtype, this_ctype, this_csubtype, this_mvalue, FRESH) + #print(comparison_results) + except Exception as generic_large_compare_error: + logger.error("{} Error on Generic Large Compare on Comparison for bucket {}".format(auditName, + comparison)) + + logger.debug("Error : {}".format(generic_large_compare_error)) + else: + host_comparison[comparison] = comparison_results #bucket in host_bucket #print(auditName, " Results : ", host_comparison) @@ -575,15 +618,16 @@ def analyze_one_audit(db_config_items, list_of_hosts, oneAudit, auditName, retur massinserts, massupdates = generic_large_analysis_store(db_conn, audit_id, host_comparison, FRESH) + # Return Dict is a manager.dict() so the "above" process knows what changes here. return_dict["host_inserts"] = massinserts return_dict["host_updates"] = massupdates - - #print(return_dict) - exit(0) - except Exception as e : - print("Error doing analyze", e) + except Exception as analyze_error: + logger.error("Error doing analyze for {} : {}".format(auditName, analyze_error)) exit(1) + else: + exit(0) + def dequeue_hosts(db_config_items, list_of_hosts): @@ -603,27 +647,26 @@ def dequeue_hosts(db_config_items, list_of_hosts): try: oneAudit, auditName = audit_queue.get() - except Exception as e: - print("Failure to Pull Items off of Queue.") + except Exception as audit_get_error: + logger.error("Failure to Pull Items off of Queue.") + logger.debug("Error : {}".format(audit_get_error)) + audit_queue.task_done() + + # Abnormal Return return try: manager = multiprocessing.Manager() - except Exception as e: - print("Failure to Create Manager for audit ", auditName, " with error: ", e) + except Exception as multiprocess_error: + logger.error("Failure to Create Manager for audit {} with error {}".format(auditName, + multiprocess_error)) audit_queue.task_done() - return - try: - return_dict = manager.dict() - except Exception as e: - print("Failure to Create Return Dictionary for audit ", auditName, " with error: ", e) - audit_queue.task_done() + # Abnormal Return return - - - + else: + return_dict = manager.dict() # Insert Update the Audit in the Database try: @@ -632,16 +675,13 @@ def dequeue_hosts(db_config_items, list_of_hosts): logger.error("Failure to Create Audit {} in DB with error {}".format(auditName, update_audit_db_error)) audit_queue.task_done() return - - - #print("Audit ID Successufl", audit_id) - - # Grab My Audit ID and store it for future Reference with oneAudit - oneAudit["audit_id"] = audit_id + else: + oneAudit["audit_id"] = audit_id + logger.debug("Stored a Record about audit {}/{} in the database.".format(auditName, audit_id)) #print("Pulled Host ", this_one_host_array) # Process One Host Pass it the host_array and the config_array - try : + try: #analyze_one_audit(db_config_items, list_of_hosts, oneAudit, auditName, return_dict, audit_id) # 
analyze_audit_process is a new instance for every new thread we make. try: @@ -649,83 +689,54 @@ def dequeue_hosts(db_config_items, list_of_hosts): analyze_audit_process.name = auditName analyze_audit_process.daemon = True analyze_audit_process.start() - except Exception as e: - print("Error with Analyze Audit", auditName, " Critical Error: ", e) + except Exception as analyze_pid_error: + logger.error("Error with Analyze Audit {} : {}".format(auditName, analyze_pid_error)) + analyze_audit_process.terminate() + else: - while multiprocessing.Process.is_alive(analyze_audit_process) == True : - if VERBOSE: - print("Waiting for: ", auditName, multiprocessing.Process.is_alive(analyze_audit_process) ) + while multiprocessing.Process.is_alive(analyze_audit_process) == True : + logger.debug("Waiting for: {} {}".format(auditName, multiprocessing.Process.is_alive(analyze_audit_process))) - sleep(30) + # Waith 45 Seconds before Asking again + sleep(45) - try: analyze_audit_process.join() - except Exception as e: - print("Join Error on Audit: ", auditName, " : ", e) - except Exception as e: - print("Failure to Analyze Audit", auditName, " Critical Error: ", e) + except Exception as audit_analyisis_error: + logger.error("Failure to Analyze Audit {} : {}".format(auditName, + audit_analyisis_error)) # I/U Stats only Thing Left + logger.debug(return_dict) try: with audit_host_counts_lock : - if VERBOSE: - print(auditName, " I/U ", return_dict["host_inserts"], return_dict["host_updates"]) + logger.info("{} I:{} U:{}".format(auditName, + return_dict["host_inserts"], + return_dict["host_updates"])) + # This is a Global audit_host_inserts += return_dict["host_inserts"] audit_host_updates += return_dict["host_updates"] - #print("Audit Results", audit_host_inserts, audit_host_updates) - except Exception as e : - #print(return_dict) - print("Failure on Audit ", auditName, " while updating audit counts:", e) - pass - - # moving srvtype, pop & acoll results to own modules - ''' - try: - with srvtype_results_lock : - # Add Srvtype Results with Srvtype Lock - if len(return_dict["srvtype_results"]) : - srvtype_results[audit_id] = return_dict["srvtype_results"] - #print("SRVTYPE Results", srvtype_results) - except Exception as e : - print("Failure on Audit ", auditName, " while updating srvtype_results:", e) - pass - - try: - with pop_results_lock : - # POP Results From Multiprocess Process - if len(return_dict["pop_results"]) > 0 : - pop_results[audit_id] = return_dict["pop_results"] - #print(pop_results) - except Exception as e : - print("Failure on Audit ", auditName, " while updating pop results:", e) - pass - - try: - with audit_results_lock : - # Audit Results - audit_results[audit_id] = return_dict["audit_results"] - #print("Audit Results", audit_results) - except Exception as e : - print("Failure on Audit ", auditName, " while updating audit_results:", e) - #print(return_dict.keys()) - pass - ''' - - # Dequeue the Host from host_queue + except Exception as metrics_error: + #print(return_dict) + logger.error("Failure on Audit when Recording Metrics {} : {}".format(auditName, + metrics_error)) audit_queue.task_done() + return def analyze_all_audits(db_config_items, list_of_hosts, FRESH, MAXTHREADS) : # Audits are a global variable + logger = logging.getLogger("analyze_all_audits") + # Copy Time results_host = deepcopy(list_of_hosts) # Create My ThreadPool for x in range(MAXTHREADS): + # This is the magic. 
It calls dequeu hostsk t = threading.Thread(target=dequeue_hosts, args=(db_config_items, list_of_hosts)) # Make Threads Die if Parent is Killed t.daemon = True @@ -736,103 +747,39 @@ def analyze_all_audits(db_config_items, list_of_hosts, FRESH, MAXTHREADS) : start = time() for audit in audits : - # Essentially Updates Via Reference. Should be Self Contained + # Populate Audit Queue + logger.info("About to Queue audit {}".format(audit)) #try: - this_queue_item = audits[audit], audit - audit_queue.put( this_queue_item ) + this_queue_item = [audits[audit], audit] + audit_queue.put(this_queue_item) + + # Sleep to allow for better placement sleep(1) # If your running verbosely Print out this stuff Else not while audit_queue.unfinished_tasks > 0 : - if VERBOSE : - nowtime = time() - start - print("---------------------------------------") - print("AuditsLeft \t QSize \t Thread \t QStuff\t Time ") - print(audit_queue.unfinished_tasks, "\t\t", audit_queue.qsize(), "\t", threading.active_count(), "\t\t", audit_queue.empty(),"\t", nowtime) - print("---------------------------------------") - sleep(15) + + nowtime = time() - start + + logger.debug("---------------------------------------") + logger.debug("AuditsLeft : {}".format(audit_queue.unfinished_tasks)) + logger.debug("QSize : {}".format(audit_queue.qsize())) + logger.debug("Thread : {}".format(threading.active_count())) + logger.debug("QStuff : {}".format(audit_queue.empty())) + logger.debug("Time : {}".format(nowtime)) + logger.debug("---------------------------------------") + # Give me an Update every 30 seconds + sleep(15) # When I'm Not Verbose Just wait and don't say shit. + # Otherwise when I see a small number of unfinished tasks Let's move back an djoin. audit_queue.join() jobtime = time() - start return audit_host_inserts, audit_host_updates, jobtime - ''' - def store_audit_by_host(db_conn, bucket, audit_id, host, result, collected_value): - - #print(host) - cur = db_conn.cursor() - - # Set Variables - this_audit_id = audit_id - this_host_id = host["host_id"] - - if result == "pass" : - result_enum = "'pass'" - elif result == "fail" : - result_enum = "'fail'" - else : - # Exempt - result_enum = "'notafflicted'" - - #print(result, result_enum) - - if collected_value == "" : - collected_value_store = "NULL" - else: - collected_value_store = collected_value - - columns = " fk_host_id, fk_audits_id, initial_audit, last_audit, bucket, audit_result, audit_result_text " - value = str(this_host_id) + ", " + str(this_audit_id) + ", " + "FROM_UNIXTIME(" + str(ANALYZE_TIME) + "), " + "FROM_UNIXTIME(" + str(ANALYZE_TIME) + "), '" + bucket + "', " + result_enum + ", '" + collected_value_store + "'" - - #print(columns) - #print(value) - #print("Debug Collected_value", collected_value) - - if collected_value == "" : - # No Value to Compare (For Null Values) - # Otherwise we insert every notafflicted NULL - value_compare_string = " " - else : - value_compare_string = " and audit_result_text = '" + str(collected_value) + "' " - - - # Grab the latest selection - grab_last_collection_query="SELECT audit_result_id, audit_result from audits_by_host where fk_audits_id = " + str(this_audit_id) + " and fk_host_id = " + str(this_host_id) + " and audit_result = " + result_enum + value_compare_string + " and bucket = '" + bucket + "' order by last_audit limit 1 ; " - - #print(grab_last_collection_query) - - cur.execute(grab_last_collection_query) - - if cur.rowcount : - # There's Data so Just Update last_audit - this_audit_result_id = cur.fetchone()[0] - 
#print(this_audit_result_id) - update_query = "UPDATE audits_by_host SET last_audit = FROM_UNIXTIME(" + str(ANALYZE_TIME) + ") where audit_result_id = '" + str(this_audit_result_id) + "' ; commit ; " - #print(update_query) - cur.execute(update_query) - have_audit_id = True - else: - # No Data Do Insert - have_audit_id = False - insert_query = "Insert into audits_by_host (" + columns + ") VALUES ( " + value + ") ; commit ;" - #print(insert_query) - cur.execute(insert_query) - - if have_audit_id : - inserts = 0 - updates = 1 - else : - inserts = 1 - updates = 0 - - return inserts, updates - - ''' - def insert_update_audit(db_config_items, audit) : logger = logging.getLogger("analyze:insert_update_audit") @@ -848,7 +795,7 @@ def insert_update_audit(db_config_items, audit) : this_audit_filename = audit["filename"] this_audit_priority = audit.get("vuln-priority", 5 ) - db_conn, dbmessage, analyze_stats = giveMeDB(db_config_items) + db_conn = db_helper.get_conn(db_config_items, prefix="analyze_", tojq=".database", ac_def=True) cur = db_conn.cursor() @@ -929,83 +876,6 @@ def insert_update_audit(db_config_items, audit) : cur.close() return this_row - ''' - def store_sub_results(db_conn, subtable, pop_or_srvtype): - - - #print(subtable) - #print(pop_or_srvtype) - - - cur = db_conn.cursor(pymysql.cursors.DictCursor) - - for audit_id in pop_or_srvtype : - # Cycle Through Audits - #print(pop_or_srvtype[audit_id]) - for popSrvtype in pop_or_srvtype[audit_id].keys() : - this_pass = pop_or_srvtype[audit_id][popSrvtype][0] - this_fail = pop_or_srvtype[audit_id][popSrvtype][1] - this_exem = pop_or_srvtype[audit_id][popSrvtype][2] - this_timestamp_columns = subtable + "_initial_audit, " + subtable + "_last_audit" - this_tablename = "audits_by_" + subtable - # Cycle through audits - # Select the The Latest pop for this audit - pop_id_column = subtable + "_id, " - # Also used on inserts - select_columns = subtable + "_passed, " + subtable + "_failed, " + subtable + "_exempt " - tablename = " from " + this_tablename - where_clause = " where " + subtable + "_text = '" + popSrvtype + "' " - where_fk_audit = " and fk_audits_id = " + str(audit_id) + " " - tail = " order by " + subtable + "_last_audit desc limit 1; " - select_query = " select " + pop_id_column + select_columns + tablename + where_clause + where_fk_audit + tail - - # Debug - - cur.execute(select_query) - if cur.rowcount : - # There's Data Check if it matches and then update or insert - this_pop_or_srvtype_data = cur.fetchone() - #print(this_pop_or_srvtype_data) - fail_column = subtable + "_failed" - pass_column = subtable + "_passed" - exempt_column = subtable + "_exempt" - id_column = subtable + "_id" - - if this_pop_or_srvtype_data[fail_column] == this_fail and this_pop_or_srvtype_data[pass_column] == this_pass and this_pop_or_srvtype_data[exempt_column] == this_exem : - # Equals so update row - update_query = "UPDATE " + this_tablename + " SET " + subtable + "_last_audit = FROM_UNIXTIME(" + str(ANALYZE_TIME) + ") where " + id_column + " = '" + str(this_pop_or_srvtype_data[id_column]) + "' ; commit ; " - #print(update_query) - cur.execute(update_query) - insert_new = False - else: - # Need to do insert - #print(id_column + " needs insert") - insert_new = True - else: - #print(id_column + " needs insert") - insert_new = True - - if insert_new : - # We've decided to insert a new row - columns = select_columns + ", fk_audits_id, " + subtable + "_text, " + this_timestamp_columns - values = str(this_pass) + " , " + str(this_fail) + " , " + 
str(this_exem) + " , " + str(audit_id) + " , '" + popSrvtype +\ - "' ," + " FROM_UNIXTIME(" + str(ANALYZE_TIME) + "), " + "FROM_UNIXTIME(" + str(ANALYZE_TIME) + ") " - query_string = "INSERT INTO " + this_tablename + " ( " + columns + ") VALUES ( " + values + " ); commit; " - #print(query_string) - cur.execute(query_string) - - - return "Completed" - ''' - - def giveMeDB(db_config_items) : - - db_conn = pymysql.connect(host=db_config_items['dbhostname'], port=int(db_config_items['dbport']), user=db_config_items['dbusername'], passwd=db_config_items['dbpassword'], db=db_config_items['dbdb'], autocommit=True) - dbmessage = "Good, connected to " + db_config_items['dbusername'] + "@" + db_config_items['dbhostname'] + ":" + db_config_items['dbport'] + "/" + db_config_items['dbdb'] - analyze_stats["db-status"] = dbmessage - - return db_conn, dbmessage, analyze_stats - # Globals global pop_results global srvtype_results @@ -1015,6 +885,7 @@ def giveMeDB(db_config_items) : global audit_host_updates + # Results dictionaries pop_results = dict() pop_results_lock = threading.Lock() srvtype_results = dict() @@ -1026,57 +897,45 @@ def giveMeDB(db_config_items) : audit_host_updates = 0 audit_host_counts_lock = threading.Lock() - MAX = db_config_items["collectionmaxchars"] - FRESH = db_config_items["freshseconds"] - MAXTHREADS = int(db_config_items["maxthreads"]) - + # COnfig ITems + MAX = config_items["storage"]["collectionmaxchars"] + FRESH = config_items["analyze"]["freshseconds"] + MAXTHREADS = int(config_items["analyze"]["maxthreads"]) # Create A Queue audit_queue = Queue() - analyze_stats = dict() - #try: - db_conn, db_message, analyze_stats = giveMeDB(db_config_items) - - #except: - # analyze_stats["db-status"] = "Connection Failed" - # print(analyze_stats) - - + try: + db_conn = db_helper.get_conn(config_items, prefix="analyze_", tojq=".database", ac_def=True) + dbmessage = "Connected" + except Exception as db_conn_error: + dbmessage = "Unable to Connect" + logger.debug("DB Connection Error : {}".format(db_conn_error)) + finally: + # Start my analyze_stats with data + analyze_stats = {"db-status" : dbmessage} + + # Grab Hosts List (Still Single Threaded) host_good, analyze_stats["FreshHosts"], host_list = grab_host_list(db_conn, FRESH) if host_good : + logger.info("Successfully Collected {} Hosts as 'Live'".format(len(host_list))) + analyze_stats["HostCollectionStatus"] = "Success" - # To avoid Locking pop stats are being moved to their own modul - #analyze_stats["audit_inserts"], analyze_stats["audit_updates"], all_pop_results, all_srvtype_results, all_audit_results, jobtime = analyze_all_audits(db_config_items, host_list, FRESH, MAXTHREADS) - analyze_stats["audit_inserts"], analyze_stats["audit_updates"], jobtime = analyze_all_audits(db_config_items, host_list, FRESH, MAXTHREADS) - #print(audit_results) - # Collation of Population Statistics is Being moved to It's own Module - #try: - # analyze_stats["stor_pop_status"] = store_sub_results(db_conn, "pop" , all_pop_results) - #except Exception as e : - # print("Error Storing Pop Results", e) - # - #try: - # analyze_stats["stor_srvytpe_status"] = store_sub_results(db_conn, "srvtype" , all_srvtype_results) - #except Exception as e : - # print("Error Storing Srvtype Results", e) - # - #try: - # analyze_stats["stor_acoll_status"] = store_sub_results(db_conn, "acoll", all_audit_results) - #except Exception as e : - # print("Error Storing Audit Results", e) - - analyze_stats["jobtime"] = str(jobtime) + + analyze_stats["audit_inserts"], 
analyze_stats["audit_updates"], analyze_stats["jobtime"] = analyze_all_audits(config_items, host_list, FRESH, MAXTHREADS) + analyze_stats["threads"] = str(MAXTHREADS) analyze_stats["totalaudits"] = len(audits) else : analyze_stats["HostCollectionStatus"] = "Failed" - print(json.dumps(analyze_stats, sort_keys=True, indent=4)) + return analyze_stats if __name__ == "__main__": - analyze(CONFIGDIR, CONFIG) + analyze_stats = analyze(CONFIGDIR, CONFIG) + + print(json.dumps(analyze_stats, sort_keys=True, indent=4)) From 283478956374e8737609f84eb47e7af873c52ebf Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Thu, 9 Jan 2020 16:39:05 -0800 Subject: [PATCH 036/143] GLAS Print-> Logger * Using logger instead of Print in GLAS --- generic_large_analysis_store.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/generic_large_analysis_store.py b/generic_large_analysis_store.py index ef82942..5d3445d 100644 --- a/generic_large_analysis_store.py +++ b/generic_large_analysis_store.py @@ -19,6 +19,7 @@ from copy import deepcopy from time import time from time import sleep +import logging import threading import multiprocessing @@ -32,6 +33,7 @@ def generic_large_analysis_store(db_conn, audit_id, audit_results_dict, FRESH): # Generic Large Analysis Storage # Create my Cursor + logger = logging.getLogger("generic_large_analysis_store") ANALYZE_TIME = int(time()) @@ -139,8 +141,9 @@ def generic_large_analysis_store(db_conn, audit_id, audit_results_dict, FRESH): updates += len(this_bucket_update_ids) except Exception as mysql_error: print("Error updating hosts for audit", audit_id , " : ", str(mysql_error) ) - except Exception as e : - print("Error doing Updates. ", e) + except Exception as updating_hosts_audit: + logger.error("Error doing Updates for audit {} with error {}".format(audit_id, + updating_hosts_audit)) #print("Inserts") @@ -162,9 +165,9 @@ def generic_large_analysis_store(db_conn, audit_id, audit_results_dict, FRESH): except Exception as e : print("Error doing Inserts for audit", audit_id, " : ", e ) - except Exception as e: - print("Error doing Inserts. 
", e) - + except Exception as inserts_new_host_audit: + logger.error("Error doing Inserts for Audit {} with error {}".format(audit_id, + inserts_new_host_audit)) # Close the MySQL Cursor cur.close() From c09c086dfa03043ff9411ddff9c065f6df4623f5 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Thu, 9 Jan 2020 16:39:26 -0800 Subject: [PATCH 037/143] GLC Print->Logger Using Logger instead of Print in GLC --- generic_large_compare.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/generic_large_compare.py b/generic_large_compare.py index ec455cf..54decd7 100755 --- a/generic_large_compare.py +++ b/generic_large_compare.py @@ -20,6 +20,7 @@ from copy import deepcopy from time import time from time import sleep +import logging import threading import multiprocessing @@ -58,6 +59,8 @@ def generic_large_compare(db_conn, host_list_dict, mtype, ctype, csubtype, \ ''' + logger = logging.getLogger("generic_large_compare") + # Create my Cursor cur = db_conn.cursor() @@ -122,13 +125,19 @@ def generic_large_compare(db_conn, host_list_dict, mtype, ctype, csubtype, \ if len(host_ids_list) > 0: #print(comparison_query) - cur.execute(comparison_query) - if cur.rowcount: - query_results_list = cur.fetchall() + try: + logger.debug("Comparison Query : {}".format(comparison_query)) + cur.execute(comparison_query) + except Exception as DB_Error: + logger.error("Unable to Do Database Query on Generic Large Compare.") + query_results_list = list() else: - # No Results - # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - query_results_list = [] + if cur.rowcount: + query_results_list = cur.fetchall() + else: + # No Results + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + query_results_list = [] else: query_results_list = [] @@ -203,8 +212,9 @@ def generic_large_compare(db_conn, host_list_dict, mtype, ctype, csubtype, \ exempthost = [ host for host in query_results_list if len(host[1]) <= 0 ] passhost = [ host for host in query_results_list if int(host[1]) == int(massaged_mvalue[index_value]) ] failhost = [ host for host in query_results_list if host not in exempthost and host not in passhost ] - except Exception as e: - print("Error Doing Comparisons: ", e) + except Exception as comparisons_error: + logger.error("Error doing Comparisons for generic_large_compare.") + logger.debug(comparisons_error) # Temporary List of HostID's exempthostids = list() From 45a2476f50c8921ef175873d882e80b11a4ad986 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Thu, 9 Jan 2020 16:40:01 -0800 Subject: [PATCH 038/143] SLC Changes * SLC Tabs to SPaces * Using Logger instead of Print --- subtype_large_compare.py | 353 ++++++++++++++++++++------------------- 1 file changed, 179 insertions(+), 174 deletions(-) diff --git a/subtype_large_compare.py b/subtype_large_compare.py index 1b5fb29..4c5a706 100644 --- a/subtype_large_compare.py +++ b/subtype_large_compare.py @@ -19,6 +19,7 @@ from copy import deepcopy from time import time from time import sleep +import logging import threading import multiprocessing @@ -27,192 +28,196 @@ def subtype_large_compare(db_conn, host_list_dict, mtype, ctype, csubtype, mvalue, FRESH): - # Subtype Large Comparison + logger = logger.getLogger("subtype_large_compare") - cur = db_conn.cursor() + # Subtype Large Comparison - results_dict = deepcopy(host_list_dict) + cur = db_conn.cursor() - # Host Lists - host_ids_list = list() - fail_hosts = list() - success_hosts = list() - exempt_hosts = list() + results_dict = deepcopy(host_list_dict) - inserts = 0 - updates = 0 + # Host Lists + 
host_ids_list = list() + fail_hosts = list() + success_hosts = list() + exempt_hosts = list() - # Host ID - for item in host_list_dict : - host_ids_list.append(item['host_id']) + inserts = 0 + updates = 0 - # Massaging Types + # Host ID + for item in host_list_dict : + host_ids_list.append(item['host_id']) - massaged_ctype = [] - massaged_csubtype = [] - massaged_mvalue = [] + # Massaging Types - # In order for each one. Rehydrate string (with ") and then convert to a tuple + massaged_ctype = [] + massaged_csubtype = [] + massaged_mvalue = [] + # In order for each one. Rehydrate string (with ") and then convert to a tuple - if type(ctype) is str : - massaged_ctype.append(ctype) - else : - # It's a lists - massaged_ctype = ctype - if type(csubtype) is str : - interm_processed_csubtype = ast.literal_eval('"' + csubtype.replace("," , "\",\"") + '"') - massaged_csubtype.append(interm_processed_csubtype) - else : - # Cycle through each subtype list and toss that into csubtype value - massaged_csubtype = [ ast.literal_eval('"' + item.replace("," , "\",\"") + '"') for item in csubtype ] + if type(ctype) is str : + massaged_ctype.append(ctype) + else : + # It's a lists + massaged_ctype = ctype - if type(mvalue) is str : - interm_processed_mvalue = ast.literal_eval('"' + mvalue.replace("," , "\",\"") + '"') - massaged_mvalue.append(interm_processed_mvalue) - else : - # Cycle throught the regexp matches and toss that into csubtype value - massaged_mvalue = [ ast.literal_eval('"' + item.replace("," , "\",\"") + '"') for item in mvalue ] + if type(csubtype) is str : + interm_processed_csubtype = ast.literal_eval('"' + csubtype.replace("," , "\",\"") + '"') + massaged_csubtype.append(interm_processed_csubtype) + else : + # Cycle through each subtype list and toss that into csubtype value + massaged_csubtype = [ ast.literal_eval('"' + item.replace("," , "\",\"") + '"') for item in csubtype ] - #print(type(massaged_ctype), massaged_ctype) - #print(type(massaged_csubtype), massaged_csubtype) - #print(type(massaged_mvalue), massaged_mvalue) + if type(mvalue) is str : + interm_processed_mvalue = ast.literal_eval('"' + mvalue.replace("," , "\",\"") + '"') + massaged_mvalue.append(interm_processed_mvalue) + else : + # Cycle throught the regexp matches and toss that into csubtype value + massaged_mvalue = [ ast.literal_eval('"' + item.replace("," , "\",\"") + '"') for item in mvalue ] - host_id_list_string = ",".join(map(str, host_ids_list)) - - #print(type(host_id_list_string), host_id_list_string) - #print("About to build Queries") - - for index_value in range(0, len(massaged_ctype)) : - - if mtype == "subnonhere" : - # Both do the same thing. 
They just accept either a numerical match or a zero - COMBINE = " OR " - COLUMNMATCH = " = " - elif mtype == "suballhere" : - COMBINE = " OR " - COLUMNMATCH = " = " - elif mtype == "subknowall" : - # subknowall - COMBINE = " AND " - COLUMNMATCH = " != " - else : - raise Exception("Unknown match type ", mtype) - - #print(COMBINE, COLUMNMATCH) - # Cycle through each ctype & Do a Collection - - collection = [] - collection.append("SELECT fk_host_id, count(DISTINCT(collection_subtype))") - collection.append("from collection") - collection.append("WHERE") - # Where Host List - collection.append(" ( fk_host_id in (" + host_id_list_string + ") )" ) - collection.append(" AND ") - collection.append(" ( collection_type = '" + str(massaged_ctype[index_value]) + "' )") - collection.append(" AND ") - collection.append(" ( ") - - #print(collection) - - # Grab the Column - columns_only = [] - - for sub_index_value in range(0, len(massaged_csubtype[index_value])) : - - - # Generate My Column Match String - if massaged_mvalue[index_value][sub_index_value] == "any" : - matchstring = "" - else : - matchstring = " AND collection_value REGEXP '" + massaged_mvalue[index_value][sub_index_value] + "'" - - #print("Column Match: ", matchstring) - - columnmatch_string = "collection_subtype " + COLUMNMATCH + "'" + massaged_csubtype[index_value][sub_index_value] + "'" - - columns_only.append("( " + columnmatch_string + matchstring + " )") - - #print(columns_only) - - columns_only_string = COMBINE .join(columns_only) - - collection.append(columns_only_string) - collection.append(" ) ") - - collection.append(" and last_update >= now() - INTERVAL " + str(FRESH) + " SECOND " ) - collection.append(" group by fk_host_id order by last_update desc ;" ) - - collection_query = " ".join(collection) - - if len(host_ids_list) > 0 : - cur.execute(collection_query) - if cur.rowcount: - query_results_list = cur.fetchall() - else : - # No Results - # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - query_results_list = [] - else : - query_results_list = [] - - #print(query_results_list) - query_results_list_index = [ host[0] for host in query_results_list ] - #print(query_results_list_index) - - exempthost = list() - passhost = list() - failhost = list() - - try: - if mtype == "subnonhere" : - # No exempt hosts. All hosts not in our results pass - exempthost = [] - passhost = [ host for host in host_ids_list if host not in query_results_list_index ] - failhost = [ host[0] for host in query_results_list if host[1] > 0 ] - elif mtype == "suballhere" : - # All the hosts that Aren't in our query Results Fail - exempthost = [ ] - # All the hosts in the results whose output matches exactly - passhost = [ host[0] for host in query_results_list if host[1] == len(massaged_csubtype[index_value]) ] - # All the hosts in our query results not in exempthosts or passhosts - failhost = [ host[0] for host in query_results_list if host[0] not in exempthost and host[0] not in passhost ] - elif mtype == "subknowall" : - # No exempt hosts. Anything not in our list just fucking passed. :) - exempthost = [] - # Pass host is all hosts that don't show up in query results - passhost = [ host for host in host_ids_list if host not in query_results_list_index ] - failhost = [ host[0] for host in query_results_list if host[1] > 0] - else: - raise Exception("Unknown match type. 
Potential Race Condition!") - except Exception as e: - print("Error Doing Comparisons: ", e) - - - #print(exempthost, passhost, failhost) - - for host in range(0,len(results_dict)): - # Hydrate if missing - #print(host) - try: - if "pfe" in results_dict[host].keys() : - if results_dict[host]['pfe'] == "fail" : - # I've already failed so fuck you ( :) ) - pass - elif results_dict[host]['host_id'] in failhost : - # I've now failed so place this in the results This will overwrite any exempt or pass entries - results_dict[host]['pfe'] = "fail" - results_dict[host]['pfevalue'] = "Subtype Comparison " + mtype + " Failed" - elif results_dict[host]['host_id'] in passhost : - results_dict[host]['pfe'] = "pass" - results_dict[host]['pfevalue'] = "Subtype Comparison " + mtype + " Passed" - elif results_dict[host]['host_id'] in exempthost : - results_dict[host]['pfe'] = "notafflicted" - results_dict[host]['pfevalue'] = "Subtype Comparison " + mtype + " Exempt" - except Exception as e : - print("Error trying to match items relating to host ", host, " : " , e ) - - cur.close() - return results_dict + #print(type(massaged_ctype), massaged_ctype) + #print(type(massaged_csubtype), massaged_csubtype) + #print(type(massaged_mvalue), massaged_mvalue) + + host_id_list_string = ",".join(map(str, host_ids_list)) + + #print(type(host_id_list_string), host_id_list_string) + #print("About to build Queries") + + for index_value in range(0, len(massaged_ctype)) : + + if mtype == "subnonhere" : + # Both do the same thing. They just accept either a numerical match or a zero + COMBINE = " OR " + COLUMNMATCH = " = " + elif mtype == "suballhere" : + COMBINE = " OR " + COLUMNMATCH = " = " + elif mtype == "subknowall" : + # subknowall + COMBINE = " AND " + COLUMNMATCH = " != " + else : + raise Exception("Unknown match type ", mtype) + + #print(COMBINE, COLUMNMATCH) + # Cycle through each ctype & Do a Collection + + collection = [] + collection.append("SELECT fk_host_id, count(DISTINCT(collection_subtype))") + collection.append("from collection") + collection.append("WHERE") + # Where Host List + collection.append(" ( fk_host_id in (" + host_id_list_string + ") )" ) + collection.append(" AND ") + collection.append(" ( collection_type = '" + str(massaged_ctype[index_value]) + "' )") + collection.append(" AND ") + collection.append(" ( ") + + #print(collection) + + # Grab the Column + columns_only = [] + + for sub_index_value in range(0, len(massaged_csubtype[index_value])) : + + + # Generate My Column Match String + if massaged_mvalue[index_value][sub_index_value] == "any" : + matchstring = "" + else : + matchstring = " AND collection_value REGEXP '" + massaged_mvalue[index_value][sub_index_value] + "'" + + #print("Column Match: ", matchstring) + + columnmatch_string = "collection_subtype " + COLUMNMATCH + "'" + massaged_csubtype[index_value][sub_index_value] + "'" + + columns_only.append("( " + columnmatch_string + matchstring + " )") + + #print(columns_only) + + columns_only_string = COMBINE .join(columns_only) + + collection.append(columns_only_string) + collection.append(" ) ") + + collection.append(" and last_update >= now() - INTERVAL " + str(FRESH) + " SECOND " ) + collection.append(" group by fk_host_id order by last_update desc ;" ) + + collection_query = " ".join(collection) + + if len(host_ids_list) > 0 : + cur.execute(collection_query) + if cur.rowcount: + query_results_list = cur.fetchall() + else : + # No Results + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+ query_results_list = [] + else : + query_results_list = [] + + #print(query_results_list) + query_results_list_index = [ host[0] for host in query_results_list ] + #print(query_results_list_index) + + exempthost = list() + passhost = list() + failhost = list() + + try: + if mtype == "subnonhere" : + # No exempt hosts. All hosts not in our results pass + exempthost = [] + passhost = [ host for host in host_ids_list if host not in query_results_list_index ] + failhost = [ host[0] for host in query_results_list if host[1] > 0 ] + elif mtype == "suballhere" : + # All the hosts that Aren't in our query Results Fail + exempthost = [ ] + # All the hosts in the results whose output matches exactly + passhost = [ host[0] for host in query_results_list if host[1] == len(massaged_csubtype[index_value]) ] + # All the hosts in our query results not in exempthosts or passhosts + failhost = [ host[0] for host in query_results_list if host[0] not in exempthost and host[0] not in passhost ] + elif mtype == "subknowall" : + # No exempt hosts. Anything not in our list just fucking passed. :) + exempthost = [] + # Pass host is all hosts that don't show up in query results + passhost = [ host for host in host_ids_list if host not in query_results_list_index ] + failhost = [ host[0] for host in query_results_list if host[1] > 0] + else: + raise Exception("Unknown match type. Potential Race Condition!") + except Exception as subtype_error: + logger.error("Error Doing Comparisons {}".format(subtype_error)) + + + #print(exempthost, passhost, failhost) + + for host in range(0,len(results_dict)): + # Hydrate if missing + #print(host) + try: + if "pfe" in results_dict[host].keys() : + if results_dict[host]['pfe'] == "fail" : + # I've already failed so fuck you ( :) ) + pass + elif results_dict[host]['host_id'] in failhost : + # I've now failed so place this in the results This will overwrite any exempt or pass entries + results_dict[host]['pfe'] = "fail" + results_dict[host]['pfevalue'] = "Subtype Comparison " + mtype + " Failed" + elif results_dict[host]['host_id'] in passhost : + results_dict[host]['pfe'] = "pass" + results_dict[host]['pfevalue'] = "Subtype Comparison " + mtype + " Passed" + elif results_dict[host]['host_id'] in exempthost : + results_dict[host]['pfe'] = "notafflicted" + results_dict[host]['pfevalue'] = "Subtype Comparison " + mtype + " Exempt" + except Exception as subtype_pfe_error: + logger.error("Error tyring to match items relating to host {} with error {}".format(host, + subtype_pfe_error)) + + cur.close() + + return results_dict From e7f46ad9e55c3e734be745959cc5376b007fc6d2 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Thu, 9 Jan 2020 16:41:20 -0800 Subject: [PATCH 039/143] Making ui.py more bettah * Using a single config file that can be shared. 
* Config file is now yaml * From CLI (or __main__) will attempt to lookup the config file in a few default places if it doesn't exist * Utilizing new DB Helper functions for better future DB stuff --- ui.py | 66 +++++++++++++++++++++++++++++++---------------------------- 1 file changed, 35 insertions(+), 31 deletions(-) diff --git a/ui.py b/ui.py index 07b537a..2db56d1 100755 --- a/ui.py +++ b/ui.py @@ -24,18 +24,17 @@ from tokenmgmt import validate_key from canonical_cve import shuttlefish from generic_large_compare import generic_large_compare +import db_helper + if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument("-c", "--configfile", help="Config File for Scheduler", required=True) + parser.add_argument("-c", "--config", help="Config File for Scheduler", required=False, default="/etc/manowar/manoward.yaml") parser.add_argument("-d", "--flaskdebug", action='store_true', help="Turn on Flask Debugging", default=False) parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) args = parser.parse_args() - FDEBUG = args.flaskdebug - CONFIG = args.configfile - VERBOSE = len(args.verbose) if VERBOSE == 0: @@ -54,6 +53,22 @@ LOGGER.info("Welcome to Man 'o War") + FDEBUG = args.flaskdebug + CONFIG = args.config + + if CONFIG is False: + # Let's Look for a Default File. + LOGGER.debug("No Config File Given Let's Look in Default Locations.") + for default_file in ("/etc/manowar/manoward.yaml", + "./etc/manowar/manoward.yaml", + "/usr/local/etc/manowar/manoward.yaml"): + + if os.path.isfile(default_file) and os.access(default_file, os.R_OK): + LOGGER.debug("Using Default File : {}".format(default_file)) + CONFIG = default_file + break + + def ui(CONFIG, FDEBUG): ''' @@ -64,12 +79,17 @@ def ui(CONFIG, FDEBUG): logger = logging.getLogger("ui.ui") - try: - with open(CONFIG) as yaml_config: - config_items = yaml.safe_load(yaml_config) - except Exception as yaml_error: # pylint: disable=broad-except, invalid-name - logger.debug("Error when reading yaml config {} ".format(yaml_error)) - sys.exit("Bad configuration file {}".format(CONFIG)) + if isinstance(CONFIG, dict): + logger.info("Using Configuration as a Dict.") + config_items = CONFIG + else: + logger.info("Reading Configuration from String.") + try: + with open(CONFIG) as yaml_config: + config_items = yaml.safe_load(yaml_config) + except Exception as yaml_error: + logger.debug("Error when reading yaml config {} ".format(yaml_error)) + sys.exit("Bad configuration file {}".format(CONFIG)) logger.debug("Configuration Items: {}".format(config_items)) @@ -98,32 +118,16 @@ def ui(CONFIG, FDEBUG): def before_request(): try: - g.db = pymysql.connect(host=config_items['database']['dbhostname'], - port=int(config_items['database']['dbport']), - user=config_items['database']['dbusername'], - passwd=config_items['database']['dbpassword'], - db=config_items['database']['dbdb'], - autocommit=True) - - dbmessage = "Good, connected to {}@{}:{}/{}".format(config_items['database']['dbusername'], - config_items['database']['dbhostname'], - config_items['database']['dbport'], - config_items['database']['dbdb']) - - logger.info(dbmessage) + g.db = db_helper.get_conn(config_items, prefix="api_", tojq=".database", ac_def=True) g.logger = logger + g.debug = FDEBUG except Exception as connection_error: - dbmessage = "Connection Failed connected to {}@{}:{}/{} with error {}".format(config_items['database']['dbusername'], - config_items['database']['dbhostname'], - 
config_items['database']['dbport'], - config_items['database']['dbdb'], - connection_error) - logger.error(dbmessage) - - return dbmessage + logger.debug("Connection to DB Error Abandoing Connection Error.") + + return str(connection_error) # Endpoint Authorization List of Endosements and Restrictions that Define what you may and may not access # For endpoints with fine grained controls From b5bbfab13e29aae33783a584a216ce3bbf031930 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 11:11:53 -0800 Subject: [PATCH 040/143] Update Some New Audits --- .../audittools.d/ubuntu_usn/USN-4229-1.json | 99 +++++++++++++ .../audittools.d/ubuntu_usn/USN-4233-1.json | 72 ++++++++++ .../audittools.d/ubuntu_usn/USN-4234-1.json | 133 ++++++++++++++++++ 3 files changed, 304 insertions(+) create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4229-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4233-1.json create mode 100644 travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4234-1.json diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4229-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4229-1.json new file mode 100644 index 0000000..0d40c68 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4229-1.json @@ -0,0 +1,99 @@ +{ + "USN-4229-1": { + "comparisons": { + "precise-bucket": { + "comparison-collection-subtype": [ + "ntp" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:4.2.6.p3+dfsg-1ubuntu3.13" + ] + }, + "trusty-bucket": { + "comparison-collection-subtype": [ + "ntp" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:4.2.6.p5+dfsg-3ubuntu2.14.04.13+esm1" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "ntp" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "1:4.2.8p4+dfsg-3ubuntu5.10" + ] + } + }, + "filters": { + "precise-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "precise" + ] + }, + "trusty-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "trusty" + ] + }, + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2018-12327_(Unknown)": "https://people.canonical.com/~ubuntu-security/cve/2018/CVE-2018-12327.html", + "USN-4229-1": "https://usn.ubuntu.com/4229-1/" + }, + "vuln-long-description": "It was discovered that ntpq and ntpdc incorrectly handled some arguments.\nAn attacker could possibly use this issue to cause ntpq or ntpdc to crash,\nexecute arbitrary code, or escalate to higher privileges. 
\n\n\nTLDR: In general, a standard system update will make all the necessary changes.\n\n", + "vuln-name": "USN-4229-1", + "vuln-primary-link": "https://usn.ubuntu.com/4229-1/", + "vuln-priority": 1, + "vuln-short-description": "A security issue was fixed in ntpq and ntpdc.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4233-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4233-1.json new file mode 100644 index 0000000..be2b754 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4233-1.json @@ -0,0 +1,72 @@ +{ + "USN-4233-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "libgnutls30" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "3.5.18-1ubuntu1.2" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "libgnutls30" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "3.4.10-4ubuntu1.6" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "LP : 1858691": "https://launchpad.net/bugs/1858691", + "USN-4233-1": "https://usn.ubuntu.com/4233-1/" + }, + "vuln-long-description": "As a security improvement, this update marks SHA1 as being untrusted for\ndigital signature operations.\n\n\nTLDR: In general, a standard system update will make all the necessary changes.\n\n", + "vuln-name": "USN-4233-1", + "vuln-primary-link": "https://usn.ubuntu.com/4233-1/", + "vuln-priority": 1, + "vuln-short-description": "SHA1 has been marked as untrusted in GnuTLS.\n" + } +} \ No newline at end of file diff --git a/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4234-1.json b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4234-1.json new file mode 100644 index 0000000..8552cf0 --- /dev/null +++ b/travis/artifacts/audits.d/audittools.d/ubuntu_usn/USN-4234-1.json @@ -0,0 +1,133 @@ +{ + "USN-4234-1": { + "comparisons": { + "bionic-bucket": { + "comparison-collection-subtype": [ + "firefox" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "72.0.1+build1-0ubuntu0.18.04.1" + ] + }, + "disco-bucket": { + "comparison-collection-subtype": [ + "firefox" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "72.0.1+build1-0ubuntu0.19.04.1" + ] + }, + "eoan-bucket": { + "comparison-collection-subtype": [ + "firefox" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "72.0.1+build1-0ubuntu0.19.10.1" + ] + }, + "xenial-bucket": { + "comparison-collection-subtype": [ + "firefox" + ], + "comparison-collection-type": [ + "packages" + ], + "comparison-match": "aptge", + "comparison-match-value": [ + "72.0.1+build1-0ubuntu0.16.04.1" + ] + } + }, + "filters": { + "bionic-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + 
"filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "bionic" + ] + }, + "disco-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "disco" + ] + }, + "eoan-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "eoan" + ] + }, + "xenial-bucket": { + "filter-collection-subtype": [ + "default", + "default" + ], + "filter-collection-type": [ + "os", + "release" + ], + "filter-match": "is", + "filter-match-value": [ + "Ubuntu", + "xenial" + ] + } + }, + "jellyfishversion": 2, + "vuln-additional-links": { + "CVE-2019-17016_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17016.html", + "CVE-2019-17017_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17017.html", + "CVE-2019-17020_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17020.html", + "CVE-2019-17022_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17022.html", + "CVE-2019-17023_(Low)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17023.html", + "CVE-2019-17024_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17024.html", + "CVE-2019-17025_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17025.html", + "CVE-2019-17026_(Medium)": "https://people.canonical.com/~ubuntu-security/cve/2019/CVE-2019-17026.html", + "USN-4234-1": "https://usn.ubuntu.com/4234-1/" + }, + "vuln-long-description": "Multiple security issues were discovered in Firefox. 
If a user were\ntricked in to opening a specially crafted website, an attacker could\npotentially exploit these to cause a denial of service, obtain sensitive\ninformation, bypass Content Security Policy (CSP) restrictions, conduct\ncross-site scripting (XSS) attacks, or execute arbitrary code.\n\n\nTLDR: After a standard system update you need to restart Firefox to make\nall the necessary changes.\n\n", + "vuln-name": "USN-4234-1", + "vuln-primary-link": "https://usn.ubuntu.com/4234-1/", + "vuln-priority": 5, + "vuln-short-description": "Firefox could be made to crash or run programs as your login if it\nopened a malicious website.\n" + } +} \ No newline at end of file From 52911acdbfd2c93b9ac4b63ecafd4ba20468fa2b Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 11:12:09 -0800 Subject: [PATCH 041/143] DB Helper Lower Debug Logging --- db_helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/db_helper.py b/db_helper.py index ae2c993..3fced2a 100644 --- a/db_helper.py +++ b/db_helper.py @@ -83,7 +83,7 @@ def get_conn(config, prefix=None, tojq=None, **kwargs): db_conn = pymysql.connect(**pymysql_args) - logger.info("Connected to {user}@{host}:{port}/{database}".format(**pymysql_args)) + logger.debug("Connected to {user}@{host}:{port}/{database}".format(**pymysql_args)) except Exception as connection_error: logger.warning("Connection Failed to {user}@{host}:{port}/{database}".format(**pymysql_args)) From a1cb27e8807900c3723d69775657153141bda2dc Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 11:16:14 -0800 Subject: [PATCH 042/143] ui.yaml -> manoward.yaml in Travis --- travis/artifacts/{ui.yaml => manoward.yaml} | 27 +++++++++++++++++---- 1 file changed, 22 insertions(+), 5 deletions(-) rename travis/artifacts/{ui.yaml => manoward.yaml} (55%) diff --git a/travis/artifacts/ui.yaml b/travis/artifacts/manoward.yaml similarity index 55% rename from travis/artifacts/ui.yaml rename to travis/artifacts/manoward.yaml index 181d974..5284668 100644 --- a/travis/artifacts/ui.yaml +++ b/travis/artifacts/manoward.yaml @@ -6,11 +6,22 @@ webserver: accesslink: http://localhost:5000 port: 5000 database: - dbhostname: localhost - dbusername: manowar - dbpassword: longshittypassword_api - dbport: 3306 - dbdb: manowar2 + api_dbhostname: localhost + api_dbusername: manowar + api_dbpassword: longshittypassword_api + api_dbport: 3306 + api_dbdb: manowar2 + # TLS Ignored + #api_ssl: true + #api_dbsslca: /path_to_cert + analyze_dbhostname: localhost + analyze_dbusername: manowar_analyze + analyze_dbpassword: longshittypassword_analyze + analyze_dbport: 3306 + analyze_dbdb: manowar2 + # TLS Ignored + #analyze_ssl: true + #analyze_dbsslca: /path_to_cert v2api: root: /v2 preroot: /jellyfish @@ -30,4 +41,10 @@ v2ui: # extendhost_schema: /etc/manowar/jellyfish_extension_schema.json.schema # storageconfig: /etc/manowar/storage.ini # storagemaxchars: 255 +storage: + collectionmaxchars: 255 +analyze: + freshseconds: 172800 + maxthreads: 32 + From 94054bb01e9c89fa83562d99951148cf129e5461 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 11:17:18 -0800 Subject: [PATCH 043/143] Updating Travis to use manoward.yaml --- travis/analyze_test.sh | 2 +- travis/api_test.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/travis/analyze_test.sh b/travis/analyze_test.sh index 81d9300..0cf0661 100755 --- a/travis/analyze_test.sh +++ b/travis/analyze_test.sh @@ -3,7 +3,7 @@ set -x # Run Scheduler Test -./analyze.py -a 
travis/artifacts/audits.d -c travis/artifacts/analyze.ini -vvv +./analyze.py -a travis/artifacts/audits.d -c travis/artifacts/manoward.yaml -vvv analyze_good=$? diff --git a/travis/api_test.sh b/travis/api_test.sh index 5064a34..cc58054 100755 --- a/travis/api_test.sh +++ b/travis/api_test.sh @@ -3,7 +3,7 @@ set -x # Run Scheduler Test -./ui.py -d -c ./travis/artifacts/ui.yaml > /home/travis/ui.log & +./ui.py -d -c ./travis/artifacts/manoward.yaml > /home/travis/ui.log & uipid=$! From 3fd428f6eab6a96617371bfa263b84cc1093e194 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 11:30:56 -0800 Subject: [PATCH 044/143] Moving yoyo Configurations into Manoward.yaml Travis --- travis/artifacts/manoward.yaml | 29 +++++++++++++++++++++++++---- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/travis/artifacts/manoward.yaml b/travis/artifacts/manoward.yaml index 5284668..5e32dd4 100644 --- a/travis/artifacts/manoward.yaml +++ b/travis/artifacts/manoward.yaml @@ -6,22 +6,43 @@ webserver: accesslink: http://localhost:5000 port: 5000 database: + ## Global Settings ## + # Someday Change to SSL for TLS encryption from yoyo + req_enc: "NONE" + + ## API User ## api_dbhostname: localhost api_dbusername: manowar + api_hostmask: "%" api_dbpassword: longshittypassword_api api_dbport: 3306 api_dbdb: manowar2 - # TLS Ignored + # For TLS At Rest #api_ssl: true - #api_dbsslca: /path_to_cert + #api_dbsslca: /path/to/cacert + + ## Analyze User ## analyze_dbhostname: localhost analyze_dbusername: manowar_analyze + analyze_hostmask: "%" analyze_dbpassword: longshittypassword_analyze analyze_dbport: 3306 analyze_dbdb: manowar2 - # TLS Ignored + # For TLS At Rest #analyze_ssl: true - #analyze_dbsslca: /path_to_cert + #analyze_dbsslca: /path/to/cacert + + ## Storage User ## + store_dbhostname: localhost + store_dbusername: manowar_store + store_hostmask: "%" + store_dbpassword: longshittypassword_store + store_dbport: 3306 + store_dbdb: manowar2 + # For TLS At Rest + #store_ssl: true + #store_dbsslca: /path/to/cacert + v2api: root: /v2 preroot: /jellyfish From 3d191da69b81fc9a44e4625f193f5c5898391675 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 11:31:11 -0800 Subject: [PATCH 045/143] yoyo.ini no longer needs this --- travis/artifacts/yoyo.ini | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/travis/artifacts/yoyo.ini b/travis/artifacts/yoyo.ini index 1661aeb..9d50ea0 100644 --- a/travis/artifacts/yoyo.ini +++ b/travis/artifacts/yoyo.ini @@ -11,27 +11,3 @@ verbosity = 3 # Batch Mode batch_mode = on - -[manowar] -req_enc = NONE - -# User for API that does most of everything nowadays. -# In the future almost everything should go through this -# and the anayze user -api_username = manowar -api_password = longshittypassword_api -api_hostmask = % - -# This is for the old SSH collector system. In the future -# This should get replaced with either an SSH collector that -# uses the api or manowar_agent directly -store_username = manowar_store -store_password = longshittypassword_store -store_hostmask = % - -# User for the analyze audit processes -# Seperates out the two for better splitting of things. 
-# Mainly read -analyze_username = manowar_analyze -analyze_password = longshittypassword_analyze -analyze_hostmask = % From fbd915ce6ef2c691ff6d058e99f425c64a5725fd Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 11:36:56 -0800 Subject: [PATCH 046/143] In Travis Add Travis manwoard file --- .../20200107_02_xxxxx-credentials.py | 112 ++++++++++-------- 1 file changed, 60 insertions(+), 52 deletions(-) diff --git a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py index 28236c7..b820d76 100644 --- a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py +++ b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py @@ -6,12 +6,15 @@ import os import os.path + import logging +import yaml from configparser import ConfigParser from yoyo import step + __depends__ = {"20200107_01_Gv0ql-initialize"} # Permissions as they "should" be @@ -53,6 +56,33 @@ logger.info("Finding Configuration File yoyo.ini") +LOGGER.debug("No Config File Given Let's Look in Default Locations.") + +possible_config_files = ["/etc/manowar/manoward.yaml", "./etc/manowar/manoward.yaml", "/usr/local/etc/manowar/manoward.yaml"] + +if os.environ.get("TRAVIS", None) is not None: + logger.info("In a Travis Build Add the Travis Paths to Configuration.") + possible_config_files.append("./travis/artifacts/manoward.yaml") + + +for default_file in possible_config_files: + if os.path.isfile(default_file) and os.access(default_file, os.R_OK): + LOGGER.debug("Using Default File : {}".format(default_file)) + + try: + with open(default_file, "r") as manoward_config_file: + manwoard_configs = yaml.safe_load(manoward_config_file) + except Exception as manoward_config_error: + logger.error("Unable to Read Manoward Configuration.") + logger.debug("Error : {}".format(manoward_config_error)) + + raise manoward_config_error + else: + # I've now Got my Things + logger.info("Found and loaded manoward.yaml") + + + do_api_attempt = True config_file = None @@ -64,65 +94,43 @@ steps = list() -for default_file in ("../yoyo.ini", "./yoyo.ini"): - if os.path.isfile(default_file) and os.access(default_file, os.R_OK): - logger.debug("Using Default File : {}".format(default_file)) - config_file = default_file - break - -if config_file is None: - logger.warning("Yoyo applied without a configuration file not attempting to update credential stuff.") - raise ValueError("No yoyo configuration file given.") -else: - # Load Configuration - try: - # Read Our INI with our data collection rules - config = ConfigParser() - config.read(config_file) - except Exception as general_config_error: - logger.warning("Couldn't read configuration file {} properly. 
Not doing api credential stuff.") - raise general_config_error - else: - # DB Config Items - yoyo_config = dict(config._sections) +# DB Config Items +yoyo_config = manoward_configs["database"] - for user_type in ["api", "store", "analyze"]: +for user_type in ["api", "store", "analyze"]: - if yoyo_config.get("manowar", dict()).get("{}_password".format(user_type), None) is None: - # We not doing it - do_api_attempt = False - else: - username = yoyo_config.get("manowar", dict()).get("{}_username".format(user_type), None) - hostmask = yoyo_config.get("manowar", dict()).get("{}_hostmask".format(user_type), None) - password = yoyo_config.get("manowar", dict()).get("{}_password".format(user_type), None) - - req_enc = yoyo_config.get("manowar", dict()).get("req_enc", "SSL") + if yoyo_config.get("manowar", dict()).get("{}_password".format(user_type), None) is None: + # We not doing it + do_api_attempt = False + else: + username = yoyo_config.get("manowar", dict()).get("{}_dbusername".format(user_type), None) + hostmask = yoyo_config.get("manowar", dict()).get("{}_hostmask".format(user_type), "%") + password = yoyo_config.get("manowar", dict()).get("{}_dbpassword".format(user_type), None) + req_enc = yoyo_config.get("manowar", dict()).get("req_enc", "SSL") - this_u = '"{0}"@"{1}"'.format(username, hostmask) - steps.append(step("create or replace user {0} identified by \"{1}\"".format(this_u, - password), - "drop user {}".format(this_u))) + this_u = '"{0}"@"{1}"'.format(username, hostmask) - if req_enc in ("SSL", "NONE", "X509"): - steps.append(step("alter user {0} REQUIRE {1}".format(this_u, - req_enc))) - else: - raise ValueError("req_enc not set to valid entry") + steps.append(step("create or replace user {0} identified by \"{1}\"".format(this_u, + password), + "drop user {}".format(this_u))) - # Now Add Permissions - for this_table in _table_defs.keys(): - if user_type in _table_defs[this_table].keys(): - logger.info("Adding rights to {} for {}".format(this_table, user_type)) + if req_enc in ("SSL", "NONE", "X509"): + steps.append(step("alter user {0} REQUIRE {1}".format(this_u, + req_enc))) + else: + raise ValueError("req_enc not set to valid entry") - for right in _table_defs[this_table][user_type]: + # Now Add Permissions + for this_table in _table_defs.keys(): + if user_type in _table_defs[this_table].keys(): + logger.info("Adding rights to {} for {}".format(this_table, user_type)) - steps.append(step("grant {} on {} to {}".format(right, - this_table, - this_u))) - else: - logger.debug("User {} Not granted access to {}".format(user_type, this_table)) + for right in _table_defs[this_table][user_type]: - finally: - pass + steps.append(step("grant {} on {} to {}".format(right, + this_table, + this_u))) + else: + logger.debug("User {} Not granted access to {}".format(user_type, this_table)) From 59ed81eb402ccdfa74c3fa29b1cde83676732d64 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 11:44:25 -0800 Subject: [PATCH 047/143] Some Bugfixes in Yoyo --- .../migrations/20200107_02_xxxxx-credentials.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py index b820d76..8f47c48 100644 --- a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py +++ b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py @@ -56,22 +56,22 @@ logger.info("Finding Configuration File yoyo.ini") -LOGGER.debug("No Config File Given Let's Look in Default Locations.") - 
possible_config_files = ["/etc/manowar/manoward.yaml", "./etc/manowar/manoward.yaml", "/usr/local/etc/manowar/manoward.yaml"] if os.environ.get("TRAVIS", None) is not None: logger.info("In a Travis Build Add the Travis Paths to Configuration.") possible_config_files.append("./travis/artifacts/manoward.yaml") +manoward_configs = None for default_file in possible_config_files: if os.path.isfile(default_file) and os.access(default_file, os.R_OK): - LOGGER.debug("Using Default File : {}".format(default_file)) + logger.debug("Using Default File : {}".format(default_file)) try: with open(default_file, "r") as manoward_config_file: - manwoard_configs = yaml.safe_load(manoward_config_file) + manoward_configs = yaml.safe_load(manoward_config_file) + except Exception as manoward_config_error: logger.error("Unable to Read Manoward Configuration.") logger.debug("Error : {}".format(manoward_config_error)) @@ -80,6 +80,10 @@ else: # I've now Got my Things logger.info("Found and loaded manoward.yaml") + break + +if manoward_configs is None: + raise TypeError("No Manowar Configs") From 43b3760bcb4d32fd0d0f58ca288ef2fa2a0711ee Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 11:50:51 -0800 Subject: [PATCH 048/143] Relative paths fam. --- yoyo_steps/migrations/20200107_02_xxxxx-credentials.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py index 8f47c48..aecf66d 100644 --- a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py +++ b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py @@ -60,7 +60,7 @@ if os.environ.get("TRAVIS", None) is not None: logger.info("In a Travis Build Add the Travis Paths to Configuration.") - possible_config_files.append("./travis/artifacts/manoward.yaml") + possible_config_files.append("../travis/artifacts/manoward.yaml") manoward_configs = None From 5381a2cfc167cdcb4c837159ee8647d7be4e9d96 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 14:32:43 -0800 Subject: [PATCH 049/143] credentials migration fix --- .../migrations/20200107_02_xxxxx-credentials.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py index aecf66d..8e915af 100644 --- a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py +++ b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py @@ -103,15 +103,17 @@ for user_type in ["api", "store", "analyze"]: - if yoyo_config.get("manowar", dict()).get("{}_password".format(user_type), None) is None: + if yoyo_config.get("{}_dbpassword".format(user_type), None) is None: # We not doing it do_api_attempt = False + + raise ValueError("Missing Configuration for {}".format(user_type)) else: - username = yoyo_config.get("manowar", dict()).get("{}_dbusername".format(user_type), None) - hostmask = yoyo_config.get("manowar", dict()).get("{}_hostmask".format(user_type), "%") - password = yoyo_config.get("manowar", dict()).get("{}_dbpassword".format(user_type), None) + username = yoyo_config.get("{}_dbusername".format(user_type), None) + hostmask = yoyo_config.get("{}_hostmask".format(user_type), "%") + password = yoyo_config.get("{}_dbpassword".format(user_type), None) - req_enc = yoyo_config.get("manowar", dict()).get("req_enc", "SSL") + req_enc = yoyo_config.get("req_enc", "SSL") this_u = '"{0}"@"{1}"'.format(username, hostmask) From 
a9d59e407f306c810f12153cbcd3fe0ac52db82a Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 15:57:59 -0800 Subject: [PATCH 050/143] Updating db_helper and some things. --- analyze.py | 26 ++----- db_helper.py | 67 ++++++++++++++++++- source_docs/db_setup.md | 13 ++-- ui.py | 27 +------- .../20200107_02_xxxxx-credentials.py | 3 + yoyo_steps/yoyo.ini.sample | 28 -------- 6 files changed, 84 insertions(+), 80 deletions(-) diff --git a/analyze.py b/analyze.py index a42b74b..dc086e1 100755 --- a/analyze.py +++ b/analyze.py @@ -74,20 +74,8 @@ LOGGER = logging.getLogger("analyze.py") - CONFIG = args.config - - if CONFIG is False: - # Let's Look for a Default File. - LOGGER.debug("No Config File Given Let's Look in Default Locations.") - for default_file in ("/etc/manowar/manoward.yaml", - "./etc/manowar/manoward.yaml", - "/usr/local/etc/manowar/manoward.yaml"): - - if os.path.isfile(default_file) and os.access(default_file, os.R_OK): - LOGGER.debug("Using Default File : {}".format(default_file)) - CONFIG = default_file - break - + CONFIG = db_helper.get_manoward(explicit_config=args.config, + only_file=False) def analyze(CONFIGDIR, CONFIG): @@ -101,13 +89,11 @@ def analyze(CONFIGDIR, CONFIG): # Parse my General Configuration if isinstance(CONFIG, dict): config_items = CONFIG + elif isinstance(CONFIG, str): + db_helper.get_manoward(explicit_config=CONFIG) else: - try: - with open(CONFIG) as yaml_config: - config_items = yaml.safe_load(yaml_config) - except Exception as yaml_error: - logger.debug("Error when reading yaml config {} ".format(yaml_error)) - sys.exit("Bad configuration file {}".format(CONFIG)) + raise TypeError("No Configuration Given.") + logger.debug("Configuration Items: {}".format(config_items)) diff --git a/db_helper.py b/db_helper.py index 3fced2a..da04944 100644 --- a/db_helper.py +++ b/db_helper.py @@ -13,16 +13,77 @@ dbname: db autocommit: bool charset: string (default: utf8) - - ''' +import logging +import os + import pymysql import pyjq -import logging +import yaml + + +def get_manoward(explicit_config=None, **kwargs): + + _manoward_defaults = ["/etc/manowar/manoward.yaml", + "./etc/manowar/manoward.yaml", + "/usr/local/etc/manowar/manoward.yaml"] + + ''' + Searches the filesystem for the correct manoward.yaml file and uses it. + ''' + + logger = logging.getLogger("manoward_configuration") + + if os.environ.get("TRAVIS", None) is not None: + logger.info("In a Travis Build Add the Travis Paths to Configuration.") + _manoward_defaults.append("../travis/artifacts/manoward.yaml") + _manoward_defaults.append("./travis/artifacts/manoward.yaml") + + + manoward_configs = None + + if isinstance(explicit_config, str): + # Overwrite files with only this option. 
+ _manoward_defaults = [explicit_config] + + for default_file in _manoward_defaults: + if os.path.isfile(default_file) and os.access(default_file, os.R_OK): + logger.debug("Using Default File : {}".format(default_file)) + + if kwargs.get("only_file", False) is False: + # Process the config file and return results + + try: + with open(default_file, "r") as manoward_config_file: + manoward_configs = yaml.safe_load(manoward_config_file) + + except Exception as manoward_config_error: + logger.error("Unable to Read Manoward Configuration.") + logger.debug("Error : {}".format(manoward_config_error)) + + raise manoward_config_error + else: + # I've now Got my Things + logger.info("Found and loaded manoward.yaml") + break + else: + # Return Just the filename + manoward_configs = default_file + + if kwargs.get("no_config_okay", False) is True and manoward_configs is None: + raise ValueError("No Manowar Configuration Found.") + + return manoward_configs + def get_conn(config, prefix=None, tojq=None, **kwargs): + ''' + Returns a DB Cursors (with pymysql) that supports all of the + hotness + ''' + # Given a Config Dictionary with an optional prefix and tojq # Pull the Data Out and Connect to the Database diff --git a/source_docs/db_setup.md b/source_docs/db_setup.md index cf6876a..94afbda 100644 --- a/source_docs/db_setup.md +++ b/source_docs/db_setup.md @@ -9,12 +9,15 @@ and administraive user. Then in the `yoyo_steps` directory you need to create a `yoyo.ini` file (see the `yoyo.ini.sample` as an example). In it you need to edit the target database with -your administrator username and password. Additionally you'll need to populate -credentials for your big 3 users, the api user, the storage user and the analyze -user. +your administrator username and password. -I'd encourage managing this file with a change management system so that you can -better manage these secrets contained here. +Additionally you'll need to populate credentials for your big 3 users, +the api user, the storage user and the analyze user in your `manoward.yaml` +file as the system will read that to find the username/password and connection +methods needed. + +I'd encourage managing these files with a change management system so that +you can better manage these secrets contained here. ## Application diff --git a/ui.py b/ui.py index 2db56d1..5f98c9b 100755 --- a/ui.py +++ b/ui.py @@ -54,20 +54,9 @@ LOGGER.info("Welcome to Man 'o War") FDEBUG = args.flaskdebug - CONFIG = args.config - - if CONFIG is False: - # Let's Look for a Default File. 
- LOGGER.debug("No Config File Given Let's Look in Default Locations.") - for default_file in ("/etc/manowar/manoward.yaml", - "./etc/manowar/manoward.yaml", - "/usr/local/etc/manowar/manoward.yaml"): - - if os.path.isfile(default_file) and os.access(default_file, os.R_OK): - LOGGER.debug("Using Default File : {}".format(default_file)) - CONFIG = default_file - break + CONFIG = db_helper.get_manoward(explicit_config=args.config, + only_file=False) def ui(CONFIG, FDEBUG): @@ -79,17 +68,7 @@ def ui(CONFIG, FDEBUG): logger = logging.getLogger("ui.ui") - if isinstance(CONFIG, dict): - logger.info("Using Configuration as a Dict.") - config_items = CONFIG - else: - logger.info("Reading Configuration from String.") - try: - with open(CONFIG) as yaml_config: - config_items = yaml.safe_load(yaml_config) - except Exception as yaml_error: - logger.debug("Error when reading yaml config {} ".format(yaml_error)) - sys.exit("Bad configuration file {}".format(CONFIG)) + config_items = CONFIG logger.debug("Configuration Items: {}".format(config_items)) diff --git a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py index 8e915af..d61db97 100644 --- a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py +++ b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py @@ -56,6 +56,9 @@ logger.info("Finding Configuration File yoyo.ini") +# Since Im in a subdirectory when running this I need to do +# these things by hand instead of using db_helper +# Because of the way yoyo is setup possible_config_files = ["/etc/manowar/manoward.yaml", "./etc/manowar/manoward.yaml", "/usr/local/etc/manowar/manoward.yaml"] if os.environ.get("TRAVIS", None) is not None: diff --git a/yoyo_steps/yoyo.ini.sample b/yoyo_steps/yoyo.ini.sample index 4d03b78..1f05990 100644 --- a/yoyo_steps/yoyo.ini.sample +++ b/yoyo_steps/yoyo.ini.sample @@ -16,31 +16,3 @@ verbosity = 3 # Batch Mode batch_mode = on - -[manowar] -# Require an Encrypted connection. Useful if using TLS encryption in transit -# with an RDS instance as you can force an encrypted connection -# Options are NONE, SSL or X509 you'll have to do custom work to make -# Certificate auth work (or just request it and I'll hack on it) -req_enc = SSL - -# User for API that does most of everything nowadays. -# In the future almost everything should go through this -# and the anayze user -api_username = manowar -#api_password = replaceme -api_hostmask = % - -# This is for the old SSH collector system. In the future -# This should get replaced with either an SSH collector that -# uses the api or manowar_agent directly -store_username = manowar_store -#store_password = replaceme -store_hostmask = % - -# User for the analyze audit processes -# Seperates out the two for better splitting of things. -# Mainly read -analyze_username = manowar_store -#analyze_password = replaceme -analyze_hostmask = % From 7d0f77e4d3b3cfe2cc860e41cefed5fd660a1d78 Mon Sep 17 00:00:00 2001 From: Chris Halbersma Date: Fri, 10 Jan 2020 17:03:26 -0800 Subject: [PATCH 051/143] Migrated Collate over to New manoward config. 
--- analyze.py | 2 +- collate.py | 265 ++++++++++++++++++++++------------------- travis/analyze_test.sh | 2 +- 3 files changed, 147 insertions(+), 122 deletions(-) diff --git a/analyze.py b/analyze.py index dc086e1..159fa74 100755 --- a/analyze.py +++ b/analyze.py @@ -90,7 +90,7 @@ def analyze(CONFIGDIR, CONFIG): if isinstance(CONFIG, dict): config_items = CONFIG elif isinstance(CONFIG, str): - db_helper.get_manoward(explicit_config=CONFIG) + config_items = db_helper.get_manoward(explicit_config=CONFIG) else: raise TypeError("No Configuration Given.") diff --git a/collate.py b/collate.py index 827ed36..a2e1db0 100755 --- a/collate.py +++ b/collate.py @@ -19,31 +19,43 @@ from copy import deepcopy from time import time from time import sleep +import logging import threading import multiprocessing # Yes I use both! from queue import Queue +import db_helper + if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument("-c", "--config", help="Main collate.ini file", required=True) - parser.add_argument("-V", "--verbose", action='store_true', help="Verbose Mode Show more Stuff") + parser.add_argument("-c", "--config", help="Main collate.ini file", required=False, default=False) + parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) + parser._optionals.title = "DESCRIPTION " # Parser Args args = parser.parse_args() - CONFIG=args.config + CONFIG = db_helper.get_manoward(explicit_config=args.config) - if args.verbose: - VERBOSE=True + VERBOSE = len(args.verbose) + + if VERBOSE == 0: + logging.basicConfig(level=logging.ERROR) + elif VERBOSE == 1: + logging.basicConfig(level=logging.WARNING) + elif VERBOSE == 2: + logging.basicConfig(level=logging.INFO) else: - VERBOSE=False + logging.basicConfig(level=logging.DEBUG) def collate(CONFIG): + logger = logging.getLogger("collate") + COLLATION_TIME = int(time()) # Better Fresh Info/Unifies Lists between what is returned by the api (auditresults) @@ -52,54 +64,46 @@ def collate(CONFIG): MIDNIGHT = COLLATION_TIME - seconds_after_midnight twodaytimestamp = MIDNIGHT - (86400*2) - # Parse Information - try: - # Read Our INI with our data collection rules - config = ConfigParser() - config.read(CONFIG) - # Debug - #for i in config : - #for key in config[i] : - #print (i, "-", key, ":", config[i][key]) - except Exception as e: # pylint: disable=broad-except, invalid-name - sys.exit('Bad configuration file {}'.format(e)) - - - # DB Config Items - db_config_items=dict() - for section in config: - if section in ["database"] : - for item in config[section]: - db_config_items[item] = config[section][item] + # Parse my General Configuration + if isinstance(CONFIG, dict): + config_items = CONFIG + elif isinstance(CONFIG, str): + config_items = db_helper.get_manoward(explicit_config=CONFIG) + else: + raise TypeError("No Configuration Given.") + ''' def giveMeDB(db_config_items) : db_conn = pymysql.connect(host=db_config_items['dbhostname'], port=int(db_config_items['dbport']), user=db_config_items['dbusername'], passwd=db_config_items['dbpassword'], db=db_config_items['dbdb'], autocommit=True) dbmessage = "Good, connected to " + db_config_items['dbusername'] + "@" + db_config_items['dbhostname'] + ":" + db_config_items['dbport'] + "/" + db_config_items['dbdb'] return db_conn, dbmessage + ''' + + def grab_single_collated(db_config, result_enum, type_to_grab): - def grab_single_collated( db_config, result_enum, type_to_grab ) : + logger = logging.getLogger("grab_single_collated") - db_conn, 
dbmessage = giveMeDB(db_config) + db_conn = db_helper.get_conn(db_config, prefix="analyze_", tojq=".database", ac_def=True) cur = db_conn.cursor() # Check Result Enum - if result_enum not in ["pass", "fail", "notafflicted" ]: + if result_enum not in ["pass", "fail", "notafflicted"]: raise Exception("Result Enum not in pass/fail/notafflicted. Instead it's : ", str(results_enum)) - if type_to_grab not in ["acoll", "pop", "srvtype" ]: + if type_to_grab not in ["acoll", "pop", "srvtype"]: raise Exception("Type to Grab Unknown. Not in acoll, pop, srvtype instead it's : ", str(type_to_grab)) - if type_to_grab == "acoll" : + if type_to_grab == "acoll": grouper = "audits.audit_name" table = "audits_by_acoll" - elif type_to_grab == "pop" : + elif type_to_grab == "pop": grouper = "hosts.pop" table = "audits_by_pop" - elif type_to_grab == "srvtype" : + elif type_to_grab == "srvtype": grouper = "hosts.srvtype" table = "audits_by_srvtype" @@ -109,45 +113,56 @@ def grab_single_collated( db_config, result_enum, type_to_grab ) : grab_single_collated_query_list.append("join hosts on fk_host_id = hosts.host_id ") grab_single_collated_query_list.append("join audits on fk_audits_id = audits.audit_id ") grab_single_collated_query_list.append("WHERE") - grab_single_collated_query_list.append("audit_result = '" + result_enum + "'") - grab_single_collated_query_list.append("and last_audit >= FROM_UNIXTIME(" + str(twodaytimestamp) + ") ") + grab_single_collated_query_list.append("audit_result = %s ") + grab_single_collated_query_list.append("and last_audit >= FROM_UNIXTIME(%s) ") grab_single_collated_query_list.append("group by " + grouper + ", fk_audits_id") + grab_scq_args = [result_enum, twodaytimestamp] + grab_single_collated_query = " ".join(grab_single_collated_query_list) #print(grab_single_collated_query) - cur.execute(grab_single_collated_query) - if cur.rowcount: - query_results_list = cur.fetchall() - else : - # No Results - # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - query_results_list = [] - - cur.close() - + try: + gsqq_debug = cur.mogrify(grab_single_collated_query, grab_scq_args) + logger.debug("GSQQ Debug : {}".format(gsqq_debug)) + cur.execute(grab_single_collated_query, grab_scq_args) + except Exception as gscq_query_error: + logger.error("Unable to Grab GSCQ Error for Group : {} on type {}".format(grouper, type_to_grab)) + raise gscq_query_error + else: + + if cur.rowcount: + query_results_list = cur.fetchall() + else: + # No Results + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + query_results_list = list() + finally: + cur.close() - #print("Query Results: ", query_results_list) return query_results_list - def grab_multiple_collated( db_config, type_to_grab ) : + def grab_multiple_collated(db_config, type_to_grab): - if type_to_grab not in ["acoll", "pop", "srvtype" ]: + logger = logging.getLogger("grab_multiple_collated") + + # TODO Argumentize this one. + if type_to_grab not in ["acoll", "pop", "srvtype"]: raise Exception("Type to Grab Unknown. 
Not in acoll, pop, srvtype instead it's : ", str(type_to_grab)) full_results_list = dict() - for item in ["pass", "fail", "notafflicted" ] : - this_results_list = grab_single_collated( db_config, item, type_to_grab ) + for item in ["pass", "fail", "notafflicted"]: + this_results_list = grab_single_collated(db_config, item, type_to_grab) #print(this_results_list) - for result in this_results_list : + for result in this_results_list: # Create an Entry For each result type (pop, srvtype or audit) - if result[0] not in full_results_list.keys() : + if result[0] not in full_results_list.keys(): # No Entry so Create a New Result Dict full_results_list[result[0]] = dict() # Add the fk_audit_id Number for Reference @@ -156,52 +171,50 @@ def grab_multiple_collated( db_config, type_to_grab ) : if result[1] not in full_results_list[result[0]].keys(): full_results_list[result[0]][result[1]] = dict() - # Add my item (but to the real place now full_results_list[result[0]][result[1]][item] = result[2] - #print("Full Results List", full_results_list) return full_results_list - def grab_all_table_data( db_config ): + def grab_all_table_data(db_config): + + logger = logging.getLogger("grab_all_table_data") updates = 0 inserts = 0 - for table in ["acoll", "pop", "srvtype" ]: - table_results = grab_multiple_collated( db_config, table) - - #print(table_results) + for table in ["acoll", "pop", "srvtype"]: - current_table_results = get_current_table_data( db_config, table) + table_results = grab_multiple_collated(db_config, table) - #print(current_table_results) + current_table_results = get_current_table_data(db_config, table) - this_updates, this_inserts = compare_update_insert_table(table_results, current_table_results, table, db_config ) + this_updates, this_inserts = compare_update_insert_table(table_results, current_table_results, table, db_config) updates += this_updates inserts += this_inserts + logger.debug("For table {} I:{} U:{}".format(table, this_inserts, this_updates)) + return updates, inserts - def compare_update_insert_table(collected, currently_on_disk, table, db_config ): + def compare_update_insert_table(collected, currently_on_disk, table, db_config): + + logger = logging.getLogger("compare_update_insert_table") updates = 0 inserts = 0 - #print("COMPARE") + db_conn = db_helper.get_conn(db_config, prefix="analyze_", tojq=".database", ac_def=True) COLLATE_TIME = int(time()) - db_conn, dbmessage = giveMeDB(db_config) cur = db_conn.cursor() collected_index = list() - #print("Pre-Hydrated Collections", collected) - # Hydrate Collecteded Items for index in collected.keys(): @@ -226,45 +239,47 @@ def compare_update_insert_table(collected, currently_on_disk, table, db_config ) #print("Current Amounts", currently_on_disk) ## IDs to Update Used in SQL - update_ids = [ item[0] for item in currently_on_disk if item[1] in collected.keys() and item[2] in collected[item[1]].keys() and collected[item[1]][item[2]]["pass"] == item[3] and collected[item[1]][item[2]]["fail"] == item[4] and collected[item[1]][item[2]]["notafflicted"] == item[5] ] + update_ids = [item[0] for item in currently_on_disk + if item[1] in collected.keys() + and item[2] in collected[item[1]].keys() + and collected[item[1]][item[2]]["pass"] == item[3] + and collected[item[1]][item[2]]["fail"] == item[4] + and collected[item[1]][item[2]]["notafflicted"] == item[5] + ] ## Items to Insert ## Part of What's Needed current_text_location = dict() - for i in range(0, len(currently_on_disk)) : + for i in range(0, len(currently_on_disk)): 
#print(currently_on_disk[i][1], currently_on_disk[i][2]) - if currently_on_disk[i][1] not in current_text_location.keys() : + if currently_on_disk[i][1] not in current_text_location.keys(): current_text_location[currently_on_disk[i][1]] = dict() - if currently_on_disk[i][2] not in current_text_location[currently_on_disk[i][1]].keys() : + if currently_on_disk[i][2] not in current_text_location[currently_on_disk[i][1]].keys(): current_text_location[currently_on_disk[i][1]][currently_on_disk[i][2]] = i #print("On Disk Lookups", current_text_location) insert_list = list() - insert_list = [ [ item[0], item[1], COLLATE_TIME, COLLATE_TIME, collected[item[0]][item[1]]["pass"], collected[item[0]][item[1]]["fail"], collected[item[0]][item[1]]["notafflicted"] ] \ + insert_list = [[item[0], item[1], COLLATE_TIME, COLLATE_TIME, collected[item[0]][item[1]]["pass"], collected[item[0]][item[1]]["fail"], collected[item[0]][item[1]]["notafflicted"]] \ for item in collected_index \ - if item[0] not in current_text_location.keys() or item[1] not in current_text_location[item[0]].keys() or ( collected[item[0]][item[1]]["pass"], collected[item[0]][item[1]]["fail"], collected[item[0]][item[1]]["notafflicted"] ) \ - != ( currently_on_disk[current_text_location[item[0]][item[1]]][3], currently_on_disk[current_text_location[item[0]][item[1]]][4], currently_on_disk[current_text_location[item[0]][item[1]]][5] ) ] - - if VERBOSE : - print("Updates") - print(update_ids) + if item[0] not in current_text_location.keys() or item[1] not in current_text_location[item[0]].keys() or (collected[item[0]][item[1]]["pass"], collected[item[0]][item[1]]["fail"], collected[item[0]][item[1]]["notafflicted"]) \ + != (currently_on_disk[current_text_location[item[0]][item[1]]][3], currently_on_disk[current_text_location[item[0]][item[1]]][4], currently_on_disk[current_text_location[item[0]][item[1]]][5])] - print("Inserts") - print(insert_list) + logger.debug("Update IDs : {}".format(update_ids)) + logger.debug("Insert List : {}".format(insert_list)) try: if len(update_ids) > 0 : #update_ids_string = ",".join(map(str, update_ids)) # Update ID's will now be used as a query paramertization list - update_ids_parameters = [ " %s " for x in update_ids ] - update_ids_string = ",".join(map(str, update_ids_parameters) ) - update_query_parameters = [ str(COLLATE_TIME) ] + update_ids_parameters = [" %s " for x in update_ids] + update_ids_string = ",".join(map(str, update_ids_parameters)) + update_query_parameters = [str(COLLATE_TIME)] update_query_parameters.extend(update_ids) # Query has been parameterized update_query = "UPDATE " @@ -274,85 +289,95 @@ def compare_update_insert_table(collected, currently_on_disk, table, db_config ) "_id in ( " + update_ids_string + " ) " try: + update_query_debug = cur.mogrify(update_query, update_query_parameters) + + logger.info("Running Update Query for table : {}".format(table)) + logger.debug("{}".format(table, update_query_debug)) + cur.execute(update_query, update_query_parameters) + except Exception as update_query_error: + logger.error("Error updating hosts for {} : {}".format(table, update_query_error)) + else: updates += len(update_ids) - except Exception as e: - print("Error updating hosts for collate : ", e ) - except Exception as e : - print("Error doing Updates. ", e) + except Exception as update_error: + logger.error("Error doing Updates. {}".format(update_error)) - #print("Inserts") + + # Inserts try: - if len(insert_list) > 0 : + if len(insert_list) > 0: # Only do this is there's stuff. 
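# A condensed sketch of the id-list parameterization built above,
# assuming the pymysql cursor `cur`, the `update_ids` list, and the
# COLLATE_TIME timestamp already defined in this function, plus the
# audits_by_acoll column names used elsewhere in this file (the real
# query builds the table name from the `table` argument): one %s
# placeholder is emitted per id, so the ids travel as bound query
# arguments instead of being formatted into the SQL string.
placeholders = ", ".join(["%s"] * len(update_ids))
update_query = ("UPDATE audits_by_acoll "
                "SET acoll_last_audit = FROM_UNIXTIME(%s) "
                "WHERE acoll_id in ( " + placeholders + " )")
cur.execute(update_query, [COLLATE_TIME] + list(update_ids))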
insert_query = [] # This query is properly paramaterized and the table value is properly # hardcoded earlier in the program. I'm noseccing it. - insert_query.append("INSERT into audits_by_" + table + " ( " + table + "_text, ") # nosec - insert_query.append("fk_audits_id, " + table + "_initial_audit, " + table + "_last_audit, " + table + "_passed, " + table + "_failed, " + table + "_exempt ) " ) - insert_query.append("VALUES( %s, %s, FROM_UNIXTIME(%s), FROM_UNIXTIME(%s), %s, %s, %s ) " ) + insert_query.append("INSERT into audits_by_{0} ( {0}_text, ".format(table)) # nosec + insert_query.append("fk_audits_id, {0}_initial_audit, {0}_last_audit, {0}_passed, {0}_failed, {0}_exempt ) ".format(table)) + insert_query.append("VALUES( %s, %s, FROM_UNIXTIME(%s), FROM_UNIXTIME(%s), %s, %s, %s ) ") insert_query_string = " ".join(insert_query) try: cur.executemany(insert_query_string, insert_list) + except Exception as insert_query_error: + logger.error("Error doing Inserts for {} : {}".format(table, insert_error)) + else: inserts += len(insert_list) - except Exception as e : - print("Error doing Inserts for collation : ", e ) - except Exception as e: - print("Error doing Inserts. ", e) + except Exception as insert_error: + logger.error("Error doing Inserts : {}".format(insert_error)) return updates, inserts - def get_current_table_data( db_config, table ) : - + def get_current_table_data(db_config, table): - db_conn, dbmessage = giveMeDB(db_config) + db_conn = db_helper.get_conn(db_config, prefix="analyze_", tojq=".database", ac_def=True) cur = db_conn.cursor() grab_current_table_list = list() - grab_current_table_list.append("SELECT " + table + "_id, " + table + "_text, fk_audits_id, " + table + "_passed, " + table + "_failed, " + table + "_exempt ") - grab_current_table_list.append("FROM audits_by_" + table) - grab_current_table_list.append("WHERE") - grab_current_table_list.append(table + "_last_audit >= now() - INTERVAL " + FRESH + " SECOND") - grab_current_table_list.append("group by " + table + "_text, fk_audits_id") + grab_current_table_list.append("SELECT {0}_id, {0}_text, fk_audits_id, {0}_passed, {0}_failed, {0}_exempt ".format(table)) + grab_current_table_list.append("FROM audits_by_{0}".format(table)) + grab_current_table_list.append("WHERE {0}_last_audit >= now() - INTERVAL %s SECOND".format(table)) + grab_current_table_list.append("group by {0}_text, fk_audits_id".format(table)) + grab_ctl_args = [db_config["collate"]["freshseconds"]] grab_current_table_query = " ".join(grab_current_table_list) #print(grab_current_table_query) - cur.execute(grab_current_table_query) - - if cur.rowcount: - query_results_list = cur.fetchall() - else : - # No Results - # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- query_results_list = [] - - cur.close() + try: + cur.execute(grab_current_table_query, grab_ctl_args) + except Exception as grab_ctl_query_error: + logger.error("Unable to grab current table list table {}".format(table)) + else: + if cur.rowcount: + query_results_list = cur.fetchall() + else : + query_results_list = list() + finally: + cur.close() - #print("Currently ON Disk", query_results_list) return query_results_list # Globals - FRESH = db_config_items["freshseconds"] + FRESH = config_items["collate"]["freshseconds"] results_dict = dict() - updates, inserts = grab_all_table_data(db_config_items) + results_dict["updates"], results_dict["inserts"] = grab_all_table_data(config_items) - results_dict["inserts"] = inserts - results_dict["updates"] = updates + return results_dict - print(json.dumps(results_dict, sort_keys=True, indent=4)) if __name__ == "__main__": - collate(CONFIG) + results = collate(CONFIG) + + + print(json.dumps(results, sort_keys=True, indent=4)) + + diff --git a/travis/analyze_test.sh b/travis/analyze_test.sh index 0cf0661..f6426aa 100755 --- a/travis/analyze_test.sh +++ b/travis/analyze_test.sh @@ -10,7 +10,7 @@ analyze_good=$? if [[ ${analyze_good} -eq 0 ]] ; then # Analyze Worked, let's try to collate # Collate Here - ./collate.py -c travis/artifacts/collate.ini -V + ./collate.py -vvv collate_good=$? From 6c4939e0464bc139de9b6eb8c9c6a44f0cdf1215 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 16 Jan 2020 11:15:49 -0800 Subject: [PATCH 052/143] Updating Storage and StorageJSONVerify --- .gitignore | 4 + db_helper.py | 5 +- jelly_api_2/puthostjson.py | 13 +- process_ip_intel.py | 11 +- storage.py | 390 +++++++----------- storageJSONVerify.py | 58 +-- travis/artifacts/manoward.yaml | 2 + ui.py | 3 +- .../20200107_02_xxxxx-credentials.py | 5 +- 9 files changed, 204 insertions(+), 287 deletions(-) diff --git a/.gitignore b/.gitignore index d96f37a..96869e3 100644 --- a/.gitignore +++ b/.gitignore @@ -78,3 +78,7 @@ tmp* # Ignore Ini *.ini +/etc/manowar/manoward.yaml +/etc/manowar_agent/collector.yaml +/etc/manowar_agent/minion +/etc/manowar_agent/saltcell.yaml diff --git a/db_helper.py b/db_helper.py index da04944..a62acfa 100644 --- a/db_helper.py +++ b/db_helper.py @@ -28,7 +28,7 @@ def get_manoward(explicit_config=None, **kwargs): _manoward_defaults = ["/etc/manowar/manoward.yaml", "./etc/manowar/manoward.yaml", "/usr/local/etc/manowar/manoward.yaml"] - + ''' Searches the filesystem for the correct manoward.yaml file and uses it. ''' @@ -48,7 +48,8 @@ def get_manoward(explicit_config=None, **kwargs): _manoward_defaults = [explicit_config] for default_file in _manoward_defaults: - if os.path.isfile(default_file) and os.access(default_file, os.R_OK): + + if os.path.isfile(default_file) is True and os.access(default_file, os.R_OK): logger.debug("Using Default File : {}".format(default_file)) if kwargs.get("only_file", False) is False: diff --git a/jelly_api_2/puthostjson.py b/jelly_api_2/puthostjson.py index 88f05d0..c08bffc 100644 --- a/jelly_api_2/puthostjson.py +++ b/jelly_api_2/puthostjson.py @@ -73,29 +73,20 @@ def generic_puthostjson(): g.logger.debug("Recieved Authenticated Request Request") if request.json is None: - # No Json Data Given error_dict["nodata"] = True error = True - - #print(type(g.SCHEMAFILE)) - if error is False: # Note that this is an API host request.json["collection_status"] = "STINGCELL" # Do a Storage Verify on this Mofo. 
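# Sketch of the flow this hunk moves puthostjson to, assuming the new
# storageJSONVerify(request.json) signature that returns a
# (passed, message) pair and the storage(config, data, sapi=True)
# entry point reworked later in this patch:
check_result_passed, check_result_message = storageJSONVerify(request.json)

if check_result_passed is True:
    this_store_collection_result = storage(g.config_items,
                                           request.json,
                                           sapi=True)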
- check_result = storageJSONVerify(g.config_items["sapi"].get("puthost_schema", "puthost_schema.json"), \ - request.json) - - # Parse Result - check_result_passed = check_result[0] - check_result_message = check_result[1] + check_result_passed, check_result_message = storageJSONVerify(request.json) if check_result_passed is True: # It's good do the Storage - this_store_collection_result = storage(g.config_items["sapi"].get("storageconfig", "storage.ini"), + this_store_collection_result = storage(g.config_items, request.json, sapi=True) diff --git a/process_ip_intel.py b/process_ip_intel.py index 32f5c80..608c364 100644 --- a/process_ip_intel.py +++ b/process_ip_intel.py @@ -29,19 +29,20 @@ def process_ip_intel(config_dict=False, ip=False, iptype=False, host=False, mult if config_dict["ip_intel"].get("use_auth", False) == True : # Use an auth token when you make the query - request_headers["Authorization"] = "{}:{}".format(self.config_dict["ip_intel"].get("intel_username","nobody"), \ - self.config_dict["ip_intel"].get("intel_token","nothing")) + request_headers["Authorization"] = "{}:{}".format(self.config_dict["ip_intel"].get("intel_username","nobody"), + self.config_dict["ip_intel"].get("intel_token","nothing") + ) - if ( multireport == False and ( ip == False or iptype == False )) or host == False : + if (multireport is False and (ip is False or iptype is False)) or host is False: raise Exception("Incomplete Specification") - if multireport == False : + if multireport is False: # Use the Given query_arguments["ip"] = "'{}'".format(str(ip)) query_arguments["iptype"] = "'{}'".format(str(iptype)) else : request_headers["Content-Type"] = "application/json" - if type(multireport) == list : + if isinstance(multireport, list): # multireport pass else : diff --git a/storage.py b/storage.py index ab7755b..59bdbf6 100755 --- a/storage.py +++ b/storage.py @@ -23,10 +23,11 @@ # IP Intelligence from process_ip_intel import process_ip_intel +import db_helper if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument("-c", "--config", help="JSON Config File with our Storage Info", required=True) + parser.add_argument("-c", "--config", help="JSON Config File with our Storage Info", required=False, default=None) parser.add_argument("-j", "--json", help="json file to store", required=True) parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) @@ -35,7 +36,8 @@ # Grab Variables JSONFILE = args.json - CONFIG = args.config + CONFIG = db_helper.get_manoward(explicit_config=args.config, + only_file=False) VERBOSE = len(args.verbose) @@ -56,184 +58,8 @@ LOGGER.info("Welcome to Storage Module") -def storage(CONFIG, JSONFILE, sapi=False): - - ''' - Does a Storage of an Object. 
- ''' - - logger = logging.getLogger("storage.py") - - STORAGE_TIME = int(time()) - storage_stats = dict() - - try: - # Read Our INI with our data collection rules - config = ConfigParser() - config.read(CONFIG) - # Debug - #for i in config : - #for key in config[i] : - #print (i, "-", key, ":", config[i][key]) - except Exception as e: # pylint: disable=broad-except, invalid-name - storage_stats["config-parse-status"] = "Failure with " + str(e) - - if __name__ == "__main__": - print(json.dumps(storage_stats, sort_keys=True, indent=4)) - exit(1) - - return storage_stats - - - db_config_items = dict() - ip_intel_config = dict() - # Collection Items - for section in config: - if section == "database": - for item in config[section]: - db_config_items[item] = config[section][item] - if section == "ip_intel": - for item in config[section]: - ip_intel_config[item] = ast.literal_eval(config[section][item]) - - MAX = db_config_items["collectionmaxchars"] - - storage_stats = dict() - storage_stats["storage_timestamp"] = STORAGE_TIME - - try: - db_conn = pymysql.connect(host=db_config_items['dbhostname'], - port=int(db_config_items['dbport']), - user=db_config_items['dbusername'], - passwd=db_config_items['dbpassword'], - db=db_config_items['dbdb']) - - dbmessage = "Good, connected to {}@{}:{}/{}".format(db_config_items['dbusername'], - db_config_items['dbhostname'], - db_config_items['dbport'], - db_config_items['dbdb']) - - storage_stats["db-status"] = dbmessage - - except Exception as dbconnection_error: - storage_stats["db-status"] = "Connection Failed" - storage_stats["db-error"] = str(dbconnection_error) - return storage_stats - - collection_good, hostdata, results_data = parse_json_file(JSONFILE=JSONFILE) - storage_stats["collection_status"] = collection_good - - - if collection_good: - try: - host_id = insert_update_host(hostdata, db_conn) - hostname = hostdata["hostname"] - storage_stats["collection_timestamp"] = hostdata['last_update'] - storage_stats["inserts"], storage_stats["updates"], storage_stats["errors"] = insert_update_collections(db_conn, - host_id, - results_data, - MAX, - hostdata['last_update'], - hostname) - - except Exception as dbconnection_error: - logger.error("{}Error Updating Host Collecitons {}{}".format(Fore.RED, - dbconnection_error, - Style.RESET_ALL)) - - storage_stats["insert_update"] = 0 - storage_stats["errors"] = 1 - else: - logger.info("{}Updating Collection Success{}{}".format(Fore.GREEN, - storage_stats, - Style.RESET_ALL)) - - # Updating Collection has been a success Let's check if this is a sapi host. - if sapi is True: - # I am so update sapi table - storage_stats["sapi_data"] = store_as_SAPI_host(host_id=host_id, - db_conn=db_conn, - hostname=hostname) - - do_ipintel = ip_intel_config.get("do_intel", False) - - logger.debug("Doing IP Intel. 
({} Statement).".format(do_ipintel)) - - if do_ipintel is True and "ip_intel" in hostdata.keys(): - # Process the IP Intelligence for this host - result = process_ip_intel(config_dict={"ip_intel" : ip_intel_config}, - multireport=hostdata["ip_intel"], - host=hostname) - if result == 200: - logger.info("{}IP Intel : {} for host {}{}".format(Fore.GREEN, result, hostname, Style.RESET_ALL)) - else: - logger.error("{}IP Intel : {} for host {}{}".format(Fore.RED, result, hostname, Style.RESET_ALL)) - - else: - storage_stats["inserts_updates"] = 0 - storage_stats["errors"] = 1 - - - - try: - db_conn.commit() - db_conn.close() - except Exception as e: - logger.error("{}Error Closing DB Connection{}".format(Fore.RED, Style.RESET_ALL)) - - if __name__ == "__main__": - print(json.dumps(storage_stats, sort_keys=True, indent=4)) - - return storage_stats - -def parse_json_file(JSONFILE=False, VERBOSE=False): - - ''' - Parse JSON File. Pretty self explanatory. Parse that JSON file using the json module - ''' - - logger = logging.getLogger("storage:parse_json_file") - - # Return: collection_good, hostdata, results_data - collection_good = False - - # If we've got the dict passed to us instead of the filename - if isinstance(JSONFILE, dict): - # Treat the dict as the results - collection_results = JSONFILE - else: - # Generally means we're running it manually - with open(JSONFILE) as json_file: - # Parse my JSONFILE as a file and load it to a dict - collection_results = json.load(json_file) - - # No matter what check to see that I have "SSH SUCCESS" (Future more) in my collection_status - # Future will have more successful types - if collection_results['collection_status'] in ["SSH SUCCESS", "STINGCELL"]: - # Do Parse stuff - collection_good = True - hostdata = { - "host_uber_id" : collection_results['uber_id'], - "hostname" : collection_results['collection_hostname'], - "pop" : collection_results['pop'], - "srvtype" : collection_results['srvtype'], - "last_update" : collection_results['collection_timestamp'], - "status" : collection_results['status'] - } - results_data = collection_results['collection_data'] - - if "ip_intel" in collection_results: - hostdata["ip_intel"] = collection_results["ip_intel"] - - else: - # Failed for some reason. Ignoring any results. - print(collection_results["status"]) - collection_good = False - hostdata = dict() - results_data = dict() - - return collection_good, hostdata, results_data +##### TODO remove this def null_or_value(data_to_check, VERBOSE=False): logger = logging.getLogger("storage:null_or_value") @@ -245,7 +71,7 @@ def null_or_value(data_to_check, VERBOSE=False): data = "'" + str(data_to_check) + "'" return data -def insert_update_host(hostdata, db_conn, VERBOSE=False): +def insert_update_host(hostdata, db_conn): ''' This updates the Host table (not the collections, sapi or ip_intel tables. 
@@ -263,22 +89,18 @@ def insert_update_host(hostdata, db_conn, VERBOSE=False): insert_columns = ["hostname", "last_update"] insert_values = ["%s", "FROM_UNIXTIME(%s)"] - insert_columns_args = [hostdata["hostname"], hostdata["last_update"]] + insert_columns_args = [hostdata["collection_hostname"], hostdata["collection_timestamp"]] host_id_query_params = list() # SELECT * Specification - if isinstance(hostdata["host_uber_id"], int): + if isinstance(hostdata.get("uber_id", None), int): select_tail_specification = "from hosts where host_uber_id = %s " - host_id_query_params.append(hostdata["host_uber_id"]) + host_id_query_params.append(hostdata["uber_id"]) else: select_tail_specification = "from hosts where hostname = %s" - host_id_query_params.append(hostdata["hostname"]) - - # TODO Add MOWN Logic here someday + host_id_query_params.append(hostdata["collection_hostname"]) - #################################################################### - # Add host_id to Query. select_head_specification = "SELECT host_id " host_id_query = "SELECT host_id {}".format(select_tail_specification) @@ -306,23 +128,23 @@ def insert_update_host(hostdata, db_conn, VERBOSE=False): insert_columns_args.append(host_id) ## V2 Factors like pop srvtype and the like - for v2factor in [("pop", "pop"), ("srvtype", "srvtype"), ("status", "hoststatus"), ("host_uber_id", "host_uber_id")]: + for v2factor in [("pop", "pop"), ("srvtype", "srvtype"), ("status", "hoststatus"), ("uber_id", "host_uber_id")]: if hostdata[v2factor[0]] != "N/A" and hostdata[v2factor[0]] is not None: insert_columns.append(v2factor[1]) insert_values.append("%s") insert_columns_args.append(hostdata[v2factor[0]]) else: - logger.warning("No {0} given for host {1}, ignoring {0} column.".format(v2factor[0], hostdata["hostname"])) + logger.warning("No {0} given for host {1}, ignoring {0} column.".format(v2factor[0], hostdata["collection_hostname"])) replace_query = "REPLACE into hosts ( {} ) VALUES ( {} )".format(" , ".join(insert_columns), " , ".join(insert_values)) try: replace_query_debug = cur.mogrify(replace_query, insert_columns_args) - logger.debug("Replace Query for Host {} : {}".format(hostdata["hostname"], replace_query_debug)) + logger.debug("Replace Query for Host {} : {}".format(hostdata["collection_hostname"], replace_query_debug)) cur.execute(replace_query, insert_columns_args) except Exception as replace_error: - logger.error("Unable to do Replace Query for host {} with error : {}".format(hostdata["hostname"], replace_query_debug)) + logger.error("Unable to do Replace Query for host {} with error : {}".format(hostdata["collection_hostname"], replace_query_debug)) else: host_id = cur.lastrowid @@ -407,7 +229,7 @@ def store_as_SAPI_host(host_id, db_conn, hostname, VERBOSE=False): return return_dictionary -def insert_update_collections(db_conn, host_id, results_data, MAX, timestamp, hostname, VERBOSE=False): +def insert_update_collections(db_conn, host_id, hostdata, MAX): ''' Insert Update collections. 
@@ -416,6 +238,10 @@ def insert_update_collections(db_conn, host_id, results_data, MAX, timestamp, ho ''' logger = logging.getLogger("storage:insert_update_collections") + + logger.debug("Storing Collections for host_id : {}".format(host_id)) + + timestamp = hostdata["collection_timestamp"] cur = db_conn.cursor() @@ -423,45 +249,43 @@ def insert_update_collections(db_conn, host_id, results_data, MAX, timestamp, ho inserts = 0 updates = 0 - for item in results_data: + for ctype, ctype_dict in hostdata["collection_data"].items(): - if "collection_failed" in results_data[item]: + for subtype, value in ctype_dict.items(): + + killchars = "*;\\\'\"%=" + + collection_type = str(ctype)[0:int(MAX)] + + collection_subtype = str(subtype)[0:int(MAX)] - logger.info("{}{}Collection Failed for {} on host: {}{}".format(Back.CYAN, Fore.BLACK, - item, hostname, - Style.RESET_ALL)) - error_count += 1 - continue - else: - # No Error for this item Cycle through the collection - for collection in results_data[item]: - #print(collection) - killchars = "*;\\\'\"%=" - collection_type = str(item)[0:int(MAX)] - collection_subtype = str(collection)[0:int(MAX)] - - # Cycle throught value. Remove the banned characters and store it. - collection_value = "".join(c for c in str(results_data[item][collection])[0:int(MAX)] if c not in killchars) - - # Compare the value I have to the latest version - find_existing_query_args = [str(host_id), - str(collection_type), - str(collection_subtype), - str(collection_value)] - - find_existing_query = "SELECT {} {} {} {}".format(" collection_value, collection_id, last_update FROM collection ", - "WHERE fk_host_id = %s AND collection_type = %s ", - "AND collection_subtype = %s AND collection_value = %s ", - " Order by last_update desc limit 1 ") - - try: - cur.execute(find_existing_query, find_existing_query_args) - except Exception as update_collection_error: - logger.error("{}Trouble with query {} on host {} with error : {}{}".format(Fore.RED, - find_existing_query, - hostname, - update_collection_error, - Style.RESET_ALL)) + # Cycle throught value. Remove the banned characters and store it. 
+ collection_value = "".join(c for c in str(value)[0:int(MAX)] if c not in killchars) + + # Compare the value I have to the latest version + find_existing_query_args = [str(host_id), + str(collection_type), + str(collection_subtype), + str(collection_value)] + + ## Encode the Fresh Time Stuff here + find_existing_query = "SELECT {} {} {} {}".format(" collection_value, collection_id, last_update FROM collection ", + "WHERE fk_host_id = %s AND collection_type = %s ", + "AND collection_subtype = %s AND collection_value = %s ", + " Order by last_update desc limit 1 ") + + # See If I'm Updating This Result + try: + logger.debug(find_existing_query) + logger.debug(find_existing_query_args) + cur.execute(find_existing_query, find_existing_query_args) + except Exception as update_collection_error: + logger.error("{}Trouble with query {} on host {} with error : {}{}".format(Fore.RED, + find_existing_query, + host_id, + update_collection_error, + Style.RESET_ALL)) + else: updated = False @@ -501,8 +325,7 @@ def insert_update_collections(db_conn, host_id, results_data, MAX, timestamp, ho continue if not updated: - #print("Insert Brand Spaking New") - # Because there was no collection (new Collection) Or Old Collection Didn't Match + insert_query_head = " INSERT into collection ( fk_host_id, initial_update, last_update, collection_type, collection_subtype, collection_value ) " insert_query_mid = " VALUES (%s, FROM_UNIXTIME(%s), FROM_UNIXTIME(%s), %s , %s , %s)" insert_query_tail = "; " @@ -527,22 +350,111 @@ def insert_update_collections(db_conn, host_id, results_data, MAX, timestamp, ho inserts += 1 updated = True - # Commit My Recent Changes - db_conn.commit() if not updated: # Error error_count += 1 - # Keep this in here while Troubleshooting. Will stop the storage after each type - #break - # Loop Completed - # Close Cursor db_conn.commit() cur.close() # Return Statistics return inserts, updates, error_count +def storage(config_items, hostdata, sapi=False): + + ''' + Does a Storage of an Object. 
+ ''' + + logger = logging.getLogger("storage.py") + + STORAGE_TIME = int(time()) + storage_stats = dict() + + MAX = config_items["storage"]["collectionmaxchars"] + + storage_stats = dict() + storage_stats["storage_timestamp"] = STORAGE_TIME + + db_conn = db_helper.get_conn(config_items, prefix="store_", tojq=".database", ac_def=False) + + try: + try: + host_id = insert_update_host(hostdata, db_conn) + except Exception as insert_update_host_error: + logger.error("{}Unable to Update Host with Error : {}{}".format(Fore.RED, + insert_update_host_error, + Style.RESET_ALL)) + + raise insert_update_host_error + else: + logger.info(host_id) + + # Unique Data FTM + hostname = hostdata["collection_hostname"] + + + storage_stats["collection_timestamp"] = hostdata["collection_timestamp"] + + try: + storage_stats["inserts"], storage_stats["updates"], storage_stats["errors"] = insert_update_collections(db_conn, + host_id, + hostdata, + MAX) + except Exception as insert_update_collections_error: + logger.error("{}Unable to Update Collections associated with {}{}".format(Fore.RED, + hostname, + Style.RESET_ALL)) + logger.debug("Error : {}".format(insert_update_collections_error)) + + raise insert_update_collections_error + + except Exception as dbconnection_error: + logger.error("{}Error Updating Host Collecitons {}{}".format(Fore.RED, + dbconnection_error, + Style.RESET_ALL)) + + storage_stats["insert_update"] = 0 + storage_stats["errors"] = 1 + else: + logger.info("{}Updating Collection Success{}{}".format(Fore.GREEN, + storage_stats, + Style.RESET_ALL)) + + # Updating Collection has been a success Let's check if this is a sapi host. + if sapi is True: + # I am so update sapi table + storage_stats["sapi_data"] = store_as_SAPI_host(host_id=host_id, + db_conn=db_conn, + hostname=hostname) + + do_ipintel = config_items["ip_intel"].get("do_intel", False) + + logger.debug("Doing IP Intel. ({} Statement).".format(do_ipintel)) + + if do_ipintel is True and "ip_intel" in hostdata.keys(): + # Process the IP Intelligence for this host + result = process_ip_intel(config_dict=config_items, + multireport=hostdata["ip_intel"], + host=hostname) + if result == 200: + logger.info("{}IP Intel : {} for host {}{}".format(Fore.GREEN, result, hostname, Style.RESET_ALL)) + else: + logger.error("{}IP Intel : {} for host {}{}".format(Fore.RED, result, hostname, Style.RESET_ALL)) + + try: + db_conn.commit() + db_conn.close() + except Exception as e: + logger.error("{}Error Closing DB Connection{}".format(Fore.RED, Style.RESET_ALL)) + + if __name__ == "__main__": + print(json.dumps(storage_stats, sort_keys=True, indent=4)) + + return storage_stats + + + if __name__ == "__main__": - storage(CONFIG, JSONFILE) + storage(CONFIG) diff --git a/storageJSONVerify.py b/storageJSONVerify.py index 36f0cd5..dc2fd7c 100755 --- a/storageJSONVerify.py +++ b/storageJSONVerify.py @@ -17,8 +17,6 @@ # Schema Parser import jsonschema -JSONSCHEMA_FILEPATH = "/oc/local/secops/jellyfish2/" -JSONSCHEMA_DEFAULT_SCHEMA = JSONSCHEMA_FILEPATH+"/jellyfish_storage.json.schema" if __name__ == "__main__": @@ -27,8 +25,6 @@ # -h help # -s Audit Definitions (Required) # -j JSON File - - parser.add_argument("-s", "--schema", help="JSON Schema File to use for validation. 
(Default : jellyfish_storage.json.schema)", default=JSONSCHEMA_DEFAULT_SCHEMA) parser.add_argument("-j", "--json", help="JSON File to Validate", required="TRUE") args = parser.parse_args() @@ -36,11 +32,14 @@ schema_file = args.schema json_file = args.json -def storageJSONVerify(schema_file, json_file): +def storageJSONVerify(json_file): ''' Using the jsonschema file specified check the json bits for compliance ''' + + has_passed = True + message = "No Failures Detected" if isinstance(json_file, dict): # Treat this as the dict itself @@ -52,29 +51,32 @@ def storageJSONVerify(schema_file, json_file): except ValueError as err: msg = "Error in the Format of your JSON File " + err return (False, msg) - - if isinstance(schema_file, dict): - # Treat this as the schema itself - this_schema = schema_file - else: - try: - with open(schema_file, "r") as this_schema_file: - this_schema = json.load(this_schema_file) - except ValueError as err: - msg = "Error in the Format of your Schema File: " + str(err) - return (False, msg) - - try: - jsonschema.validate(this_json, this_schema) - except jsonschema.exceptions.ValidationError as err: - msg = "Error in your JSON File: {}".format(err) - return (False, msg) - except jsonschema.exceptions.SchemaError as err: - msg = "Error with your Schema File: {}".format(err) - return (False, msg) - else: - msg = "JSON file passed Schema Validation." - return (True, msg) + + if isinstance(this_json, dict) is False: + has_passed = False + message = "Incorrect type of Data" + + if has_passed is True: + # Required Key Checks + required_keys = ["resource", "partition", "service", "region", "accountid", + "collection_hostname", "pop", "srvtype", "status", "uber_id"] + required_int_keys = ["collection_timestamp"] + required_dict_keys = ["collection_data"] + collection_data_keys = ["host_host"] + + missing_keys = [mkey for mkey in [*required_keys, *required_int_keys, *required_dict_keys] if mkey not in this_json.keys()] + + missing_coll_keys = [mcollkey for mcollkey in collection_data_keys if mcollkey not in this_json.get("collection_data", dict()).keys()] + + if len(missing_keys) > 0: + has_passed = False + message = "Missing Keys {}".format(",".join(missing_keys)) + + elif len(missing_coll_keys) > 0: + has_passed = False + message = "Missing Collection Keys {}".format(",".join(missing_coll_keys)) + + return (has_passed, message) if __name__ == "__main__": #"We're going to run the main stuff diff --git a/travis/artifacts/manoward.yaml b/travis/artifacts/manoward.yaml index 5e32dd4..c9a4d41 100644 --- a/travis/artifacts/manoward.yaml +++ b/travis/artifacts/manoward.yaml @@ -67,5 +67,7 @@ storage: analyze: freshseconds: 172800 maxthreads: 32 +collate: + freshseconds: 86400 diff --git a/ui.py b/ui.py index 5f98c9b..07956f1 100755 --- a/ui.py +++ b/ui.py @@ -29,7 +29,7 @@ if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument("-c", "--config", help="Config File for Scheduler", required=False, default="/etc/manowar/manoward.yaml") + parser.add_argument("-c", "--config", help="Config File for Scheduler", required=False, default=None) parser.add_argument("-d", "--flaskdebug", action='store_true', help="Turn on Flask Debugging", default=False) parser.add_argument("-v", "--verbose", action='append_const', help="Turn on Verbosity", const=1, default=[]) @@ -57,6 +57,7 @@ CONFIG = db_helper.get_manoward(explicit_config=args.config, only_file=False) + def ui(CONFIG, FDEBUG): diff --git a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py 
b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py index d61db97..6ea6577 100644 --- a/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py +++ b/yoyo_steps/migrations/20200107_02_xxxxx-credentials.py @@ -59,7 +59,10 @@ # Since Im in a subdirectory when running this I need to do # these things by hand instead of using db_helper # Because of the way yoyo is setup -possible_config_files = ["/etc/manowar/manoward.yaml", "./etc/manowar/manoward.yaml", "/usr/local/etc/manowar/manoward.yaml"] +possible_config_files = ["/etc/manowar/manoward.yaml", + "./etc/manowar/manoward.yaml", + "/usr/local/etc/manowar/manoward.yaml", + "../etc/manowar/manoward.yaml"] if os.environ.get("TRAVIS", None) is not None: logger.info("In a Travis Build Add the Travis Paths to Configuration.") From 7b16dfd0b5fd4943c10543eb9b7318af7f3e2ce7 Mon Sep 17 00:00:00 2001 From: Chris Date: Thu, 16 Jan 2020 11:54:22 -0800 Subject: [PATCH 053/143] Updating Icon --- static/img/manowar_full.png | Bin 0 -> 10680 bytes static/img/manowar_small.png | Bin 0 -> 4620 bytes templates/base_header.html | 6 +++--- 3 files changed, 3 insertions(+), 3 deletions(-) create mode 100644 static/img/manowar_full.png create mode 100644 static/img/manowar_small.png diff --git a/static/img/manowar_full.png b/static/img/manowar_full.png new file mode 100644 index 0000000000000000000000000000000000000000..9dc96d4f2aae41571e4a2c587a22cca38ac2e8e6 GIT binary patch literal 10680 zcmV;pDM!|cP)E600009a7bBm000XU z000XU0RWnu7ytkO8FWQhbW?9;ba!ELWdK2BZ(?O2No`?gWm08fWO;GPWjp`?DMd*{ zK~#9!V*LOAKLZRLc=g;tOp;+B01Jd-Ts2Si+V$I7nu@pY+*eikAC^RCcQ69O1Lz23 zA>|u4?`o*i*(pqzLO@5*KVF!={r#gSDvBm5$IpNL>nFOpgNvDk=kXKKBPV3eU36k* z72JPF@A*sClV>!!`KS}|jEoRQSXdaG3l~I*m!ol+1dXexsDNw6GzAZU=f~BnSL0L1 z78&>-3}njlfTUnXyxxSiH5bnOM+FcQk-2a?&`3-!nwFU}XJTqZD22EK2!QVRj{%T< zhRlEf7Z(>ycc6jt^74+34!Eqbu`#kNwEVdE@V=6P9-d$W8iDKr2pbYXckkZCj0=bq zvU-RKaKX^fP!u>i@;EuRBnFFtoF_{fg`^aW6N9K3q@^Ut4fQTn?B8ZUNQIt&8} z7~xU))$>5>fh=I-L;Tq@NWBjfq<6e9telIzUZ8XPBSYT+Q#4IapWgBEYTx0F$~qeN z9s;X-j!oO-E?(ifHXbWmbSOT%7uVYENs3`i3MIkXVv>7lnTfaJs0J|MZ%Hv&1#fPsk? 
z7!?20>is>@sA5E+I1Zku@&%=E#gwWD08AzWR?;V3jLhMMkAJ6Gjw%U1y!;tIol0Vf zWfgIYHX9;-KZd(Hb#>n;=&ZWBX@v_gG87AVbNm+0jE|^_yQ(6tGjhIh4QChUwbQfFU zhI(+6{1rHg!c@_;T0k_D#;ex{72&duF_KC1?Qi|!u9G0ZH9rq)DuoPd(Ir3tmCeEv zjf$!h^Z8VzK!BIEVEE`sStlt|hn0{5DHhyrEV$jK@^Cbbh~N0UBbrI$wVOjYy=J`_ zCX=PtZVs7hz@I9LRYD};LS&9Fwh7#;7)dP1W|ATaXU0cXrbR0R>HCT#4BVbvk;beP zr0*+|pk*MrS8+jN1g3zTg?PnUvqFew(h!UZk1WT72ak!F zS%3h95QI~U$Z%PZTmYh67BSU5hunN@HjzMD$86@e+hJvdRld?T+G4YV0uO+I5TG`n zo&czIxj+C=gaSo~m|K_)(g1j#2gmaOIJ_ST=Zq@|DS zFtH>M;)uMdq!6F0oJam2IGzK?^H}1t;CL|>(ozTkSc(Qkavwy^Jgl7F@?!g>};;CHeAm^%u%3ym8CTaG<6Z5Blrj zpc$grcj*7=4|!)>3xW*|a5}TrsGHXSV3oy=fe(KVo6`wzAb>zqBV0ZooK7cf zRvQ?K0!^qqAp}x&5MEv&PN~nlLamC45}$av*o&f^e_3g~i~ycPmg6wLuz={?9mMAD zAf1dOy_m*7e(e$L^#v6grXZoGsRaXhGg~^9LL?l#CrZSfxmgfGArX&*2amxI zKf==DA|lhbVX@g@aoAyVIAC)+Q0ouiq19_tcc4spueCc|*wGTg8y}v-?ASOMmc@0} z0>;9Eu~@)bt*}_F@YdBLu@DEhlm*Q)aC&`kdA*=%0*YZkQ8Z|V22InTXa)>JgQgj9 zToyM5uYyoH0yF6}VzV=#DH;?*gQ6(J!ZTnjENF%WV`0HsEMP1awEFxQy?PA{!+^C~ z6>Z+Q!ztnjkEF+wsPS=Sq^5h|ObeY8u&01`NyQ8*Tw>VZmChU@R6` zY*w&VD_E;de4d+{!lnN`3qlJ7<;h}L&@2O*VZdli=4_<&GH7i;xoIpwLBeQ$ja`g7 zn*;3*7xArIzibAW*>QcJB`vt9(j;tcYSoOB<-9U>iyynA?r-$;>YP)B&*3CTxAd89 zqajRvHC%Sy2A{9&yfPBr8~Q>uQN6>7gH3H>r*dq|Hr;|36A4odvGG7cWna-Kn%%~B ztkfVQ;U{N*tD5K=>o&@M8zI3gx?SbJ1OV)4?||2#^nnsEpI+3A6G^xL|6Df@%Y zng9;Bb;yPmufs{6*}YG*(9EKCg<81JQ)4<$F^+HDt{HcI=C)>>NWzbQ`?hL=)7$o{ zh9B+K?iVrcu_jun9&A2aS!D2!YZ=}W+h=)Lo)ziW2oI!zG58j+fLeQ)rZ3CdL;bkxdFtI@{Y`Hwc$H^A1GPr zGrQDPVkxs!GG8%v*XKys6k25}pA?@?jsXB(dhlV@_{sSgrsDIe!@EdAG5&f_KipPp zNfCW|m%6HC7E`L>YN&TOv8$n(0Koq4^?J#5@%+96@YvNm$n(?cTaDC6_%?s}*03zF z*;j*a^zGEu(Y8P>IkI81B0Q5at`2)7lb8eRdJN~&g4f|Brw$!fjXO6stQt=J>XVNZ zfv5W)G+gP^J9f){a~ucGghSQgP>1*&3f7U0ZS98hO7Yp`xcL0y!NX`#>95`!8^MB# z-!t{APd<_bwAA?VcyFKKp8BDUn@LMe!BfOcr`TB&KxdJY<^FY*tAy9#Ab@-x?hW8M4%sDRbA+eXD@PRp&+Y3ks}1L;luPly=|7|hyHa#Y zk1AL6it)sj?ZoKb)tW#MUH)24i3`;R$=o#ALsze|YRk@kcC~#NfnO*_NYZ zT;GP^vRcuOq(d^x4>0k^xoLw7nZ;Mo$(bY5qRu&Q|-8*;fmN_M1s_Dl>&0}?f1X`&*ZQA z4%Ly8L;u83=E>ooJohRA<<{An`r zuRkI?+dE9p6)xSnshh@ESBrSo+%o%nM^Bhae=z^iWYCcPsMutnq zQ#1{4Fo>{;Wj2sosXkS-NYlf`nQ7fLA&0BbHdCdSz`1LmmyGx3Pqv2B=12el^CW27 z69t6qPVu=aBwQT1soRm()NAXc9`gpqJ*@$=Rdq+q5DbpU|foomRh z)%uex0hi|FhSh`pw;AYUX_g@ku?hDVpd^Gtd;X~NcVS@PxQ7gPLO*K>W= zi#@%Hw976DPOk@)fs^h=aI$X)Y4#c0q>)O(nJX6o059)6fZYvBQ$bZoxHY3~(|4IT zq-6x(I;b?1sNr@pft!-`!M`0ouIZ#vF$twc1`Er=>Gf1vg$z$@*+!bZ#-!nLerf&u z)*Yn3#00Qn5~A_AW?Xk`ND;QqUH%rjfN$u%9@jHm>U zZQcsEwe-~^0B|{?ZeJ)y#Uw-$+LCZWW%1*rE5GE+W3!VNw(HL#6H6HkYizwqQ8@`3 z>ae?W4V*Tm1C?~nUJ1UwNjcQ1;Yw8h)~||5xHO`^dfd~cerHP1qA$iq(Cn=tCweyN zrW=mObki)`$(KDUCM_W623Ree=+U-V3ovrXZTU#(TB{h691Meyqk5OeZEtNSjXJ(c z!|{0}vP&iVg=HaOPuG0_fXM~H)94+9UltOCovg_s`~LPo`8EYDxUDv#XTsMr->D78H(biafzax*^HzZb`L1>-?@}+=^!7G~u&K6w z*{>qr+Spt&{#rbyIajEdgm#UcolnL`;j&op@{awLR$&N@E)QvNn!A}tK*c2N@9t52 zwc*d=yp!juK}7s*sR7 z7gboz-I|?QvD}#&RXl zeK*ws0A4=+b$IM{ynX3HwTm`{Vn4>9)sCRmR&xGObrRYFLGt1Qhw)z@e}Guhygy{6 zh!H8 ziXFUrd;zpO-Id?!UoLu@Tgj8#c46#}{;m4e!cfsx`~!`~&TlO1{|D;HZTiOwtBG^B zhQ-fXXLsOWW2>S1@0nZp-W~ahUyyW)&vtbi&U-JBAUKZ^L$=I!Y4)|l#x?hO*YxB>upsACNtU$fqHXZ$Bm!bB{}e>yRS zN4nPG?{?o`)@Zb>L*(7S{9+1EZ`^|K>?w}`0Qf)dWk>}f#4N@D0000 - + @@ -29,13 +29,13 @@ - +