Mirror of https://github.com/puppetlabs/infinitory.git (synced 2026-01-26 02:08:41 -05:00)

Commit 7e5f22eab6 (parent 26acc1d4c6): Adding error page to infinitory

11 changed files with 319 additions and 31 deletions
.gitignore (vendored, 1 change)

@@ -5,3 +5,4 @@ __pycache__
 /*.egg-info
 /output
 bin/
+/cache
@@ -7,3 +7,7 @@ Developing
 ==========

 Use `python setup.py develop` to install dependencies
+
+Run in Dev:
+
+bin/infinitory -h pdb.ops.puppetlabs.net -o /tmp/output
@@ -5,6 +5,7 @@ from jinja2 import Markup
 from operator import itemgetter
 import re

+
 class Base(object):
     def __init__(self, section, key, header=None):
         self.section = section
@@ -55,6 +56,21 @@ class Boolean(Base):
         return "Y" if self.value(record) else "N"


+class TruncatedList(Base):
+    def value_html(self, record):
+        items = [self.item_html(i) for i in self.value(record)]
+        return Markup("<ol>%s</ol>") % Markup("\n").join(items[:5])
+
+    def item_html(self, item):
+        return Markup("<li>%s</li>") % item
+
+    def value_csv(self, record):
+        return "\n".join([self.item_csv(i) for i in self.value(record)])
+
+    def item_csv(self, item):
+        return item
+
+
 class List(Base):
     def value_html(self, record):
         items = [self.item_html(i) for i in self.value(record)]
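A quick standalone sketch (not part of the commit) of what a TruncatedList-style column produces: the HTML cell keeps only the first five items of a list value, while the CSV cell keeps the whole list. It uses markupsafe.Markup directly (the class jinja2's Markup comes from); the helper names below are illustrative, not the project's API.

    from markupsafe import Markup

    def truncated_list_html(items, limit=5):
        # Escape each item into an <li>, then keep only the first `limit` entries.
        cells = [Markup("<li>%s</li>") % item for item in items]
        return Markup("<ol>%s</ol>") % Markup("\n").join(cells[:limit])

    def truncated_list_csv(items):
        # The CSV cell keeps every item, one per line.
        return "\n".join(str(item) for item in items)

    certnames = ["node%d.example.com" % n for n in range(1, 8)]
    print(truncated_list_html(certnames))  # an <ol> with five <li> entries
    print(truncated_list_csv(certnames))   # all seven names, newline separated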
@@ -173,4 +189,3 @@ class Os(Base):
             pass

         return " ".join(os)
@@ -33,6 +33,7 @@ def output_html(inventory, directory):
     with open("{}/pygments.css".format(directory), "w", encoding="utf-8") as css:
         css.write(pygments.formatters.HtmlFormatter().get_style_defs('.codehilite'))

+    os.mkdir("{}/errors".format(directory), 0o755)
     os.mkdir("{}/nodes".format(directory), 0o755)
     nodes = inventory.sorted_nodes("facts", "fqdn")
     generation_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%SZ")
@@ -54,6 +55,39 @@ def output_html(inventory, directory):
         cellformatter.Roles("other", "roles"),
     ]

+    unique_error_columns = [
+        cellformatter.Base("other", "count"),
+        cellformatter.Base("other", "level"),
+        cellformatter.Base("other", "message"),
+        cellformatter.TruncatedList("other", "certnames"),
+    ]
+
+    unique_errors = inventory.unique_errors()
+
+    with open("{}/errors/index.html".format(directory), "w", encoding="utf-8") as html:
+        html.write(
+            render_template("errors.html",
+                path="../",
+                generation_time=generation_time,
+                columns=unique_error_columns,
+                errors=unique_errors))
+
+    all_error_columns = [
+        cellformatter.Base("other", "message"),
+        cellformatter.Base("other", "level"),
+        cellformatter.Base("other", "certname"),
+    ]
+
+    all_errors = inventory.all_errors()
+
+    with open("{}/errors/all.html".format(directory), "w", encoding="utf-8") as html:
+        html.write(
+            render_template("all_errors.html",
+                path="../",
+                generation_time=generation_time,
+                columns=all_error_columns,
+                errors=all_errors))
+
     with open("{}/nodes/index.html".format(directory), "w", encoding="utf-8") as html:
         html.write(
             render_template("nodes.html",
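To make the new error pages concrete, here is a small illustrative sketch of how they come together: each column object knows its section and key, renders a header cell and a body cell, and each error record is a dict keyed by section, which is the "other" wrapper that Inventory.unique_errors() and all_errors() add. Only head_html()/body_html() are taken from the real templates; the Column stub, the inline template string, and the sample data are assumptions for the example.

    from jinja2 import Environment
    from markupsafe import Markup

    class Column(object):
        # Stand-in for cellformatter.Base("other", key); the real class does more.
        def __init__(self, section, key):
            self.section = section
            self.key = key

        def head_html(self):
            return Markup("<th>%s</th>") % self.key

        def body_html(self, record):
            return Markup("<td>%s</td>") % record[self.section][self.key]

    template = Environment(autoescape=True).from_string(
        "<table><tr>{% for cell in columns %}{{ cell.head_html() }}{% endfor %}</tr>"
        "{% for error in errors %}<tr>{% for cell in columns %}"
        "{{ cell.body_html(error) }}{% endfor %}</tr>{% endfor %}</table>")

    columns = [Column("other", "count"), Column("other", "message")]
    errors = [{"other": {"count": 3, "message": "Could not retrieve catalog"}}]
    print(template.render(columns=columns, errors=errors))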
@@ -130,6 +164,7 @@ def output_html(inventory, directory):
                     generation_time=generation_time,
                     service=service))

+
 def render_template(template_name, **kwargs):
     data_path = os.path.dirname(os.path.abspath(__file__))
     environment = jinja2.Environment(
@@ -197,16 +232,17 @@ def main(host, output, verbose, debug):
         set_up_logging(logging.WARNING)

     try:
-        inventory = Inventory()
+        inventory = Inventory(debug=debug)
         inventory.add_active_filter()

-        with puppetdb.AutomaticConnection(host) as pdb:
-            inventory.load_nodes(pdb)
-            inventory.load_backups(pdb)
-            inventory.load_logging(pdb)
-            inventory.load_metrics(pdb)
-            inventory.load_monitoring(pdb)
-            inventory.load_roles(pdb)
+        with puppetdb.AutomaticConnection(host) as pupdb:
+            inventory.load_nodes(pupdb)
+            inventory.load_errors(pupdb)
+            inventory.load_backups(pupdb)
+            inventory.load_logging(pupdb)
+            inventory.load_metrics(pupdb)
+            inventory.load_monitoring(pupdb)
+            inventory.load_roles(pupdb)

         output_html(inventory, output)
     except socket.gaierror as e:
@@ -219,5 +255,3 @@ def main(host, output, verbose, debug):
         sys.exit(e)
     except requests.exceptions.ConnectionError as e:
         sys.exit(e)
-
-
infinitory/errors.py (new file, 116 lines)

import logging
import os
import pickle
import sys
import time

from datetime import datetime


class ErrorParser(object):
    def __init__(self, debug=False):
        self.all_errors = []
        self.reports_cache_path = '/tmp/infinitory_cache'
        self.debug = debug
        self._logger = logging.getLogger()
        self._reports = dict()
        self.unique_errors = []
        self.delete_report_cache()

    def delete_report_cache(self):
        if not os.path.isdir(self.reports_cache_path):
            os.mkdir(self.reports_cache_path)

        for file in os.listdir(self.reports_cache_path):
            # Delete cache items older than one hour
            absolute_cache_file_path = os.path.join(self.reports_cache_path, file)
            time_one_hour_ago = time.mktime(datetime.now().timetuple()) - (1 * 3600)
            if os.stat(absolute_cache_file_path).st_mtime < time_one_hour_ago:
                print("Deleting File " + absolute_cache_file_path)
                os.remove(absolute_cache_file_path)
    def load_reports(self, pupdb):
        """ I didn't use a subquery because it takes much longer than loading
        the reports one by one """
        for report in pupdb.query('nodes[certname, latest_report_hash] { }'):
            cache_file = "%s/%s" % (self.reports_cache_path, report["latest_report_hash"])
            if os.path.isfile(cache_file):
                full_report = pickle.load(open(cache_file, "rb"))
                if self.debug:
                    sys.stdout.write('#')
            else:
                query = 'reports[] { hash = "%s" }' % report["latest_report_hash"]
                full_report = pupdb.query(query)
                pickle.dump(full_report, open(cache_file, "wb"))
                if self.debug:
                    sys.stdout.write('.')
            sys.stdout.flush()

            self._reports[report["certname"]] = full_report[0]

    def common_error_prefixes(self):
        return [
            "Could not retrieve catalog from remote server: Error 500 on SERVER: Server Error: Evaluation Error: Error while evaluating a Function Call, Untrusted facts (left) don't match values from certname (right)"
        ]

    def matches_stored_error(self, message):
        for se in self.common_error_prefixes():
            if message.startswith(se):
                return se

        return None

    def clean_error_message(self, error_message):
        stored_error = self.matches_stored_error(error_message)

        if stored_error:
            return stored_error

        return error_message

    def modify_unique_errors_at(self, i, log_level, certname):
        new_certname_list = self.unique_errors[i]['certnames']
        new_certname_list.add(certname)

        self.unique_errors[i] = {
            'count': self.unique_errors[i]['count'] + 1,
            'level': log_level,
            'certnames': new_certname_list,
            'message': self.unique_errors[i]['message']
        }

    def append_unique_error(self, error_message, log_level, certname):
        for i, ue in enumerate(self.unique_errors):
            if ue['message'] == error_message:
                self.modify_unique_errors_at(i, log_level, certname)
                return

        self.unique_errors.append({
            'count': 1,
            'level': log_level,
            'certnames': set([certname]),
            'message': error_message,
        })

    def extract_errors_from_reports(self):
        for node, report in self._reports.items():
            self._logger.debug("%s -- %s" % (report["certname"], report["status"]))
            for log_message in report['logs']['data']:
                if log_message['level'] == 'err' or log_message['level'] == 'warning':
                    error = {
                        'level': log_message['level'],
                        'hostname': report["certname"],
                        'message': log_message['message']
                    }

                    self.all_errors.append(error)

                    error_message = self.clean_error_message(error['message'])

                    self.append_unique_error(
                        error_message,
                        log_message['level'],
                        report['certname']
                    )
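A short, self-contained usage sketch of the class above. The hostnames, messages, and the hand-built report are made up, and only the keys the code actually reads are included; real PuppetDB reports carry much more. Note that constructing ErrorParser() also creates and prunes /tmp/infinitory_cache.

    from infinitory.errors import ErrorParser

    parser = ErrorParser()  # also creates/prunes /tmp/infinitory_cache
    parser._reports = {
        "web1.example.com": {
            "certname": "web1.example.com",
            "status": "failed",
            "logs": {"data": [
                {"level": "err", "message": "Could not retrieve catalog"},
                {"level": "notice", "message": "Applied catalog in 4.2 seconds"},
            ]},
        }
    }
    parser.extract_errors_from_reports()
    # Only the 'err' line is kept:
    # parser.all_errors == [{'level': 'err', 'hostname': 'web1.example.com',
    #                        'message': 'Could not retrieve catalog'}]

    # Repeated messages from other nodes collapse into one unique error:
    parser.append_unique_error("Could not retrieve catalog", "err", "web2.example.com")
    # parser.unique_errors[0]['count'] == 2 and its 'certnames' set now holds both nodes.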
@@ -2,8 +2,13 @@ from collections import defaultdict
 from operator import itemgetter
 from simplepup import puppetdb

+import infinitory.errors as errors
+
+
 class Inventory(object):
-    def __init__(self, filters=set()):
+    def __init__(self, filters=set(), debug=False):
+        self.debug = debug
+        self.errorParser = errors.ErrorParser(debug=debug)
         self.filter = puppetdb.QueryFilter(filters)
         self.nodes = None
         self.roles = None
@@ -14,18 +19,18 @@ class Inventory(object):
     def add_filter(self, filter):
         self.filter.add(filter)

-    def load_nodes(self, pdb):
+    def load_nodes(self, pupdb):
         self.nodes = dict()
-        for node in pdb.query(self.filter('inventory {}')):
+        for node in pupdb.query(self.filter('inventory {}')):
             node["other"] = defaultdict(list)
             self.nodes[node["certname"]] = node

-    def query_classes(self, pdb, class_name):
-        return self.query_resources(pdb,
+    def query_classes(self, pupdb, class_name):
+        return self.query_resources(pupdb,
             'title="%s" and type="Class"' % class_name)

-    def query_resources(self, pdb, condition, include_absent=False):
-        for resource in pdb.query(self.filter('resources {}', condition)):
+    def query_resources(self, pupdb, condition, include_absent=False):
+        for resource in pupdb.query(self.filter('resources {}', condition)):
             if not include_absent:
                 if resource["parameters"].get("ensure", None) == "absent":
                     continue
@@ -35,36 +40,54 @@ class Inventory(object):
             except KeyError:
                 continue

-    def load_backups(self, pdb):
-        for node, resource in self.query_resources(pdb, 'type="Backup::Job"'):
+    def load_backups(self, pupdb):
+        for node, resource in self.query_resources(pupdb, 'type="Backup::Job"'):
             paths = resource["parameters"]["files"]
             if type(paths) is list:
                 node["other"]["backups"].extend(paths)
             else:
                 node["other"]["backups"].append(paths)

-    def load_logging(self, pdb):
-        for node, resource in self.query_classes(pdb, "Profile::Logging::Rsyslog::Client"):
+    def load_errors(self, pupdb):
+        self.errorParser.load_reports(pupdb)
+        self.errorParser.extract_errors_from_reports()
+
+    def wrap_with_category(self, list_of_hashes, category):
+        retval = []
+        for error in list_of_hashes:
+            retval.append({
+                category: error
+            })
+        return retval
+
+    def unique_errors(self):
+        return self.wrap_with_category(self.errorParser.unique_errors, "other")
+
+    def all_errors(self):
+        return self.wrap_with_category(self.errorParser.all_errors, "other")
+
+    def load_logging(self, pupdb):
+        for node, resource in self.query_classes(pupdb, "Profile::Logging::Rsyslog::Client"):
             node["other"]["logging"] = True

-    def load_metrics(self, pdb):
-        for node, resource in self.query_classes(pdb, "Profile::Metrics"):
+    def load_metrics(self, pupdb):
+        for node, resource in self.query_classes(pupdb, "Profile::Metrics"):
             node["other"]["metrics"] = True

-    def load_monitoring(self, pdb):
-        for node, resource in self.query_classes(pdb, "Profile::Server::Monitor"):
+    def load_monitoring(self, pupdb):
+        for node, resource in self.query_classes(pupdb, "Profile::Server::Monitor"):
             node["other"]["monitoring"] = True

-        for node, resource in self.query_classes(pdb, "Profile::Monitoring::Icinga2::Common"):
+        for node, resource in self.query_classes(pupdb, "Profile::Monitoring::Icinga2::Common"):
             node["other"]["icinga_notification_period"] = resource["parameters"]["notification_period"]
             node["other"]["icinga_environment"] = resource["parameters"]["icinga2_environment"]
             node["other"]["icinga_owner"] = resource["parameters"]["owner"]

-    def load_roles(self, pdb):
+    def load_roles(self, pupdb):
         self.roles = defaultdict(list)

         condition = 'type = "Class" and title ~ "^Role::"'
-        for node, resource in self.query_resources(pdb, condition):
+        for node, resource in self.query_resources(pupdb, condition):
             if resource["title"] not in ("role", "role::delivery"):
                 node["other"]["roles"].append(resource["title"])
                 self.roles[resource["title"]].append(node)
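Returning to the wrap_with_category(), unique_errors(), and all_errors() helpers added above: the wrapper exists because the cell formatters look values up by section, so each error dict is nested under the "other" key before it reaches the templates. A tiny illustration with made-up data:

    raw = [{"count": 2, "level": "err", "message": "disk full",
            "certnames": {"a.example.com", "b.example.com"}}]
    wrapped = [{"other": error} for error in raw]
    # wrapped[0]["other"]["count"] == 2, which is what
    # cellformatter.Base("other", "count") reads when building the errors table.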
@@ -93,4 +116,3 @@ class Inventory(object):
                 services[class_name]["nodes"].append(node)

         return sorted(services.values(), key=itemgetter("human_name"))
infinitory/templates/all_errors.html (new file, 27 lines)

{% extends "layout.html" %}
{% block title %}All Errors{% endblock %}
{% block body %}
<a href="{{ path }}errors/index.html">Unique Errors</a>
<h1>All Errors</h1>
<table>
    <thead>
        <tr>
            {% for cell in columns %}
                {{ cell.head_html() }}
            {% endfor %}
        </tr>
    </thead>
    <tbody>
        {% for error in errors %}
            <tr>
                {% for cell in columns %}
                    {{ cell.body_html(error) }}
                {% endfor %}
            </tr>
        {% endfor %}
    </tbody>
</table>
{% endblock %}
{% block footer %}
<a href="../errors.csv">Download CSV</a>
{% endblock %}
infinitory/templates/errors.html (new file, 27 lines)

{% extends "layout.html" %}
{% block title %}Unique Errors{% endblock %}
{% block body %}
<a href="{{ path }}errors/all.html">All Errors</a>
<h1>Unique Errors</h1>
<table>
    <thead>
        <tr>
            {% for cell in columns %}
                {{ cell.head_html() }}
            {% endfor %}
        </tr>
    </thead>
    <tbody>
        {% for error in errors %}
            <tr>
                {% for cell in columns %}
                    {{ cell.body_html(error) }}
                {% endfor %}
            </tr>
        {% endfor %}
    </tbody>
</table>
{% endblock %}
{% block footer %}
<a href="../errors.csv">Download CSV</a>
{% endblock %}
@@ -16,6 +16,7 @@
             <li><a href="{{ path }}nodes/index.html">Nodes</a></li>
             <li><a href="{{ path }}roles/index.html">Roles</a></li>
             <li><a href="{{ path }}services/index.html">Services</a></li>
+            <li><a href="{{ path }}errors/index.html">Errors</a></li>
         </ul>
     </nav>
     <main>
setup.py (6 changes)

@@ -24,9 +24,13 @@ setuptools.setup(
         "Jinja2",
         "markdown2",
         "pygments",
-        "simplepup"
+        "simplepup",
     ],

+    tests_require = [
+        "pytest",
+    ],
+
     include_package_data = True,
     entry_points = {
         "console_scripts": [
test/errors/test_errors.py (new file, 37 lines)

import unittest
import infinitory.errors
import sample


class MyTest(unittest.TestCase):
    def test_error_message_cleaner(self):
        errorParser = infinitory.errors.ErrorParser()

        self.assertEqual(errorParser.clean_error_message("Hello"), "Hello")

        self.assertEqual(
            errorParser.clean_error_message("Could not retrieve catalog from remote server: Error 500 on SERVER: Server Error: Evaluation Error: Error while evaluating a Function Call, Untrusted facts (left) don't match values from certname (right) owaijefoeiawjfoiewjf"),
            "Could not retrieve catalog from remote server: Error 500 on SERVER: Server Error: Evaluation Error: Error while evaluating a Function Call, Untrusted facts (left) don't match values from certname (right)"
        )

    def test_other_prefixing(self):
        """ The cell formatter expects that all values have a prefix associated
        with them. This checks that the errorParser properly adds that
        prefix. """

        errorParser = infinitory.errors.ErrorParser()

        input = ["1", "2"]

        errorParser.set_all_errors(input)
        errorParser.set_unique_errors(input)

        self.assertEqual(
            [ { "other": "1" }, { "other": "2" } ],
            errorParser.all_errors()
        )
        self.assertEqual(
            [ { "other": "1" }, { "other": "2" } ],
            errorParser.unique_errors()
        )