import os
import yaml
import json
import re
import httplib2
import urllib
import logging
import socket

import teuthology
from teuthology.config import config


log = logging.getLogger(__name__)


def main(args):
    if args.verbose:
        teuthology.log.setLevel(logging.DEBUG)

    archive_base = os.path.abspath(os.path.expanduser(args.archive))
    reporter = ResultsReporter(archive_base, base_uri=args.server,
                               save=args.save, refresh=args.refresh)
    if args.run and len(args.run) > 1:
        reporter.report_runs(args.run)
    elif args.run:
        reporter.report_run(args.run[0])
    elif args.all_runs:
        reporter.report_all_runs()
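
# Illustrative invocation only: main() expects an argparse-style namespace
# whose attribute names match those used above (verbose, archive, server,
# save, refresh, run, all_runs). The CLI wrapper itself lives outside this
# module, so the command and flag spelling below are assumptions:
#
#     teuthology-report -v --archive ~/archive --run my-run-name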


class RequestFailedError(RuntimeError):
    def __init__(self, uri, resp, content):
        self.uri = uri
        self.status = resp.status
        self.reason = resp.reason
        self.content = content
        try:
            self.content_obj = json.loads(content)
            self.message = self.content_obj['message']
        except ValueError:
            self.message = self.content

    def __str__(self):
        templ = "Request to {uri} failed with status {status}: {reason}: {message}"  # noqa
        return templ.format(
            uri=self.uri,
            status=self.status,
            reason=self.reason,
            message=self.message,
        )


class ResultsSerializer(object):
    """
    This class exists to poke around in the archive directory doing things
    like assembling lists of test runs, lists of their jobs, and merging sets
    of job YAML files together to form JSON objects.
    """
    yamls = ('orig.config.yaml', 'config.yaml', 'info.yaml', 'summary.yaml')

    def __init__(self, archive_base):
        self.archive_base = archive_base

    def json_for_job(self, run_name, job_id, pretty=False):
        """
        Given a run name and job id, merge the job's YAML files together to
        create a JSON object.

        :param run_name: The name of the run.
        :param job_id: The job's id.
        :returns: A JSON string.
        """
        job_archive_dir = os.path.join(self.archive_base,
                                       run_name,
                                       job_id)
        job_info = {}
        for yaml_name in self.yamls:
            yaml_path = os.path.join(job_archive_dir, yaml_name)
            if not os.path.exists(yaml_path):
                continue
            with open(yaml_path) as yaml_file:
                partial_info = yaml.safe_load(yaml_file)
            if partial_info is not None:
                job_info.update(partial_info)

        if 'job_id' not in job_info:
            job_info['job_id'] = job_id

        if pretty:
            job_json = json.dumps(job_info, sort_keys=True, indent=4)
        else:
            job_json = json.dumps(job_info)

        return job_json

    def jobs_for_run(self, run_name):
        """
        Given a run name, look on the filesystem for directories containing
        job information, and return a dict mapping job IDs to job
        directories.

        :param run_name: The name of the run.
        :returns: A dict like: {'1': '/path/to/1', '2': '/path/to/2'}
        """
        archive_dir = os.path.join(self.archive_base, run_name)
        if not os.path.isdir(archive_dir):
            return {}
        jobs = {}
        for item in os.listdir(archive_dir):
            if not re.match(r'\d+$', item):
                continue
            job_id = item
            job_dir = os.path.join(archive_dir, job_id)
            if os.path.isdir(job_dir):
                jobs[job_id] = job_dir
        return jobs

    def running_jobs_for_run(self, run_name):
        """
        Like jobs_for_run(), but only returns jobs with no summary.yaml

        :param run_name: The name of the run.
        :returns: A dict like: {'1': '/path/to/1', '2': '/path/to/2'}
        """
        jobs = self.jobs_for_run(run_name)
        for job_id in jobs.keys():
            if os.path.exists(os.path.join(jobs[job_id], 'summary.yaml')):
                jobs.pop(job_id)
        return jobs

    @property
    def all_runs(self):
        """
        Look in the base archive directory for all test runs. Return a list
        of their names.
        """
        archive_base = self.archive_base
        if not os.path.isdir(archive_base):
            return []
        runs = []
        for run_name in os.listdir(archive_base):
            if not os.path.isdir(os.path.join(archive_base, run_name)):
                continue
            runs.append(run_name)
        return runs
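
    # A minimal usage sketch (the archive path and run name below are
    # hypothetical; a real archive is laid out as
    # <archive_base>/<run_name>/<job_id>/*.yaml):
    #
    #     serializer = ResultsSerializer('/home/teuthworker/archive')
    #     for run_name in serializer.all_runs:
    #         jobs = serializer.jobs_for_run(run_name)
    #         for job_id in jobs:
    #             job_json = serializer.json_for_job(run_name, job_id)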


class ResultsReporter(object):
    last_run_file = 'last_successful_run'

    def __init__(self, archive_base, base_uri=None, save=False, refresh=False,
                 timeout=20):
        self.archive_base = archive_base
        self.base_uri = base_uri or config.results_server
        if self.base_uri:
            self.base_uri = self.base_uri.rstrip('/')
        self.serializer = ResultsSerializer(archive_base)
        self.save_last_run = save
        self.refresh = refresh
        self.timeout = timeout

    def _do_request(self, uri, method, json_):
        """
        Perform an actual HTTP request on a given URI. If the request was not
        successful and the reason was *not* that the object already exists,
        raise a RequestFailedError.
        """
        # Use urllib.quote() to escape things like spaces. Pass safe=':/' to
        # avoid it mangling http:// etc.
        uri = urllib.quote(uri, safe=':/')
        response, content = self.http.request(
            uri, method, json_, headers={'content-type': 'application/json'},
        )
        log.debug("{method} to {uri}: {status}".format(
            method=method,
            uri=uri,
            status=response.status,
        ))

        try:
            content_obj = json.loads(content)
        except ValueError:
            content_obj = {}

        message = content_obj.get('message', '')

        if response.status != 200 and not message.endswith('already exists'):
            raise RequestFailedError(uri, response, content)

        return response.status, message, content

    def post_json(self, uri, json_):
        """
        call self._do_request(uri, 'POST', json_)
        """
        return self._do_request(uri, 'POST', json_)

    def put_json(self, uri, json_):
        """
        call self._do_request(uri, 'PUT', json_)
        """
        return self._do_request(uri, 'PUT', json_)

    def report_all_runs(self):
        """
        Report *all* runs in self.archive_base to the results server.
        """
        all_runs = self.serializer.all_runs
        last_run = self.last_run
        if self.save_last_run and last_run and last_run in all_runs:
            next_index = all_runs.index(last_run) + 1
            runs = all_runs[next_index:]
        else:
            runs = all_runs
        return self.report_runs(runs)

    def report_runs(self, run_names):
        """
        Report several runs to the results server.

        :param run_names: The names of the runs.
        """
        num_runs = len(run_names)
        num_jobs = 0
        log.info("Posting %s runs", num_runs)
        for run in run_names:
            job_count = self.report_run(run)
            num_jobs += job_count
            if self.save_last_run:
                self.last_run = run
        del self.last_run
        log.info("Total: %s jobs in %s runs", num_jobs, num_runs)

    def create_run(self, run_name):
        """
        Create a run on the results server.

        :param run_name: The name of the run.
        :returns: The result of self.post_json()
        """
        run_uri = "{base}/runs/".format(base=self.base_uri)
        run_json = json.dumps({'name': run_name})
        return self.post_json(run_uri, run_json)

    def report_run(self, run_name):
        """
        Report a single run to the results server.

        :param run_name: The name of the run.
        :returns: The number of jobs reported.
        """
        jobs = self.serializer.jobs_for_run(run_name)
        log.info("{name} {jobs} jobs".format(
            name=run_name,
            jobs=len(jobs),
        ))
        if jobs:
            if not self.refresh:
                status, msg, content = self.create_run(run_name)
                if status != 200:
                    log.info("    already present; skipped")
                    return 0
            self.report_jobs(run_name, jobs.keys())
        elif not jobs:
            log.debug("    no jobs; skipped")
        return len(jobs)

    def report_jobs(self, run_name, job_ids):
        """
        Report several jobs to the results server.

        :param run_name: The name of the run.
        :param job_ids: The jobs' ids
        """
        for job_id in job_ids:
            self.report_job(run_name, job_id)

    def report_job(self, run_name, job_id, job_json=None):
        """
        Report a single job to the results server.

        :param run_name: The name of the run. The run must already exist.
        :param job_id: The job's id
        :param job_json: The job's JSON object. Optional - if not present, we
                         look at the archive.
        """
        run_uri = "{base}/runs/{name}/jobs/".format(
            base=self.base_uri, name=run_name,)
        if job_json is None:
            job_json = self.serializer.json_for_job(run_name, job_id)
        status, msg, content = self.post_json(run_uri, job_json)

        if msg.endswith('already exists'):
            job_uri = os.path.join(run_uri, job_id, '')
            status, msg, content = self.put_json(job_uri, job_json)
        return job_id
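
    # The net effect is an "upsert" against the results API, using the URI
    # shapes built above (the server-side semantics are inferred from this
    # client code, not documented here):
    #
    #     POST {base}/runs/{run_name}/jobs/           -> create the job
    #     PUT  {base}/runs/{run_name}/jobs/{job_id}/  -> update it when the
    #                                                    POST says it exists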

    @property
    def last_run(self):
        """
        The last run to be successfully reported.
        """
        # A single-underscore cache attribute is used on purpose: a
        # double-underscore name would be mangled by Python and never match
        # the hasattr() check below.
        if hasattr(self, '_last_run'):
            return self._last_run
        elif os.path.exists(self.last_run_file):
            with open(self.last_run_file) as f:
                self._last_run = f.read().strip()
            return self._last_run

    @last_run.setter
    def last_run(self, run_name):
        self._last_run = run_name
        with open(self.last_run_file, 'w') as f:
            f.write(run_name)

    @last_run.deleter
    def last_run(self):
        self._last_run = None
        if os.path.exists(self.last_run_file):
            os.remove(self.last_run_file)

    @property
    def http(self):
        # Cache the httplib2.Http instance on first use. As above, a
        # single-underscore name is used so that hasattr() can actually find
        # the cached attribute.
        if hasattr(self, '_http'):
            return self._http
        self._http = httplib2.Http(timeout=self.timeout)
        return self._http
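
    # A minimal usage sketch (archive path, server URI and run name are
    # hypothetical; base_uri falls back to config.results_server when
    # omitted):
    #
    #     reporter = ResultsReporter('/home/teuthworker/archive',
    #                                base_uri='http://paddles.example.com',
    #                                save=True)
    #     reporter.report_all_runs()
    #     # or: reporter.report_run('my-run-name')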


def create_run(run_name, base_uri=None):
    """
    Create a run on the results server. If it already exists, just smile and
    be happy.

    :param run_name: The name of the run.
    :param base_uri: The endpoint of the results server. If you leave it out
                     ResultsReporter will ask teuthology.config.
    :returns: True if the run was successfully created.
    """
    # We are using archive_base='' here because we KNOW the serializer isn't
    # needed for this codepath.
    reporter = ResultsReporter(archive_base='', base_uri=base_uri)
    status, msg, content = reporter.create_run(run_name)
    return (status == 200 or msg.endswith('already exists'))


def push_job_info(run_name, job_id, job_info, base_uri=None):
    """
    Push a job's info (example: ctx.config) to the results server.

    :param run_name: The name of the run.
    :param job_id: The job's id
    :param job_info: A dict containing the job's information.
    :param base_uri: The endpoint of the results server. If you leave it out
                     ResultsReporter will ask teuthology.config.
    """
    # We are using archive_base='' here because we KNOW the serializer isn't
    # needed for this codepath.
    job_json = json.dumps(job_info)
    reporter = ResultsReporter(archive_base='', base_uri=base_uri)
    reporter.report_job(run_name, job_id, job_json)
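
# A sketch of the intended flow (run name, job id and info dict are made up;
# report_job() expects the run to already exist on the server):
#
#     if create_run('my-run-name'):
#         push_job_info('my-run-name', '1',
#                       {'name': 'my-run-name', 'job_id': '1'})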


def try_push_job_info(job_config, extra_info=None):
    """
    Wrap push_job_info, gracefully doing nothing if:
        A RequestFailedError is raised
        A socket.error is raised
        config.results_server is not set
        job_config['job_id'] is not present or is None

    :param job_config: The ctx.config object to push
    :param extra_info: Optional second dict to push
    """
    if not config.results_server:
        msg = "No results_server set in {yaml}; not attempting to push results"
        log.debug(msg.format(yaml=config.teuthology_yaml))
        return
    elif job_config.get('job_id') is None:
        log.warning('No job_id found; not reporting results')
        return

    run_name = job_config['name']
    job_id = job_config['job_id']

    if extra_info is not None:
        job_info = extra_info.copy()
        job_info.update(job_config)
    else:
        job_info = job_config

    try:
        log.info("Pushing job info to %s", config.results_server)
        push_job_info(run_name, job_id, job_info)
    except (RequestFailedError, socket.error, httplib2.ServerNotFoundError):
        log.exception("Could not report results to %s" %
                      config.results_server)
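
# Illustrative task-side call (the dict keys shown are the ones this module
# actually reads -- 'name' and 'job_id'; anything else in ctx.config is just
# passed through as job info):
#
#     try_push_job_info({'name': 'my-run-name', 'job_id': '1'},
#                       extra_info={'hostname': socket.gethostname()})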