#!/usr/bin/env python
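"""
Submit test results from a teuthology archive directory to a results
web service.
"""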
import os
import yaml
import json
import re
import httplib2
import logging
import argparse
from textwrap import dedent

from teuthology.config import config

log = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)


class RequestFailedError(RuntimeError):
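    """
    Raised when the results server returns a failure response; carries the
    request URI, HTTP status, reason, and any error message from the body.
    """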
    def __init__(self, uri, resp, content):
        self.uri = uri
        self.status = resp.status
        self.reason = resp.reason
        self.content = content
        try:
            self.content_obj = json.loads(content)
            self.message = self.content_obj['message']
        except ValueError:
            self.message = self.content

    def __str__(self):
        templ = "Request to {uri} failed with status {status}: {reason}: {message}"  # noqa
        return templ.format(
            uri=self.uri,
            status=self.status,
            reason=self.reason,
            message=self.message,
        )


class ResultsSerializer(object):
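    """
    Reads job results out of a teuthology archive directory and renders
    them as JSON.
    """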
    yamls = ('orig.config.yaml', 'config.yaml', 'info.yaml', 'summary.yaml')

    def __init__(self, archive_base):
        self.archive_base = archive_base

    def json_for_job(self, run_name, job_id, pretty=False):
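        """
        Merge the job's YAML files (in self.yamls order) into a single dict
        and return it serialized as JSON.
        """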
        job_archive_dir = os.path.join(self.archive_base,
                                       run_name,
                                       job_id)
        job_info = {}
        for yaml_name in self.yamls:
            yaml_path = os.path.join(job_archive_dir, yaml_name)
            if not os.path.exists(yaml_path):
                continue
            with open(yaml_path) as yaml_file:
                partial_info = yaml.safe_load(yaml_file)
            if partial_info is not None:
                job_info.update(partial_info)

        if 'job_id' not in job_info:
            job_info['job_id'] = job_id

        if pretty:
            job_json = json.dumps(job_info, sort_keys=True, indent=4)
        else:
            job_json = json.dumps(job_info)

        return job_json

    def jobs_for_run(self, run_name):
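        """
        Return a dict mapping job_id to the job's archive directory for the
        given run.
        """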
        archive_dir = os.path.join(self.archive_base, run_name)
        if not os.path.isdir(archive_dir):
            return {}
        jobs = {}
        for item in os.listdir(archive_dir):
            if not re.match(r'\d+$', item):
                continue
            job_id = item
            job_dir = os.path.join(archive_dir, job_id)
            if os.path.isdir(job_dir):
                jobs[job_id] = job_dir
        return jobs

    @property
    def all_runs(self):
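        """
        A list of the names of all runs found in the archive directory.
        """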
        archive_base = self.archive_base
        if not os.path.isdir(archive_base):
            return []
        runs = []
        for run_name in os.listdir(archive_base):
            if not os.path.isdir(os.path.join(archive_base, run_name)):
                continue
            runs.append(run_name)
        return runs


class ResultsPoster(object):
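    """
    Submits runs and their jobs from an archive directory to the results
    server over HTTP, optionally remembering the last successfully
    submitted run.
    """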
    last_run_file = 'last_successful_run'

    def __init__(self, archive_base, base_uri=None, save=False, refresh=False):
        self.archive_base = archive_base
        self.base_uri = base_uri or config.results_server
        self.base_uri = self.base_uri.rstrip('/')
        self.serializer = ResultsSerializer(archive_base)
        self.save_last_run = save
        self.refresh = refresh

    def _do_request(self, uri, method, json_):
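        """
        Send json_ to uri with the given HTTP method and return
        (status, message, content). Raises RequestFailedError on any
        non-200 response unless the server reports the object already
        exists.
        """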
        response, content = self.http.request(
            uri, method, json_, headers={'content-type': 'application/json'},
        )

        try:
            content_obj = json.loads(content)
        except ValueError:
            content_obj = {}

        message = content_obj.get('message', '')

        if response.status != 200 and not message.endswith('already exists'):
            raise RequestFailedError(uri, response, content)

        return response.status, message, content

    def post_json(self, uri, json_):
        return self._do_request(uri, 'POST', json_)

    def put_json(self, uri, json_):
        return self._do_request(uri, 'PUT', json_)

    def submit_all_runs(self):
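        """
        Submit every run in the archive. If save_last_run is set, skip runs
        up to and including the last successful submission.
        """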
        all_runs = self.serializer.all_runs
        last_run = self.last_run
        if self.save_last_run and last_run and last_run in all_runs:
            next_index = all_runs.index(last_run) + 1
            runs = all_runs[next_index:]
        else:
            runs = all_runs
        num_runs = len(runs)
        num_jobs = 0
        log.info("Posting %s runs", num_runs)
        for run in runs:
            job_count = self.submit_run(run)
            num_jobs += job_count
            if self.save_last_run:
                self.last_run = run
        del self.last_run
        log.info("Total: %s jobs in %s runs", num_jobs, num_runs)

    def submit_runs(self, run_names):
        num_jobs = 0
        for run_name in run_names:
            num_jobs += self.submit_run(run_name)
        log.info("Total: %s jobs in %s runs", num_jobs, len(run_names))

    def submit_run(self, run_name):
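        """
        Submit a single run and all of its jobs; returns the number of jobs
        found for the run.
        """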
        jobs = self.serializer.jobs_for_run(run_name)
        log.info("{name} {jobs} jobs".format(
            name=run_name,
            jobs=len(jobs),
        ))
        if jobs:
            run_uri = "{base}/runs/".format(base=self.base_uri)
            run_json = json.dumps({'name': run_name})
            status, msg, content = self.post_json(run_uri, run_json)
            if status == 200:
                self.submit_jobs(run_name, jobs.keys())
            elif msg.endswith('already exists'):
                if self.refresh:
                    self.submit_jobs(run_name, jobs.keys())
                else:
                    log.info(" already present; skipped")
        else:
            log.debug(" no jobs; skipped")
        return len(jobs)

    def submit_jobs(self, run_name, job_ids):
        for job_id in job_ids:
            self.submit_job(run_name, job_id)

    def submit_job(self, run_name, job_id):
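        """
        POST a single job's JSON to the run's URI; if the job already
        exists on the server, PUT it instead to update it.
        """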
        run_uri = "{base}/runs/{name}/".format(
            base=self.base_uri, name=run_name)
        job_json = self.serializer.json_for_job(run_name, job_id)
        status, msg, content = self.post_json(run_uri, job_json)

        if msg.endswith('already exists'):
            job_uri = os.path.join(run_uri, job_id, '')
            status, msg, content = self.put_json(job_uri, job_json)
        return job_id

    @property
    def last_run(self):
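        """
        The name of the last successfully submitted run, cached in memory
        and persisted in self.last_run_file.
        """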
        if hasattr(self, '_last_run'):
            return self._last_run
        elif os.path.exists(self.last_run_file):
            with open(self.last_run_file) as f:
                self._last_run = f.read().strip()
            return self._last_run

    @last_run.setter
    def last_run(self, run_name):
        self._last_run = run_name
        with open(self.last_run_file, 'w') as f:
            f.write(run_name)

    @last_run.deleter
    def last_run(self):
        self._last_run = None
        if os.path.exists(self.last_run_file):
            os.remove(self.last_run_file)

    @property
    def http(self):
        if hasattr(self, '_http'):
            return self._http
        self._http = httplib2.Http()
        return self._http


def parse_args():
    parser = argparse.ArgumentParser(
        description="Submit test results to a web service")
    parser.add_argument('-a', '--archive', required=True,
                        help="The base archive directory")
    parser.add_argument('-r', '--run', nargs='*',
                        help="A run (or list of runs) to submit")
    parser.add_argument('--all-runs', action='store_true',
                        help="Submit all runs in the archive")
    parser.add_argument('-R', '--refresh', action='store_true', default=False,
                        help=dedent("""Re-push any runs already stored on the
                        server. Note that this may be slow."""))
    parser.add_argument('-s', '--server',
                        help=dedent("""The server to post results to, e.g.
                        http://localhost:8080/ . May also be
                        specified in ~/.teuthology.yaml as
                        'results_server'"""))
    parser.add_argument('-n', '--no-save', dest='save',
                        action='store_false', default=True,
                        help=dedent("""By default, when submitting all runs, we
                        remember the last successful submission in a file
                        called 'last_successful_run'. Pass this flag to disable
                        that behavior."""))
    args = parser.parse_args()
    return args


def main():
    args = parse_args()
    archive_base = os.path.abspath(os.path.expanduser(args.archive))
    poster = ResultsPoster(archive_base, base_uri=args.server, save=args.save,
                           refresh=args.refresh)
    if args.run and len(args.run) > 1:
        poster.submit_runs(args.run)
    elif args.run:
        poster.submit_run(args.run[0])
    elif args.all_runs:
        poster.submit_all_runs()


if __name__ == "__main__":
    main()