# this file is responsible for submitting tests into the queue
# by generating combinations of facets found in
# https://github.com/ceph/ceph-qa-suite.git
|
import copy
|
2011-07-01 16:32:30 +00:00
|
|
|
import errno
|
2011-06-21 17:00:16 +00:00
|
|
|
import itertools
|
|
|
|
import logging
|
|
|
|
import os
|
2013-08-27 19:51:27 +00:00
|
|
|
import re
|
2011-06-21 17:00:16 +00:00
|
|
|
import subprocess
|
|
|
|
import sys
|
2011-08-26 00:11:33 +00:00
|
|
|
import yaml
|
|
|
|
|
2013-08-22 21:39:56 +00:00
|
|
|
from teuthology import lock as lock
|
2011-06-21 17:00:16 +00:00
|
|
|
|
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
|
2013-09-26 15:47:43 +00:00
|
|
|
|
2013-10-08 16:34:09 +00:00
|
|
|
def main(args):
    """
    Schedule one job per generated facet combination.

    Walks each collection directory under args.base, builds the test
    matrix with build_matrix(), filters out combinations excluded for
    the current arch / os_type / machine_type, and submits each
    remaining job via the teuthology-schedule command.  Finally queues
    a '--last-in-suite' marker job if anything was scheduled.
    """
    loglevel = logging.INFO
    if args.verbose:
        loglevel = logging.DEBUG

    logging.basicConfig(
        level=loglevel,
    )

    # Arguments shared by every teuthology-schedule invocation.
    base_arg = [
        os.path.join(os.path.dirname(sys.argv[0]), 'teuthology-schedule'),
        '--name', args.name,
        '--num', str(args.num),
        '--worker', args.worker,
    ]
    if args.verbose:
        base_arg.append('-v')
    if args.owner:
        base_arg.extend(['--owner', args.owner])

    collections = [
        (os.path.join(args.base, collection), collection)
        for collection in args.collections
    ]

    # arch/machine_type depend only on args.config, so look them up once
    # instead of once per collection (get_arch queries the lock server).
    arch = get_arch(args.config)
    machine_type = get_machine_type(args.config)

    num_jobs = 0
    for collection, collection_name in sorted(collections):
        log.debug('Collection %s in %s' % (collection_name, collection))
        configs = [(combine_path(collection_name, item[0]), item[1])
                   for item in build_matrix(collection)]
        log.info('Collection %s in %s generated %d jobs' %
                 (collection_name, collection, len(configs)))
        num_jobs += len(configs)

        for description, config in configs:
            # Read every yaml fragment, closing each file as we go
            # (the old file(a, 'r').read() leaked the handles).
            pieces = []
            for a in config:
                with open(a, 'r') as f:
                    pieces.append(f.read())
            raw_yaml = '\n'.join(pieces)

            # safe_load rather than load: these fragments are plain
            # data, and the rest of this file already uses the
            # yaml.safe_load* family.  An all-empty document parses to
            # None, hence the {} fallback.
            parsed_yaml = yaml.safe_load(raw_yaml) or {}
            os_type = parsed_yaml.get('os_type')
            exclude_arch = parsed_yaml.get('exclude_arch')
            exclude_os_type = parsed_yaml.get('exclude_os_type')

            if exclude_arch and exclude_arch == arch:
                log.info('Skipping due to excluded_arch: %s facets %s',
                         exclude_arch, description)
                continue
            if exclude_os_type and exclude_os_type == os_type:
                log.info('Skipping due to excluded_os_type: %s facets %s',
                         exclude_os_type, description)
                continue
            # We should not run multiple tests (changing distros) unless the
            # machine is a VPS.
            # Re-imaging baremetal is not yet supported.
            if machine_type != 'vps':
                if os_type and os_type != 'ubuntu':
                    log.info(
                        'Skipping due to non-ubuntu on baremetal facets %s',
                        description)
                    continue

            log.info(
                'Scheduling %s', description
            )

            arg = copy.deepcopy(base_arg)
            arg.extend([
                '--description', description,
                '--',
            ])
            arg.extend(args.config)
            arg.extend(config)

            if args.dry_run:
                log.info('dry-run: %s' % ' '.join(arg))
            else:
                subprocess.check_call(
                    args=arg,
                )

    # Queue a marker job so the suite can report overall completion.
    if num_jobs:
        arg = copy.deepcopy(base_arg)
        arg.append('--last-in-suite')
        if args.email:
            arg.extend(['--email', args.email])
        if args.timeout:
            arg.extend(['--timeout', args.timeout])
        if args.dry_run:
            log.info('dry-run: %s' % ' '.join(arg))
        else:
            subprocess.check_call(
                args=arg,
            )
|
2011-08-26 00:11:33 +00:00
|
|
|
|
2013-08-23 06:29:27 +00:00
|
|
|
|
|
|
|
def combine_path(left, right):
    """
    Join the two path components, tolerating a missing right part.

    os.path.join(a, b) doesn't like it when b is None
    """
    if not right:
        return left
    return os.path.join(left, right)
|
|
|
|
|
2013-10-08 22:10:01 +00:00
|
|
|
|
2013-08-23 06:29:27 +00:00
|
|
|
def build_matrix(path):
    """
    Return a list of items described by path

    The input is just a path.  The output is an array of (description,
    [file list]) tuples.

    For a normal file we generate a new item for the result list.

    For a directory, we (recursively) generate a new item for each
    file/dir.

    For a directory with a magic '+' file, we generate a single item
    that concatenates all files/subdirs.

    For a directory with a magic '%' file, we generate a result set
    for each item in the directory, and then do a product to generate
    a result list with all combinations.

    The final description (after recursion) for each item will look
    like a relative path.  If there was a % product, that path
    component will appear as a file with braces listing the selection
    of chosen subitems.
    """
    if os.path.isfile(path):
        # Only yaml fragments participate in the matrix.
        if path.endswith('.yaml'):
            return [(None, [path])]
    if os.path.isdir(path):
        files = sorted(os.listdir(path))
        if '+' in files:
            # concatenate items
            files.remove('+')
            out = []
            for fn in files:
                out.extend(build_matrix(os.path.join(path, fn)))
            # Flatten the per-item file lists into one list of paths so
            # the result has the same shape as every other branch.
            # (Previously this returned [a[1] for a in out] — a list of
            # lists — which downstream consumers cannot open as files.)
            return [(
                '+',
                [f for item in out for f in item[1]],
            )]
        elif '%' in files:
            # convolve items
            files.remove('%')
            sublists = []
            for fn in files:
                raw = build_matrix(os.path.join(path, fn))
                sublists.append([(combine_path(fn, item[0]), item[1])
                                 for item in raw])
            out = []
            if sublists:
                for sublist in itertools.product(*sublists):
                    # Description lists the chosen sub-item from each
                    # facet, e.g. '{a.yaml b.yaml}'.
                    name = '{' + ' '.join([item[0] for item in sublist]) + '}'
                    val = []
                    for item in sublist:
                        val.extend(item[1])
                    out.append((name, val))
            return out
        else:
            # list items
            out = []
            for fn in files:
                raw = build_matrix(os.path.join(path, fn))
                out.extend([(combine_path(fn, item[0]), item[1])
                            for item in raw])
            return out
    # Not a yaml file and not a directory: contributes nothing.
    return []
|
2013-08-23 06:29:27 +00:00
|
|
|
|
2011-06-29 19:54:53 +00:00
|
|
|
|
2013-10-08 16:54:41 +00:00
|
|
|
def ls(archive_dir, verbose):
    # Print a one-line status summary for every job directory under
    # archive_dir; with verbose, also print the failure reason.
    for j in get_jobs(archive_dir):
        job_dir = os.path.join(archive_dir, j)
        summary = {}
        try:
            # summary.yaml may hold several yaml documents; merge them
            # all into one dict.
            with file(os.path.join(job_dir, 'summary.yaml')) as f:
                g = yaml.safe_load_all(f)
                for new in g:
                    summary.update(new)
        except IOError as e:
            if e.errno == errno.ENOENT:
                # No summary.yaml yet: the job is either still running
                # or died without writing one.  Report what we can.
                print '%s ' % j,

                # pid
                try:
                    pidfile = os.path.join(job_dir, 'pid')
                    found = False
                    if os.path.isfile(pidfile):
                        pid = open(pidfile, 'r').read()
                        # The pid is live only if /proc has it AND its
                        # cmdline mentions this archive_dir (guards
                        # against pid reuse by an unrelated process).
                        if os.path.isdir("/proc/%s" % pid):
                            cmdline = open('/proc/%s/cmdline' % pid,
                                           'r').read()
                            if cmdline.find(archive_dir) >= 0:
                                print '(pid %s)' % pid,
                                found = True
                    if not found:
                        print '(no process or summary.yaml)',
                    # tail
                    tail = os.popen(
                        'tail -1 %s/%s/teuthology.log' % (archive_dir, j)
                    ).read().rstrip()
                    print tail,
                except IOError as e:
                    # pid/log files vanished while we were looking;
                    # skip this job entirely.
                    continue
                print ''
                continue
            else:
                # Any IOError other than "file not found" is unexpected.
                raise

        # Normal case: summary.yaml was present and readable.
        print "{job} {success} {owner} {desc} {duration}s".format(
            job=j,
            owner=summary.get('owner', '-'),
            desc=summary.get('description', '-'),
            success='pass' if summary.get('success', False) else 'FAIL',
            duration=int(summary.get('duration', 0)),
        )
        if verbose and 'failure_reason' in summary:
            print ' {reason}'.format(reason=summary['failure_reason'])
|
2011-08-26 00:11:33 +00:00
|
|
|
|
2013-10-08 22:10:01 +00:00
|
|
|
|
2013-08-27 19:51:27 +00:00
|
|
|
def get_jobs(archive_dir):
    """
    Return the sorted list of job directory names under archive_dir.

    A job directory is a subdirectory whose name is entirely numeric.
    """
    dir_contents = os.listdir(archive_dir)

    def is_job_dir(parent, subdir):
        # Raw string for the regex: '\d' in a plain literal is an
        # invalid escape sequence in modern Python.  Return the boolean
        # directly instead of the if/return True/return False dance.
        return (os.path.isdir(os.path.join(parent, subdir)) and
                re.match(r'\d+$', subdir) is not None)

    jobs = [job for job in dir_contents if is_job_dir(archive_dir, job)]
    return sorted(jobs)
|
|
|
|
|
|
|
|
|
2013-08-22 21:39:56 +00:00
|
|
|
def get_arch(config):
    """
    Return the architecture of the first locked machine whose type
    matches the machine_type named in any of the given yaml files.

    config: iterable of yaml file paths.
    Returns None when no yaml file names a machine_type, or no locked
    machine of that type exists.
    """
    for yamlfile in config:
        # open() in a context manager replaces the py2-only file()
        # builtin and closes the handle (the original leaked it).
        with open(yamlfile) as f:
            y = yaml.safe_load(f)
        if not y:
            # Empty yaml parses to None; guard like the other get_*
            # helpers in this file (the original crashed on y.get).
            y = {}
        machine_type = y.get('machine_type')
        if machine_type:
            fakectx = []
            locks = lock.list_locks(fakectx)
            for machine in locks:
                if machine['type'] == machine_type:
                    return machine['arch']
    return None
|
|
|
|
|
2013-08-26 16:51:43 +00:00
|
|
|
|
2013-08-22 21:39:56 +00:00
|
|
|
def get_os_type(configs):
    """
    Return the first os_type found in the given configs.

    configs: iterable of config tuples whose element at index 2 is a
    yaml file path.
    Returns None when no yaml file sets os_type.
    """
    for config in configs:
        yamlfile = config[2]
        # open() in a context manager replaces the py2-only file()
        # builtin and closes the handle (the original leaked it).
        with open(yamlfile) as f:
            y = yaml.safe_load(f)
        if not y:
            # Empty yaml parses to None.
            y = {}
        os_type = y.get('os_type')
        if os_type:
            return os_type
    return None
|
|
|
|
|
2013-08-26 16:51:43 +00:00
|
|
|
|
2013-08-22 21:39:56 +00:00
|
|
|
def get_exclude_arch(configs):
    """
    Return the first exclude_arch found in the given configs.

    configs: iterable of config tuples whose element at index 2 is a
    yaml file path.
    Returns None when no yaml file sets exclude_arch.
    """
    for config in configs:
        yamlfile = config[2]
        # open() in a context manager replaces the py2-only file()
        # builtin and closes the handle (the original leaked it).
        with open(yamlfile) as f:
            y = yaml.safe_load(f)
        if not y:
            # Empty yaml parses to None.
            y = {}
        exclude_arch = y.get('exclude_arch')
        if exclude_arch:
            return exclude_arch
    return None
|
|
|
|
|
2013-08-26 16:51:43 +00:00
|
|
|
|
2013-08-22 21:39:56 +00:00
|
|
|
def get_exclude_os_type(configs):
    """
    Return the first exclude_os_type found in the given configs.

    configs: iterable of config tuples whose element at index 2 is a
    yaml file path.
    Returns None when no yaml file sets exclude_os_type.
    """
    for config in configs:
        yamlfile = config[2]
        # open() in a context manager replaces the py2-only file()
        # builtin and closes the handle (the original leaked it).
        with open(yamlfile) as f:
            y = yaml.safe_load(f)
        if not y:
            # Empty yaml parses to None.
            y = {}
        exclude_os_type = y.get('exclude_os_type')
        if exclude_os_type:
            return exclude_os_type
    return None
|
|
|
|
|
2013-08-26 16:51:43 +00:00
|
|
|
|
2013-08-22 21:48:02 +00:00
|
|
|
def get_machine_type(config):
    """
    Return the first machine_type found in the given yaml files.

    config: iterable of yaml file paths.
    Returns None when no yaml file sets machine_type.
    """
    for yamlfile in config:
        # open() in a context manager replaces the py2-only file()
        # builtin and closes the handle (the original leaked it).
        with open(yamlfile) as f:
            y = yaml.safe_load(f)
        if not y:
            # Empty yaml parses to None.
            y = {}
        machine_type = y.get('machine_type')
        if machine_type:
            return machine_type
    return None
|