Merge pull request #8989 from flyd1005/wip-fix-python-warnings

cleanup: python: remove warnings of 'trailing whitespace' and 'new blank line at EOF'
Sage Weil 2016-05-09 09:50:46 -04:00
commit 162b4fb7d0
13 changed files with 27 additions and 30 deletions

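The two warnings named in the commit message can be scrubbed mechanically. Below is a minimal sketch of that kind of cleanup, written for illustration only; it is not the tool used for this commit, and the helper name strip_whitespace_warnings is invented:

import sys

def strip_whitespace_warnings(path):
    # Read the file, dropping trailing whitespace from every line.
    with open(path) as f:
        lines = [line.rstrip() for line in f]
    # Drop blank lines at the end of the file ("new blank line at EOF").
    while lines and lines[-1] == "":
        lines.pop()
    # Write the cleaned content back with exactly one final newline.
    cleaned = "\n".join(lines) + "\n" if lines else ""
    with open(path, "w") as f:
        f.write(cleaned)

if __name__ == "__main__":
    for path in sys.argv[1:]:
        strip_whitespace_warnings(path)
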
@@ -96,7 +96,7 @@ class StateMachineRenderer(object):
def get_state(self, line):
if "boost::statechart::state_machine" in line:
tokens = re.search(
r"boost::statechart::state_machine<\s*(\w*),\s*(\w*)\s*>",
r"boost::statechart::state_machine<\s*(\w*),\s*(\w*)\s*>",
line)
if tokens is None:
raise "Error: malformed state_machine line: " + line

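For context, the regex touched in the hunk above pulls the state-machine type and its initial state out of a boost::statechart declaration. A minimal sketch of that match follows; the input line is an invented example, not taken from the Ceph sources:

import re

# Hypothetical declaration similar to what the renderer scans for.
line = "struct RecoveryMachine : boost::statechart::state_machine< RecoveryMachine, Initial > {"
tokens = re.search(r"boost::statechart::state_machine<\s*(\w*),\s*(\w*)\s*>", line)
if tokens is not None:
    # group(1) is the machine type, group(2) its initial state.
    print(tokens.group(1), tokens.group(2))  # prints: RecoveryMachine Initial
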
@@ -364,4 +364,3 @@ def main():
if __name__ == '__main__':
main()

@@ -12,7 +12,7 @@ def setup_app(config):
logging=getattr(config, 'logging', {}),
hooks=[TransactionHook(model.start,
model.start,
-model.commit, 
+model.commit,
model.rollback,
model.clear)],
**app_conf

@@ -23,7 +23,7 @@ class RootController(RestController):
result = db.get_versions(args[0])
except Exception as e:
return self.fail(status_code=500, msg="Internal Server Error")
if result is None:
return self.fail(400, msg="Invalid UUID")
elif len(args) is 2 or len(args) is 3 and args[2] == '':
@@ -51,23 +51,23 @@ class RootController(RestController):
return self.fail(status_code=422, msg=msg)
except Exception as e:
return self.fail(status_code=500, msg="Internal Server Error : " + str(e))
response.status = 201
return "CREATED"
@expose()
def delete(self, *args, **kwargs):
if 'uuid' not in kwargs:
return self.fail(status_code=400, msg="Required uuid parameter")
uuid = kwargs['uuid']
try:
status = db.delete_uuid(uuid)
except Exception as e:
return self.fail(status_code=500, msg="Internal Server Error")
if status is not None:
return self.fail(status_code=status['status'], msg=status['msg'])
response.status=200
return "DELETED"

@@ -1,5 +1,5 @@
from pecan.jsonify import jsonify
-from ceph_brag.model import db 
+from ceph_brag.model import db
@jsonify.register(db.version_info)
def jsonify_version(vi):
@@ -80,7 +80,7 @@ def jsonify_ceph_versions(value):
@jsonify.register(db.sysinfo)
def jsonify_sysinfo(value):
retval = {}
if value.os:
retval['os_info'] = value.os
if value.kern_vers:
@@ -104,7 +104,7 @@ def jsonify_brag(b):
'description':b.ci.description,
'email':b.ci.contact_email,
'name':b.ci.cluster_name
-} 
+}
return dict(uuid=b.ci.uuid,
cluster_creation_date=str(b.ci.cluster_creation_date),
components_count=b.comps,

@@ -26,4 +26,3 @@ def rollback():
def clear():
Session.remove()

@@ -132,7 +132,7 @@ class brag(object):
self.ci = Session.query(cluster_info).filter_by(uuid=uuid).first()
if self.ci is not None:
self.vi = Session.query(version_info).filter_by(cluster_id=self.ci.index, version_number=version_number).first()
if self.ci is not None and self.vi is not None:
self.comps = Session.query(components_info).filter_by(vid=self.vi.index).first()
self.crush = Session.query(crush_types).filter_by(vid=self.vi.index).all()
@@ -145,7 +145,7 @@ def put_new_version(data):
ci = Session.query(cluster_info).filter_by(uuid=info['uuid']).first()
if ci is None:
dt = datetime.strptime(info['cluster_creation_date'], "%Y-%m-%d %H:%M:%S.%f")
-ci = cluster_info(uuid=info['uuid'], 
+ci = cluster_info(uuid=info['uuid'],
organization=info['ownership']['organization'],
contact_email=info['ownership']['email'],
cluster_name=info['ownership']['name'],
@@ -155,12 +155,12 @@ def put_new_version(data):
Session.add(ci)
Session.commit()
else:
-ci.num_versions += 1 
+ci.num_versions += 1
return ci
def add_version_info(ci):
-vi = version_info(cluster_id=ci.index, 
+vi = version_info(cluster_id=ci.index,
version_number=ci.num_versions,
version_date=datetime.now())
Session.add(vi)
@@ -181,7 +181,7 @@ def put_new_version(data):
def add_crush_types(vi):
for c in info['crush_types']:
-Session.add(crush_types(vid=vi.index, 
+Session.add(crush_types(vid=vi.index,
crush_type=c['type'],
crush_count=c['count']))
@@ -199,7 +199,7 @@ def put_new_version(data):
k,v = si.popitem()
if k == 'os_info':
for o in v:
-Session.add(os_info(vid=vi.index, 
+Session.add(os_info(vid=vi.index,
os=o['os'],
count=o['count']))
elif k == 'kernel_versions':
@@ -235,13 +235,13 @@ def put_new_version(data):
ci = add_cluster_info()
add_version_info(ci)
-vi = Session.query(version_info).filter_by(cluster_id=ci.index, 
+vi = Session.query(version_info).filter_by(cluster_id=ci.index,
version_number=ci.num_versions).first()
add_components_info(vi)
add_crush_types(vi)
add_pools_info(vi)
add_sys_info(vi)
def delete_uuid(uuid):
ci = Session.query(cluster_info).filter_by(uuid=uuid).first()
if ci is None:

@@ -24,7 +24,7 @@ class TestRootController(FunctionalTest):
assert response.status_int == 422
def test_5_put_incomplete_json(self):
-response = self.app.request('/', method='PUT', body='{\"uuid\":\"adfs-12312ad\"}', 
+response = self.app.request('/', method='PUT', body='{\"uuid\":\"adfs-12312ad\"}',
expect_errors=True)
assert response.status_int == 422

@@ -49,7 +49,7 @@ docker run -v $toplevel:$toplevel -w $(pwd) --user $(id -u) {name} bash -x {name
init = open(name + '/init').read().strip()
shutil.rmtree(name)
return init
class TestCephDetectInit(testtools.TestCase):

@@ -145,7 +145,7 @@ def parse_region_map(map):
for region in map['regions']:
url = urlparse(region['val']['endpoints'][0])
regions.update({region['key']: url.netloc})
return regions
def str2bool(s):
@@ -210,7 +210,7 @@ def init_config():
def generate_app(config):
# The Flask App
app = Flask(__name__)
# Get the RGW Region Map
region_map = parse_region_map(do_rgw_request('config'))

@@ -1293,4 +1293,3 @@ def json_command(cluster, target=('mon', ''), prefix=None, argdict=None,
raise
return ret, outbuf, outs

@@ -132,7 +132,7 @@ def test4(cluster):
"command: key create --uid %s, returned user_id %s" %
(tid_uid, outj['user_id']))
skj = outj['swift_keys'][0]
-if skj['secret_key'] != swift_secret: 
+if skj['secret_key'] != swift_secret:
raise TestException(
"command: key create --uid %s, returned swift key %s" %
(tid_uid, skj['secret_key']))

@@ -205,7 +205,7 @@ class RGWRealm:
log(20, 'current meta sync status=', meta_sync_status_json)
sync_status = json.loads(meta_sync_status_json)
global_sync_status=sync_status['sync_status']['info']['status']
num_shards=sync_status['sync_status']['info']['num_shards']
@@ -290,7 +290,7 @@ class RGWRealm:
log(20, 'current data sync status=', data_sync_status_json)
sync_status = json.loads(data_sync_status_json)
global_sync_status=sync_status['sync_status']['info']['status']
num_shards=sync_status['sync_status']['info']['num_shards']