2018-03-09 16:44:34 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
from __future__ import absolute_import
|
|
|
|
|
2018-03-21 16:15:29 +00:00
|
|
|
import logging
|
|
|
|
|
2018-03-22 11:08:18 +00:00
|
|
|
import six
|
|
|
|
|
2018-04-24 16:32:54 +00:00
|
|
|
from .helper import DashboardTestCase
|
2018-03-09 16:44:34 +00:00
|
|
|
|
2018-03-21 16:15:29 +00:00
|
|
|
# Module-level logger used by the pool tests (e.g. to dump the pool/data
# payloads when an assertion inside _pool_create fails).
log = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class PoolTest(DashboardTestCase):
    """End-to-end tests for the dashboard's ``/api/pool`` REST endpoints.

    Runs against a live Ceph cluster through :class:`DashboardTestCase`.
    Pools and the erasure-code profile created by the tests are cleaned up
    in :meth:`tearDownClass`.
    """

    # Role granted to the default test user; individual tests override the
    # effective permissions via the ``RunAs`` decorator below.
    AUTH_ROLES = ['pool-manager']

    @classmethod
    def tearDownClass(cls):
        """Remove pools/profiles the tests may have left behind (best effort)."""
        super(PoolTest, cls).tearDownClass()
        for name in ['dashboard_pool1', 'dashboard_pool2', 'dashboard_pool3']:
            cls._ceph_cmd(['osd', 'pool', 'delete', name, name, '--yes-i-really-really-mean-it'])
        cls._ceph_cmd(['osd', 'erasure-code-profile', 'rm', 'ecprofile'])

    @DashboardTestCase.RunAs('test', 'test', [{'pool': ['create', 'update', 'delete']}])
    def test_read_access_permissions(self):
        """GET requests must be rejected (403) without the 'read' permission."""
        self._get('/api/pool')
        self.assertStatus(403)
        self._get('/api/pool/bla')
        self.assertStatus(403)

    @DashboardTestCase.RunAs('test', 'test', [{'pool': ['read', 'update', 'delete']}])
    def test_create_access_permissions(self):
        """POST requests must be rejected (403) without the 'create' permission."""
        self._post('/api/pool/', {})
        self.assertStatus(403)

    @DashboardTestCase.RunAs('test', 'test', [{'pool': ['read', 'create', 'update']}])
    def test_delete_access_permissions(self):
        """DELETE requests must be rejected (403) without the 'delete' permission."""
        self._delete('/api/pool/ddd')
        self.assertStatus(403)

    def test_pool_list(self):
        """Plain pool listing: one entry per cluster pool, no stats included."""
        data = self._get("/api/pool")
        self.assertStatus(200)

        cluster_pools = self.ceph_cluster.mon_manager.list_pools()
        self.assertEqual(len(cluster_pools), len(data))
        for pool in data:
            self.assertIn('pool_name', pool)
            self.assertIn('type', pool)
            self.assertIn('application_metadata', pool)
            self.assertIsInstance(pool['application_metadata'], list)
            self.assertIn('flags', pool)
            self.assertIn('flags_names', pool)
            # Stats are only serialized when explicitly requested (?stats=true).
            self.assertNotIn('stats', pool)
            self.assertIn(pool['pool_name'], cluster_pools)

    def test_pool_list_attrs(self):
        """The ``attrs`` query parameter restricts the returned attributes."""
        data = self._get("/api/pool?attrs=type,flags")
        self.assertStatus(200)

        cluster_pools = self.ceph_cluster.mon_manager.list_pools()
        self.assertEqual(len(cluster_pools), len(data))
        for pool in data:
            self.assertIn('pool_name', pool)
            self.assertIn('type', pool)
            self.assertIn('flags', pool)
            # Attributes not listed in ``attrs`` must be absent.
            self.assertNotIn('flags_names', pool)
            self.assertNotIn('stats', pool)
            self.assertIn(pool['pool_name'], cluster_pools)

    def test_pool_list_stats(self):
        """``stats=true`` adds a 'stats' attribute to every pool entry."""
        data = self._get("/api/pool?stats=true")
        self.assertStatus(200)

        cluster_pools = self.ceph_cluster.mon_manager.list_pools()
        self.assertEqual(len(cluster_pools), len(data))
        for pool in data:
            self.assertIn('pool_name', pool)
            self.assertIn('type', pool)
            self.assertIn('application_metadata', pool)
            self.assertIn('flags', pool)
            self.assertIn('stats', pool)
            self.assertIn('flags_names', pool)
            self.assertIn(pool['pool_name'], cluster_pools)

    def test_pool_get(self):
        """Single-pool GET honors both ``stats`` and ``attrs`` parameters."""
        cluster_pools = self.ceph_cluster.mon_manager.list_pools()
        pool = self._get("/api/pool/{}?stats=true&attrs=type,flags,stats"
                         .format(cluster_pools[0]))
        self.assertEqual(pool['pool_name'], cluster_pools[0])
        self.assertIn('type', pool)
        self.assertIn('flags', pool)
        self.assertIn('stats', pool)
        # 'flags_names' was not requested via ``attrs``, so it must be absent.
        self.assertNotIn('flags_names', pool)

    def _pool_create(self, data):
        """Create a pool via POST, verify the stored pool against *data*,
        then delete it again.

        *data* is the request payload; each key is checked against the
        corresponding attribute of the pool returned by the API (with
        type coercion for pg_num / compression options).
        """
        try:
            self._post('/api/pool/', data)
            self.assertStatus(201)

            pool = self._get("/api/pool/" + data['pool'])
            self.assertStatus(200)
            try:
                for k, v in data.items():
                    if k == 'pool_type':
                        # Request key 'pool_type' maps to response key 'type'.
                        self.assertEqual(pool['type'], data['pool_type'])
                    elif k == 'pg_num':
                        # Submitted as a string; stored as an int.
                        self.assertEqual(pool[k], int(v), '{}: {} != {}'.format(k, pool[k], v))
                    elif k == 'application_metadata':
                        # Submitted comma-separated; returned as a list.
                        self.assertIsInstance(pool[k], list)
                        self.assertEqual(pool[k],
                                         data['application_metadata'].split(','))
                    elif k == 'pool':
                        # Request key 'pool' maps to response key 'pool_name'.
                        self.assertEqual(pool['pool_name'], v)
                    elif k in ['compression_mode', 'compression_algorithm']:
                        self.assertEqual(pool['options'][k], data[k])
                    elif k == 'compression_max_blob_size':
                        self.assertEqual(pool['options'][k], int(data[k]))
                    elif k == 'compression_required_ratio':
                        self.assertEqual(pool['options'][k], float(data[k]))
                    else:
                        self.assertEqual(pool[k], v, '{}: {} != {}'.format(k, pool[k], v))

            except Exception:
                # Dump the pool the API returned before re-raising, so the
                # failing attribute can be diagnosed from the logs.
                log.exception("test_pool_create: pool=%s", pool)
                raise

            self._delete("/api/pool/" + data['pool'])
            self.assertStatus(204)
        except Exception:
            # Dump the request payload that triggered the failure.
            log.exception("test_pool_create: data=%s", data)
            raise

    def test_pool_create(self):
        """Create replicated, erasure-coded and compressed pools end-to-end."""
        self._ceph_cmd(['osd', 'crush', 'rule', 'create-erasure', 'ecrule'])
        self._ceph_cmd(
            ['osd', 'erasure-code-profile', 'set', 'ecprofile', 'crush-failure-domain=osd'])
        pools = [{
            'pool': 'dashboard_pool1',
            'pg_num': '10',
            'pool_type': 'replicated',
            'application_metadata': 'rbd',
        }, {
            'pool': 'dashboard_pool2',
            'pg_num': '10',
            'pool_type': 'erasure',
            'erasure_code_profile': 'ecprofile',
            'crush_rule': 'ecrule',
        }, {
            'pool': 'dashboard_pool3',
            'pg_num': '10',
            'pool_type': 'replicated',
            'compression_algorithm': 'zstd',
            'compression_mode': 'aggressive',
            'compression_max_blob_size': "10000000",
            'compression_required_ratio': '0.8',
        }]
        for data in pools:
            self._pool_create(data)

    def test_pool_create_fail(self):
        """Creating a pool with a nonexistent crush rule returns a 400 error."""
        data = {'pool_type': u'replicated', 'rule_name': u'dnf', 'pg_num': u'8', 'pool': u'sadfs'}
        self._post('/api/pool/', data)
        self.assertStatus(400)
        self.assertJsonBody({
            'component': 'pool',
            'code': "2",
            'detail': "[errno -2] specified rule dnf doesn't exist"
        })

    def test_pool_info(self):
        """The _info endpoint exposes the metadata needed by the create form."""
        info_data = self._get("/api/pool/_info")
        self.assertEqual(set(info_data),
                         {'pool_names', 'crush_rules_replicated', 'crush_rules_erasure',
                          'is_all_bluestore', 'compression_algorithms', 'compression_modes',
                          'osd_count'})
        self.assertTrue(all(isinstance(n, six.string_types) for n in info_data['pool_names']))
        self.assertTrue(
            all(isinstance(n, dict) for n in info_data['crush_rules_replicated']))
        self.assertTrue(
            all(isinstance(n, dict) for n in info_data['crush_rules_erasure']))
        self.assertIsInstance(info_data['is_all_bluestore'], bool)
        self.assertIsInstance(info_data['osd_count'], int)
        self.assertTrue(
            all(isinstance(n, six.string_types) for n in info_data['compression_algorithms']))
        self.assertTrue(
            all(isinstance(n, six.string_types) for n in info_data['compression_modes']))