mgr/dashboard: nfs-ganesha: controller API documentation
Signed-off-by: Ricardo Dias <rdias@suse.com>
commit 297d47640d
parent c1112928b7
@@ -7,7 +7,7 @@ import cherrypy
 import cephfs

 from . import ApiController, RESTController, UiApiController, BaseController, \
-    Endpoint, Task, ReadPermission
+    Endpoint, Task, ReadPermission, ControllerDoc, EndpointDoc
 from .. import logger
 from ..security import Scope
 from ..services.cephfs import CephFS
@@ -17,6 +17,63 @@ from ..services.ganesha import Ganesha, GaneshaConf, NFSException
 from ..services.rgw_client import RgwClient


+# documentation helpers
+EXPORT_SCHEMA = {
+    'export_id': (int, 'Export ID'),
+    'path': (str, 'Export path'),
+    'cluster_id': (str, 'Cluster identifier'),
+    'daemons': ([str], 'List of NFS Ganesha daemons identifiers'),
+    'pseudo': (str, 'Pseudo FS path'),
+    'tag': (str, 'NFSv3 export tag'),
+    'access_type': (str, 'Export access type'),
+    'squash': (str, 'Export squash policy'),
+    'security_label': (str, 'Security label'),
+    'protocols': ([int], 'List of protocol types'),
+    'transports': ([str], 'List of transport types'),
+    'fsal': ({
+        'name': (str, 'name of FSAL'),
+        'user_id': (str, 'CephX user id', True),
+        'filesystem': (str, 'CephFS filesystem ID', True),
+        'sec_label_xattr': (str, 'Name of xattr for security label', True),
+        'rgw_user_id': (str, 'RGW user id', True)
+    }, 'FSAL configuration'),
+    'clients': ([{
+        'addresses': ([str], 'list of IP addresses'),
+        'access_type': (str, 'Client access type'),
+        'squash': (str, 'Client squash policy')
+    }], 'List of client configurations'),
+}
+
+
+CREATE_EXPORT_SCHEMA = {
+    'path': (str, 'Export path'),
+    'cluster_id': (str, 'Cluster identifier'),
+    'daemons': ([str], 'List of NFS Ganesha daemons identifiers'),
+    'pseudo': (str, 'Pseudo FS path'),
+    'tag': (str, 'NFSv3 export tag'),
+    'access_type': (str, 'Export access type'),
+    'squash': (str, 'Export squash policy'),
+    'security_label': (str, 'Security label'),
+    'protocols': ([int], 'List of protocol types'),
+    'transports': ([str], 'List of transport types'),
+    'fsal': ({
+        'name': (str, 'name of FSAL'),
+        'user_id': (str, 'CephX user id', True),
+        'filesystem': (str, 'CephFS filesystem ID', True),
+        'sec_label_xattr': (str, 'Name of xattr for security label', True),
+        'rgw_user_id': (str, 'RGW user id', True)
+    }, 'FSAL configuration'),
+    'clients': ([{
+        'addresses': ([str], 'list of IP addresses'),
+        'access_type': (str, 'Client access type'),
+        'squash': (str, 'Client squash policy')
+    }], 'List of client configurations'),
+    'reload_daemons': (bool,
+                       'Trigger reload of NFS-Ganesha daemons configuration',
+                       True)
+}
+
+
 # pylint: disable=not-callable
 def NfsTask(name, metadata, wait_for):
     def composed_decorator(func):
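Each entry in these schemas maps a field name to a (type, description) tuple, with an optional third element (True) marking the field as optional; EndpointDoc uses them to document request parameters and response bodies. As a purely illustrative sketch (all values are placeholders invented here, not taken from this commit), an export object shaped after EXPORT_SCHEMA could look like:

# A hypothetical export dict shaped after EXPORT_SCHEMA; values are made up.
example_export = {
    'export_id': 1,
    'path': '/',
    'cluster_id': '_default_',
    'daemons': ['node1', 'node2'],
    'pseudo': '/cephfs',
    'tag': 'cephfs_root',
    'access_type': 'RW',
    'squash': 'no_root_squash',
    'security_label': 'none',
    'protocols': [3, 4],
    'transports': ['TCP', 'UDP'],
    'fsal': {
        'name': 'CEPH',          # FSAL name; the optional fields below carry the third tuple element True
        'user_id': 'admin',
        'filesystem': 'cephfs',
        'sec_label_xattr': None,
    },
    'clients': [{
        'addresses': ['192.168.0.0/24'],
        'access_type': 'RO',
        'squash': 'root_squash',
    }],
}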
@@ -27,8 +84,14 @@ def NfsTask(name, metadata, wait_for):


 @ApiController('/nfs-ganesha', Scope.NFS_GANESHA)
+@ControllerDoc("NFS-Ganesha Management API", "NFS-Ganesha")
 class NFSGanesha(RESTController):

+    @EndpointDoc("Status of NFS-Ganesha management feature",
+                 responses={200: {
+                     'available': (bool, "Is API available?"),
+                     'message': (str, "Error message")
+                 }})
     @Endpoint()
     @ReadPermission
     def status(self):
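The documented status() response tells a client whether the NFS-Ganesha management feature is usable. A minimal sketch of querying it, assuming the dashboard's usual /api URL prefix, a hypothetical host, and a token already obtained from the dashboard's auth endpoint (all assumptions, not part of this commit):

import requests

# Hypothetical host, port and token; adjust for the actual deployment.
resp = requests.get('https://dashboard.example:8443/api/nfs-ganesha/status',
                    headers={'Authorization': 'Bearer <token>'},
                    verify=False)
# Expected shape per the schema above, e.g. {'available': True, 'message': None}
print(resp.json())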
@@ -43,9 +106,12 @@ class NFSGanesha(RESTController):


 @ApiController('/nfs-ganesha/export', Scope.NFS_GANESHA)
+@ControllerDoc(group="NFS-Ganesha")
 class NFSGaneshaExports(RESTController):
     RESOURCE_ID = "cluster_id/export_id"

+    @EndpointDoc("List all NFS-Ganesha exports",
+                 responses={200: [EXPORT_SCHEMA]})
     def list(self):
         result = []
         for cluster_id in Ganesha.get_ganesha_clusters():
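With RESOURCE_ID = "cluster_id/export_id", a single export is addressed by both identifiers in the URL, while the collection endpoint returns a list of EXPORT_SCHEMA objects. A sketch under the same assumptions as above (hypothetical host, /api prefix, assumed token auth):

import requests

session = requests.Session()
session.headers['Authorization'] = 'Bearer <token>'  # assumed token auth
session.verify = False
base = 'https://dashboard.example:8443/api/nfs-ganesha/export'  # hypothetical host

exports = session.get(base).json()                                  # -> list of EXPORT_SCHEMA objects
one = session.get('{}/{}/{}'.format(base, '_default_', 1)).json()   # -> single EXPORT_SCHEMA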
@@ -56,6 +122,9 @@ class NFSGaneshaExports(RESTController):

     @NfsTask('create', {'path': '{path}', 'fsal': '{fsal.name}',
                         'cluster_id': '{cluster_id}'}, 2.0)
+    @EndpointDoc("Creates a new NFS-Ganesha export",
+                 parameters=CREATE_EXPORT_SCHEMA,
+                 responses={201: EXPORT_SCHEMA})
     def create(self, path, cluster_id, daemons, pseudo, tag, access_type,
                squash, security_label, protocols, transports, fsal, clients,
                reload_daemons=True):
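create() takes every field of CREATE_EXPORT_SCHEMA as a parameter (only reload_daemons has a default) and, per the documented responses, answers with the new export and status 201. A hedged sketch of the corresponding request, with placeholder values throughout and the same assumed host and token:

import requests

body = {
    'path': '/', 'cluster_id': '_default_', 'daemons': ['node1'],
    'pseudo': '/cephfs', 'tag': None, 'access_type': 'RW',
    'squash': 'no_root_squash', 'security_label': 'none',
    'protocols': [4], 'transports': ['TCP'],
    'fsal': {'name': 'CEPH', 'user_id': 'admin', 'filesystem': 'cephfs'},
    'clients': [],
}
resp = requests.post('https://dashboard.example:8443/api/nfs-ganesha/export',
                     json=body,
                     headers={'Authorization': 'Bearer <token>'},
                     verify=False)
assert resp.status_code == 201  # response body follows EXPORT_SCHEMA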
@@ -83,6 +152,12 @@ class NFSGaneshaExports(RESTController):
             ganesha_conf.reload_daemons(daemons)
         return ganesha_conf.get_export(ex_id).to_dict()

+    @EndpointDoc("Get an NFS-Ganesha export",
+                 parameters={
+                     'cluster_id': (str, 'Cluster identifier'),
+                     'export_id': (int, "Export ID")
+                 },
+                 responses={200: EXPORT_SCHEMA})
     def get(self, cluster_id, export_id):
         export_id = int(export_id)
         ganesha_conf = GaneshaConf.instance(cluster_id)
@@ -92,6 +167,10 @@ class NFSGaneshaExports(RESTController):

     @NfsTask('edit', {'cluster_id': '{cluster_id}', 'export_id': '{export_id}'},
              2.0)
+    @EndpointDoc("Updates an NFS-Ganesha export",
+                 parameters=dict(export_id=(int, "Export ID"),
+                                 **CREATE_EXPORT_SCHEMA),
+                 responses={200: EXPORT_SCHEMA})
     def set(self, cluster_id, export_id, path, daemons, pseudo, tag, access_type,
             squash, security_label, protocols, transports, fsal, clients,
             reload_daemons=True):
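The update endpoint documents the same parameters as create plus the export ID, built by merging the two; the equivalent plain-Python merge is shown below (CREATE_EXPORT_SCHEMA as defined earlier in this diff):

# Same merge the decorator argument performs: CREATE_EXPORT_SCHEMA plus export_id.
set_params = dict(export_id=(int, "Export ID"), **CREATE_EXPORT_SCHEMA)
assert 'export_id' in set_params and 'path' in set_params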
@@ -131,6 +210,15 @@ class NFSGaneshaExports(RESTController):

     @NfsTask('delete', {'cluster_id': '{cluster_id}',
                         'export_id': '{export_id}'}, 2.0)
+    @EndpointDoc("Deletes an NFS-Ganesha export",
+                 parameters={
+                     'cluster_id': (str, 'Cluster identifier'),
+                     'export_id': (int, "Export ID"),
+                     'reload_daemons': (bool,
+                                        'Trigger reload of NFS-Ganesha daemons'
+                                        ' configuration',
+                                        True)
+                 })
     def delete(self, cluster_id, export_id, reload_daemons=True):
         export_id = int(export_id)
         ganesha_conf = GaneshaConf.instance(cluster_id)
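delete() identifies the export through the resource URL and accepts the optional reload_daemons flag. A sketch of the call, assuming the flag travels as a query parameter (an assumption, not spelled out in this hunk) and the same hypothetical host and token as above:

import requests

url = 'https://dashboard.example:8443/api/nfs-ganesha/export/_default_/1'  # hypothetical export
resp = requests.delete(url,
                       params={'reload_daemons': 'false'},  # assumed query-parameter form
                       headers={'Authorization': 'Bearer <token>'},
                       verify=False)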
@@ -144,8 +232,18 @@ class NFSGaneshaExports(RESTController):


 @ApiController('/nfs-ganesha/daemon')
+@ControllerDoc(group="NFS-Ganesha")
 class NFSGaneshaService(RESTController):

+    @EndpointDoc("List NFS-Ganesha daemons information",
+                 responses={200: [{
+                     'daemon_id': (str, 'Daemon identifier'),
+                     'cluster_id': (str, 'Cluster identifier'),
+                     'status': (int,
+                                'Status of daemon (1=RUNNING, 0=STOPPED, -1=ERROR',
+                                True),
+                     'desc': (str, 'Error description (if status==-1)', True)
+                 }]})
     def list(self):
         status_dict = Ganesha.get_daemons_status()
         if status_dict:
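The daemon listing returns one entry per NFS-Ganesha daemon, with status and desc documented as optional. A rough illustration of that shape, with made-up values:

# Hypothetical entries shaped after the documented response schema; values are invented.
daemons = [
    {'daemon_id': 'node1', 'cluster_id': '_default_', 'status': 1},   # running
    {'daemon_id': 'node2', 'cluster_id': '_default_', 'status': -1,
     'desc': 'failed to fetch daemon status'},                        # made-up error text
]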