Merge pull request #48664 from joscollin/wip-cephfs-top-drop-x-coord-map

cephfs-top: drop x_coord_map

Reviewed-by: Neeraj Pratap Singh <neesingh@redhat.com>
commit 21782ada80
Venky Shankar, 2022-11-21 18:54:35 +05:30, committed by GitHub


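In short: instead of precomputing an x_coord_map of (x offset, width) pairs for every column in create_top_line_and_build_coord() and threading that map through create_clients()/create_client(), the table header is now just the joined headings, and each client row is drawn with a running x offset (xp) that advances by the width of the field just written. A minimal sketch of the new positioning style (illustrative only — hypothetical column names, not the actual cephfs-top code):

ITEMS_PAD = "  "
ITEMS_PAD_LEN = len(ITEMS_PAD)

# Hypothetical headings; the real tool builds them from
# MAIN_WINDOW_TOP_LINE_ITEMS_* and the perf counter names.
COLUMNS = ["client_id", "chit(%)", "rlatavg(ms)", "wsp(MB/s)"]


def header_line(columns):
    # The header is simply the headings joined by the pad string.
    return ITEMS_PAD.join(columns)


def client_cells(columns, values):
    # Running-offset positioning: each value is placed at xp, and xp then
    # advances by the column width (heading length + pad), mirroring the
    # "xp += len(...) + ITEMS_PAD_LEN" pattern used in the patch.
    xp = 0
    cells = []
    for heading, value in zip(columns, values):
        cells.append((xp, str(value)))      # (x position, text) as addstr() would receive
        xp += len(heading) + ITEMS_PAD_LEN  # width of this column
    return cells


if __name__ == "__main__":
    print(header_line(COLUMNS))
    for x, text in client_cells(COLUMNS, [4305, 100.0, 0.5, 12.3]):
        print(f"x={x:<3d} {text}")

The upside is that the drawing code no longer has to keep a separate offset table in sync with the heading layout; the offsets fall out of the same lengths that produce the header.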
@@ -619,85 +619,61 @@ class FSTop(object):
                 return False
         return True

-    def create_top_line_and_build_coord(self):
-        xp = 0
-        x_coord_map = {}
+    def create_table_header(self):  # formerly named as top_line
         heading = []
         for item in MAIN_WINDOW_TOP_LINE_ITEMS_START:
             heading.append(item)
-            nlen = len(item) + len(ITEMS_PAD)
-            x_coord_map[item] = (xp, nlen)
-            xp += nlen
         for item, typ in MAIN_WINDOW_TOP_LINE_METRICS.items():
             if item in MAIN_WINDOW_TOP_LINE_METRICS_LEGACY:
                 continue
             it = f'{self.items(item)}{self.mtype(typ)}'
             heading.append(it)
-            nlen = len(it) + len(ITEMS_PAD)
-            x_coord_map[item] = (xp, nlen)
-            xp += nlen
             if item == "READ_IO_SIZES" or item == "WRITE_IO_SIZES":
                 # average io sizes
                 it = f'{self.avg_items(item)}{self.mtype(typ)}'
                 heading.append(it)
-                nlen = len(it) + len(ITEMS_PAD)
-                if item == "READ_IO_SIZES":
-                    x_coord_map["READ_IO_AVG"] = (xp, nlen)
-                if item == "WRITE_IO_SIZES":
-                    x_coord_map["WRITE_IO_AVG"] = (xp, nlen)
-                xp += nlen
                 # io speeds
                 it = f'{self.speed_items(item)}{self.speed_mtype(typ)}'
                 heading.append(it)
-                nlen = len(it) + len(ITEMS_PAD)
-                if item == "READ_IO_SIZES":
-                    x_coord_map["READ_IO_SPEED"] = (xp, nlen)
-                if item == "WRITE_IO_SIZES":
-                    x_coord_map["WRITE_IO_SPEED"] = (xp, nlen)
-                xp += nlen
         for item in MAIN_WINDOW_TOP_LINE_ITEMS_END:
             heading.append(item)
-            nlen = len(item) + len(ITEMS_PAD)
-            x_coord_map[item] = (xp, nlen)
-            xp += nlen
         title = ITEMS_PAD.join(heading)
         self.fsstats.addstr(self.tablehead_y, 0, title, curses.A_STANDOUT | curses.A_BOLD)
-        return x_coord_map

     def create_client(self, fs_name, client_id, metrics, counters,
-                      client_meta, x_coord_map, y_coord):
+                      client_meta, y_coord):
         global last_time
         metrics_dict.setdefault(fs_name, {})
         metrics_dict[fs_name].setdefault(client_id, {})
         cur_time = time.time()
         duration = cur_time - last_time
         last_time = cur_time
-        for item in MAIN_WINDOW_TOP_LINE_ITEMS_START:
-            coord = x_coord_map[item]
-            hlen = coord[1] - 1
+        xp = 0  # xp is incremented after each addstr to position the next incoming metrics.
+        for item in MAIN_WINDOW_TOP_LINE_ITEMS_START:  # note: the first item is ITEMS_PAD
+            hlen = len(item) + ITEMS_PAD_LEN
             if item == FS_TOP_MAIN_WINDOW_COL_CLIENT_ID:
-                self.fsstats.addstr(y_coord, coord[0],
+                self.fsstats.addstr(y_coord, xp,
                                     wrap(client_id.split('.')[1], hlen), curses.A_DIM)
             elif item == FS_TOP_MAIN_WINDOW_COL_MNT_ROOT:
                 if FSTop.has_metric(client_meta,
                                     CLIENT_METADATA_MOUNT_ROOT_KEY):
                     hlen = len(item) + ITEMS_PAD_LEN
                     self.fsstats.addstr(
-                        y_coord, coord[0],
+                        y_coord, xp,
                         wrap(client_meta[CLIENT_METADATA_MOUNT_ROOT_KEY], hlen), curses.A_DIM)
                 else:
-                    self.fsstats.addstr(y_coord, coord[0], "N/A", curses.A_DIM)
+                    self.fsstats.addstr(y_coord, xp, "N/A", curses.A_DIM)
+            xp += hlen
         cidx = 0
         for item in counters:
             if item in MAIN_WINDOW_TOP_LINE_METRICS_LEGACY:
                 cidx += 1
                 continue
-            coord = x_coord_map[item]
             m = metrics[cidx]
             key = MGR_STATS_COUNTERS[cidx]
             typ = MAIN_WINDOW_TOP_LINE_METRICS[key]
@@ -706,39 +682,36 @@ class FSTop(object):
                 if typ == MetricType.METRIC_TYPE_PERCENTAGE:
                     perc = calc_perc(m)
                     metrics_dict[fs_name][client_id][self.items(item)] = perc
-                    self.fsstats.addstr(y_coord, coord[0],
+                    self.fsstats.addstr(y_coord, xp,
                                         f'{perc}', curses.A_DIM)
+                    xp += len(f'{self.items(item)}{self.mtype(typ)}') + ITEMS_PAD_LEN
                 elif typ == MetricType.METRIC_TYPE_LATENCY:
                     lat = calc_lat(m)
                     metrics_dict[fs_name][client_id][self.items(item)] = lat
-                    self.fsstats.addstr(y_coord, coord[0],
+                    self.fsstats.addstr(y_coord, xp,
                                         f'{lat}', curses.A_DIM)
+                    xp += len(f'{self.items(item)}{self.mtype(typ)}') + ITEMS_PAD_LEN
                 elif typ == MetricType.METRIC_TYPE_STDEV:
                     stdev = calc_stdev(m)
                     metrics_dict[fs_name][client_id][self.items(item)] = stdev
-                    self.fsstats.addstr(y_coord, coord[0],
+                    self.fsstats.addstr(y_coord, xp,
                                         f'{stdev}', curses.A_DIM)
+                    xp += len(f'{self.items(item)}{self.mtype(typ)}') + ITEMS_PAD_LEN
                 elif typ == MetricType.METRIC_TYPE_SIZE:
                     size = calc_size(m)
                     metrics_dict[fs_name][client_id][self.items(item)] = size
-                    self.fsstats.addstr(y_coord, coord[0],
+                    self.fsstats.addstr(y_coord, xp,
                                         f'{size}', curses.A_DIM)
+                    xp += len(f'{self.items(item)}{self.mtype(typ)}') + ITEMS_PAD_LEN
                     # average io sizes
-                    if key == "READ_IO_SIZES":
-                        coord = x_coord_map["READ_IO_AVG"]
-                    elif key == "WRITE_IO_SIZES":
-                        coord = x_coord_map["WRITE_IO_AVG"]
                     avg_size = calc_avg_size(m)
                     metrics_dict[fs_name][client_id][self.avg_items(key)] = avg_size
-                    self.fsstats.addstr(y_coord, coord[0],
+                    self.fsstats.addstr(y_coord, xp,
                                         f'{avg_size}', curses.A_DIM)
+                    xp += len(f'{self.avg_items(item)}{self.mtype(typ)}') + ITEMS_PAD_LEN
                     # io speeds
-                    if key == "READ_IO_SIZES":
-                        coord = x_coord_map["READ_IO_SPEED"]
-                    elif key == "WRITE_IO_SIZES":
-                        coord = x_coord_map["WRITE_IO_SPEED"]
                     size = 0
                     if key == "READ_IO_SIZES":
                         if m[1] > 0:
@@ -754,20 +727,20 @@ class FSTop(object):
                             last_write_size[client_id] = m[1]
                     speed = calc_speed(abs(size), duration)
                     metrics_dict[fs_name][client_id][self.speed_items(key)] = speed
-                    self.fsstats.addstr(y_coord, coord[0],
+                    self.fsstats.addstr(y_coord, xp,
                                         f'{speed}', curses.A_DIM)
+                    xp += len(f'{self.speed_items(item)}{self.speed_mtype(typ)}') + ITEMS_PAD_LEN
                 else:
                     # display 0th element from metric tuple
-                    self.fsstats.addstr(y_coord, coord[0], f'{m[0]}', curses.A_DIM)
+                    self.fsstats.addstr(y_coord, xp, f'{m[0]}', curses.A_DIM)
+                    xp += len(f'{self.items(item)}{self.mtype(typ)}') + ITEMS_PAD_LEN
             else:
-                self.fsstats.addstr(y_coord, coord[0], "N/A", curses.A_DIM)
+                self.fsstats.addstr(y_coord, xp, "N/A", curses.A_DIM)
+                xp += len(self.items(item)) + ITEMS_PAD_LEN
             cidx += 1

         for item in MAIN_WINDOW_TOP_LINE_ITEMS_END:
-            coord = x_coord_map[item]
-            wrapLen = self.PAD_WIDTH - coord[0]
+            # always place the FS_TOP_MAIN_WINDOW_COL_MNTPT_HOST_ADDR in the
+            # last, it will be a very long string to display
+            wrapLen = self.PAD_WIDTH - xp
             if item == FS_TOP_MAIN_WINDOW_COL_MNTPT_HOST_ADDR:
                 if FSTop.has_metrics(client_meta,
                                      [CLIENT_METADATA_MOUNT_POINT_KEY,
@@ -777,12 +750,13 @@ class FSTop(object):
                         f'{client_meta[CLIENT_METADATA_HOSTNAME_KEY]}/'\
                         f'{client_meta[CLIENT_METADATA_IP_KEY]}'
                     self.fsstats.addstr(
-                        y_coord, coord[0],
+                        y_coord, xp,
                         wrap(mount_point, wrapLen), curses.A_DIM)
                 else:
-                    self.fsstats.addstr(y_coord, coord[0], "N/A", curses.A_DIM)
+                    self.fsstats.addstr(y_coord, xp, "N/A", curses.A_DIM)
+            xp += len(self.items(item)) + ITEMS_PAD_LEN

-    def create_clients(self, x_coord_map, stats_json, fs_name):
+    def create_clients(self, stats_json, fs_name):
         global metrics_dict, current_states
         counters = [m.upper() for m in stats_json[GLOBAL_COUNTERS_KEY]]
         self.tablehead_y += 2
@@ -804,8 +778,7 @@ class FSTop(object):
             self.create_client(
                 fs_name, client_id,
                 stats_json[GLOBAL_METRICS_KEY].get(fs_name, {}).get(client_id, {}),
-                counters, res.get(client_id, {}),
-                x_coord_map, self.tablehead_y)
+                counters, res.get(client_id, {}), self.tablehead_y)
             self.tablehead_y += 1

     def create_header(self, stats_json, help, screen_title="", color_id=0):
@@ -898,8 +871,8 @@ class FSTop(object):
             num_client = len(client_metadata)
             vscrollEnd += num_client
             if self.create_header(stats_json, help, screen_title, 3):
-                x_coord_map = self.create_top_line_and_build_coord()
-                self.create_clients(x_coord_map, stats_json, fs)
+                self.create_table_header()
+                self.create_clients(stats_json, fs)

             # scroll and refresh
             if cmd == curses.KEY_DOWN:
@@ -1028,8 +1001,8 @@ class FSTop(object):
                 vscrollEnd += num_client
                 if self.create_header(stats_json, help, screen_title, 2):
                     if not index:  # do it only for the first fs
-                        x_coord_map = self.create_top_line_and_build_coord()
-                    self.create_clients(x_coord_map, stats_json, fs)
+                        self.create_table_header()
+                    self.create_clients(stats_json, fs)

             # scroll and refresh
             if cmd == curses.KEY_DOWN: