| 1 | #!/usr/bin/python |
| 2 | # |
| 3 | # fio_latency2csv.py |
| 4 | # |
| 5 | # This tool converts fio's json+ completion latency data to CSV format. |
| 6 | # For example: |
| 7 | # |
| 8 | # fio_latency2csv.py fio-jsonplus.output fio-latency.csv |
| 9 | # |
| 10 | |
| 11 | import os |
| 12 | import json |
| 13 | import argparse |
| 14 | |
| 15 | |
def parse_args():
    """Build the command-line parser and return the parsed arguments.

    Expects two positional arguments: the fio json+ source file and the
    destination filename stub for the generated CSV output.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('source',
                        help='fio json+ output file containing completion '
                             'latency data')
    parser.add_argument('dest',
                        help='destination file stub for latency data in CSV '
                             'format. job number will be appended to filename')
    return parser.parse_args()
| 27 | |
| 28 | |
| 29 | # from stat.c |
| 30 | def plat_idx_to_val(idx, FIO_IO_U_PLAT_BITS=6, FIO_IO_U_PLAT_VAL=64): |
| 31 | # MSB <= (FIO_IO_U_PLAT_BITS-1), cannot be rounded off. Use |
| 32 | # all bits of the sample as index |
| 33 | if (idx < (FIO_IO_U_PLAT_VAL << 1)): |
| 34 | return idx |
| 35 | |
| 36 | # Find the group and compute the minimum value of that group |
| 37 | error_bits = (idx >> FIO_IO_U_PLAT_BITS) - 1 |
| 38 | base = 1 << (error_bits + FIO_IO_U_PLAT_BITS) |
| 39 | |
| 40 | # Find its bucket number of the group |
| 41 | k = idx % FIO_IO_U_PLAT_VAL |
| 42 | |
| 43 | # Return the mean of the range of the bucket |
| 44 | return (base + ((k + 0.5) * (1 << error_bits))) |
| 45 | |
| 46 | |
def percentile(idx, run_total):
    """Return the cumulative fraction of samples at or below bucket idx.

    idx       -- bucket index into the cumulative histogram
    run_total -- running totals of sample counts, one entry per bucket;
                 the last entry is the total number of samples

    Returns 0 when the histogram contains no samples at all.
    """
    total = run_total[-1]
    if total == 0:
        return 0

    # Bug fix: the original indexed with an undefined name 'x', raising
    # NameError whenever total > 0. The intended index is 'idx'.
    return float(run_total[idx]) / total
| 53 | |
| 54 | |
if __name__ == '__main__':
    args = parse_args()

    # Slurp the whole json+ document into memory at once.
    with open(args.source, 'r') as source:
        jsondata = json.loads(source.read())

    ddir_list = ['read', 'write', 'trim']
    const_list = ['FIO_IO_U_PLAT_NR', 'FIO_IO_U_PLAT_BITS',
                  'FIO_IO_U_PLAT_VAL']

    bins = {}       # per-direction bucket counts, keyed by stringified index
    bin_const = {}  # per-direction histogram-layout constants
    run_total = {}  # per-direction cumulative sample counts

    for jobnum, job in enumerate(jsondata['jobs']):
        prev_ddir = None
        for ddir in ddir_list:
            bins[ddir] = job[ddir]['clat']['bins']

            # Pull the layout constants out of the bucket dict, checking
            # that every direction reports the same histogram layout.
            bin_const[ddir] = {}
            for const in const_list:
                bin_const[ddir][const] = bins[ddir].pop(const)
                if prev_ddir:
                    assert bin_const[ddir][const] == bin_const[prev_ddir][const]
            prev_ddir = ddir

            # Build the running total of samples across buckets.
            nr_buckets = bin_const[ddir]['FIO_IO_U_PLAT_NR']
            totals = [0] * nr_buckets
            totals[0] = bins[ddir]['0']
            for i in range(1, nr_buckets):
                totals[i] = totals[i-1] + bins[ddir][str(i)]
            run_total[ddir] = totals

        # One CSV file per job: <stub>_job<N><ext>
        stub, ext = os.path.splitext(args.dest)
        outfile = stub + '_job' + str(jobnum) + ext

        with open(outfile, 'w') as output:
            # Header row: latency column followed by one column per direction.
            output.write("clat (usec),")
            for ddir in ddir_list:
                output.write("{0},".format(ddir))
            output.write("\n")

            # One row per bucket: latency value, then the cumulative
            # percentile for each direction.
            for i in range(bin_const['read']['FIO_IO_U_PLAT_NR']):
                output.write("{0},".format(plat_idx_to_val(i,
                             bin_const['read']['FIO_IO_U_PLAT_BITS'],
                             bin_const['read']['FIO_IO_U_PLAT_VAL'])))
                for ddir in ddir_list:
                    output.write("{0},".format(percentile(i, run_total[ddir])))
                output.write("\n")