import subprocess
import multiprocessing
from pathlib import Path
+from statsmodels.sandbox.stats.runs import runstest_1samp
class FioTest():
return file_data, success
+ def get_file_fail(self, filename):
+ """Safely read a file and fail the test upon error.
+
+ Unlike get_file(), which returns a (data, success) pair, this helper
+ records any failure on the test object itself: on OSError it appends
+ the reason to self.failure_reason, sets self.passed = False, and
+ returns None. Callers therefore only need to check the return value.
+ """
+ file_data = None
+
+ try:
+ with open(filename, "r") as output_file:
+ file_data = output_file.read()
+ except OSError:
+ # Mark the test failed and say why; file_data stays None.
+ self.failure_reason += " unable to read file {0}".format(filename)
+ self.passed = False
+
+ return file_data
+
def check_result(self):
"""Check fio job results."""
if 'json' not in self.output_format:
return
- file_data, success = self.get_file(os.path.join(self.test_dir, self.fio_output))
- if not success:
- self.failure_reason = "{0} unable to open output file,".format(self.failure_reason)
- self.passed = False
+ file_data = self.get_file_fail(os.path.join(self.test_dir, self.fio_output))
+ if not file_data:
return
#
class FioJobTest_t0008(FioJobTest):
"""Test consists of fio test job t0008
Confirm that read['io_kbytes'] = 32768 and that
- write['io_kbytes'] ~ 16568
+ write['io_kbytes'] ~ 16384
- I did runs with fio-ae2fafc8 and saw write['io_kbytes'] values of
- 16585, 16588. With two runs of fio-3.16 I obtained 16568"""
+ This is a 50/50 seq read/write workload. Since fio flips a coin to
+ determine whether to issue a read or a write, total bytes written will not
+ be exactly 16384K. But total bytes read will be exactly 32768K because
+ reads will include the initial phase as well as the verify phase where all
+ the blocks originally written will be read."""
def check_result(self):
super(FioJobTest_t0008, self).check_result()
if not self.passed:
return
- ratio = self.json_data['jobs'][0]['write']['io_kbytes'] / 16568
+ ratio = self.json_data['jobs'][0]['write']['io_kbytes'] / 16384
logging.debug("Test %d: ratio: %f", self.testnum, ratio)
- if ratio < 0.99 or ratio > 1.01:
+ if ratio < 0.97 or ratio > 1.03:
self.failure_reason = "{0} bytes written mismatch,".format(self.failure_reason)
self.passed = False
if self.json_data['jobs'][0]['read']['io_kbytes'] != 32768:
for i in range(1, 4):
filename = os.path.join(self.test_dir, "{0}_iops.{1}.log".format(os.path.basename(
self.fio_job), i))
- file_data, success = self.get_file(filename)
-
- if not success:
- self.failure_reason = "{0} unable to open output file,".format(self.failure_reason)
- self.passed = False
+ file_data = self.get_file_fail(filename)
+ if not file_data:
return
iops_files.append(file_data.splitlines())
for i in range(1, 4):
filename = os.path.join(self.test_dir, "{0}_iops.{1}.log".format(os.path.basename(
self.fio_job), i))
- file_data, success = self.get_file(filename)
-
- if not success:
- self.failure_reason = "{0} unable to open output file,".format(self.failure_reason)
- self.passed = False
+ file_data = self.get_file_fail(filename)
+ if not file_data:
return
iops_files.append(file_data.splitlines())
super(FioJobTest_t0019, self).check_result()
bw_log_filename = os.path.join(self.test_dir, "test_bw.log")
- file_data, success = self.get_file(bw_log_filename)
- if not success:
- self.failure_reason += " unable to open output file {0}".format(bw_log_filename)
- self.passed = False
+ file_data = self.get_file_fail(bw_log_filename)
+ if not file_data:
return
log_lines = file_data.split('\n')
super(FioJobTest_t0020, self).check_result()
bw_log_filename = os.path.join(self.test_dir, "test_bw.log")
- file_data, success = self.get_file(bw_log_filename)
- if not success:
- self.failure_reason += " unable to open output file {0}".format(bw_log_filename)
- self.passed = False
+ file_data = self.get_file_fail(bw_log_filename)
+ if not file_data:
return
log_lines = file_data.split('\n')
- seq_count = 0
- offsets = set()
+ offsets = []
prev = int(log_lines[0].split(',')[4])
for line in log_lines[1:]:
- offsets.add(prev/4096)
+ offsets.append(prev/4096)
if len(line.strip()) == 0:
continue
cur = int(line.split(',')[4])
- if cur - prev == 4096:
- seq_count += 1
prev = cur
- # 10 is an arbitrary threshold
- if seq_count > 10:
- self.passed = False
- self.failure_reason = "too many ({0}) consecutive offsets".format(seq_count)
-
if len(offsets) != 256:
self.passed = False
self.failure_reason += " number of offsets is {0} instead of 256".format(len(offsets))
self.passed = False
self.failure_reason += " missing offset {0}".format(i*4096)
+ (z, p) = runstest_1samp(list(offsets))
+ if p < 0.05:
+ self.passed = False
+ self.failure_reason += f" runs test failed with p = {p}"
+
class FioJobTest_t0022(FioJobTest):
"""Test consists of fio test job t0022"""
super(FioJobTest_t0022, self).check_result()
bw_log_filename = os.path.join(self.test_dir, "test_bw.log")
- file_data, success = self.get_file(bw_log_filename)
- if not success:
- self.failure_reason += " unable to open output file {0}".format(bw_log_filename)
- self.passed = False
+ file_data = self.get_file_fail(bw_log_filename)
+ if not file_data:
return
log_lines = file_data.split('\n')
"""Make sure that trims are followed by writes of the same size at the same offset."""
bw_log_filename = os.path.join(self.test_dir, filename)
- file_data, success = self.get_file(bw_log_filename)
- if not success:
- self.failure_reason += " unable to open output file {0}".format(bw_log_filename)
- self.passed = False
+ file_data = self.get_file_fail(bw_log_filename)
+ if not file_data:
return
log_lines = file_data.split('\n')
def check_all_offsets(self, filename, sectorsize, filesize):
"""Make sure all offsets were touched."""
- file_data, success = self.get_file(os.path.join(self.test_dir, filename))
- if not success:
- self.passed = False
- self.failure_reason = " could not open {0}".format(filename)
+ file_data = self.get_file_fail(os.path.join(self.test_dir, filename))
+ if not file_data:
return
log_lines = file_data.split('\n')
self.check_all_offsets("bssplit_bw.log", 512, filesize)
+class FioJobTest_t0025(FioJobTest):
+ """Test that experimental verify reads back the written data pattern."""
+ def check_result(self):
+ """Confirm the verify phase read back exactly 128 KiB."""
+ super(FioJobTest_t0025, self).check_result()
+
+ if not self.passed:
+ return
+
+ # The job is expected to read back exactly 128 KiB during verify;
+ # any other amount means the verify pass did not run as intended.
+ if self.json_data['jobs'][0]['read']['io_kbytes'] != 128:
+ self.passed = False
+
+class FioJobTest_t0027(FioJobTest):
+ """Test that fio reproduces a supplied pattern file byte-for-byte.
+
+ setup() generates a random 16 KiB pattern file; check_result() then
+ compares fio's output file against that pattern. (Presumably the
+ t0027.fio job writes using the pattern file — the job file itself is
+ not visible here; verify against t0027.fio.)
+ """
+ def setup(self, *args, **kws):
+ """Create the random pattern file before the fio job runs."""
+ super(FioJobTest_t0027, self).setup(*args, **kws)
+ self.pattern_file = os.path.join(self.test_dir, "t0027.pattern")
+ self.output_file = os.path.join(self.test_dir, "t0027file")
+ # 16 KiB of random data the job is expected to reproduce verbatim.
+ self.pattern = os.urandom(16 << 10)
+ with open(self.pattern_file, "wb") as f:
+ f.write(self.pattern)
+
+ def check_result(self):
+ """Fail unless the output file exactly matches the pattern."""
+ super(FioJobTest_t0027, self).check_result()
+
+ if not self.passed:
+ return
+
+ with open(self.output_file, "rb") as f:
+ data = f.read()
+
+ if data != self.pattern:
+ self.passed = False
+
class FioJobTest_iops_rate(FioJobTest):
- """Test consists of fio test job t0009
+ """Test consists of fio test job t0011
Confirm that job0 iops == 1000
and that job1_iops / job0_iops ~ 8
With two runs of fio-3.16 I observed a ratio of 8.3"""
'pre_success': None,
'requirements': [],
},
+ # t0025: experimental-verify pattern check; needs JSON output so
+ # check_result() can inspect read io_kbytes.
+ {
+ 'test_id': 25,
+ 'test_class': FioJobTest_t0025,
+ 'job': 't0025.fio',
+ 'success': SUCCESS_DEFAULT,
+ 'pre_job': None,
+ 'pre_success': None,
+ 'output_format': 'json',
+ 'requirements': [],
+ },
+ # t0026: plain job run; excluded on Windows.
+ {
+ 'test_id': 26,
+ 'test_class': FioJobTest,
+ 'job': 't0026.fio',
+ 'success': SUCCESS_DEFAULT,
+ 'pre_job': None,
+ 'pre_success': None,
+ 'requirements': [Requirements.not_windows],
+ },
+ # t0027: pattern-file round trip; FioJobTest_t0027 generates the
+ # pattern in setup() and compares the output in check_result().
+ {
+ 'test_id': 27,
+ 'test_class': FioJobTest_t0027,
+ 'job': 't0027.fio',
+ 'success': SUCCESS_DEFAULT,
+ 'pre_job': None,
+ 'pre_success': None,
+ 'requirements': [],
+ },
+ # t0028: plain job run; success is determined by the default criteria.
+ {
+ 'test_id': 28,
+ 'test_class': FioJobTest,
+ 'job': 't0028-c6cade16.fio',
+ 'success': SUCCESS_DEFAULT,
+ 'pre_job': None,
+ 'pre_success': None,
+ 'requirements': [],
+ },
{
'test_id': 1000,
'test_class': FioExeTest,
'success': SUCCESS_DEFAULT,
'requirements': [],
},
+ # t1013: external script test; runs t/random_seed.py with the path to
+ # the fio binary under test substituted for {fio_path}.
+ {
+ 'test_id': 1013,
+ 'test_class': FioExeTest,
+ 'exe': 't/random_seed.py',
+ 'parameters': ['-f', '{fio_path}'],
+ 'success': SUCCESS_DEFAULT,
+ 'requirements': [],
+ },
]