We need to use a 64-bit cast for the shift into user_data, and
fix the init of minv in the clat percentile calculation.
Signed-off-by: Jens Axboe <axboe@kernel.dk>
unsigned long *ovals = NULL;
bool is_last;
- *minv = -1ULL;
+ *minv = -1UL;
*maxv = 0;
ovals = malloc(len * sizeof(*ovals));
sqe->off = offset;
sqe->user_data = (unsigned long) f->fileno;
if (stats && stats_running)
- sqe->user_data |= ((unsigned long)s->clock_index << 32);
+ sqe->user_data |= ((uint64_t)s->clock_index << 32);
}
static int prep_more_ios_uring(struct submitter *s, int max_ios)