# Writing to 2 files that share duplicate blocks.
# The dedupe working set is spread uniformly such that when
# each job chooses to perform a dedup operation it will
# regenerate a buffer from the global space.
# If you test the dedup ratio on either file by itself, the result
# is likely lower than the ratio of the two files combined, since
# duplicates shared across the two files are not counted.
#
# Use `./t/fio-dedupe <file> -C 1 -c 1 -b 4096` to test the total
# data reduction ratio.
#
# Full example of a test run:
# $ ./fio ./examples/dedupe-global.fio
#
# Checking the ratio on a and b individually (fio names each job's file
# <jobname>.<jobnum>.<filenum>, so jobs [a] and [b] produce a.0.0 and b.0.0):
# $ ./t/fio-dedupe a.0.0 -C 1 -c 1 -b 4096
#
# Extents=25600, Unique extents=16817 Duplicated extents=5735
# De-dupe ratio: 1:0.52
# De-dupe working set at least: 22.40%
# Fio setting: dedupe_percentage=34
# Unique capacity 33MB
#
# $ ./t/fio-dedupe b.0.0 -C 1 -c 1 -b 4096
# Extents=25600, Unique extents=17009 Duplicated extents=5636
# De-dupe ratio: 1:0.51
# De-dupe working set at least: 22.02%
# Fio setting: dedupe_percentage=34
# Unique capacity 34MB
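#
# How these estimates appear to relate to the raw counts (inferred from
# the numbers above, not documented fio-dedupe output):
#   dedupe_percentage ~= (Extents - Unique extents) / Extents,
#     e.g. (25600 - 16817) / 25600 ~= 34% for a.0.0
#   working set ~= Duplicated extents / Extents,
#     e.g. 5735 / 25600 ~= 22.40% for a.0.0
#   de-dupe ratio ~= 1 : (Extents - Unique extents) / Unique extents,
#     e.g. 8783 / 16817 ~= 0.52 for a.0.0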
#
# Combining the files:
# $ cat a.0.0 > c.0.0
# $ cat b.0.0 >> c.0.0
#
# Checking the data reduction ratio on the combined file:
# $ ./t/fio-dedupe c.0.0 -C 1 -c 1 -b 4096
# Extents=51200, Unique extents=25747 Duplicated extents=11028
# De-dupe ratio: 1:0.99
# De-dupe working set at least: 21.54%
# Fio setting: dedupe_percentage=50
# Unique capacity 51MB
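#
# Scanning the combined file recovers the configured value:
# (51200 - 25747) / 51200 ~= 50%, matching dedupe_percentage=50 below,
# because both files draw their duplicate blocks from the same shared
# global working set.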
#
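# Option notes: dedupe_mode=working_set generates duplicate blocks from a
# fixed working set rather than by repeating the previous write;
# dedupe_working_set_percentage=50 sizes that set at half of the 100m job
# size; dedupe_global=1 shares the set across all jobs that enable it, so
# [a] and [b] duplicate each other's blocks; dedupe_percentage=50 makes
# roughly half of all written blocks duplicates; and
# buffer_compress_percentage=50 makes buffers ~50% compressible, which is
# why the "Unique capacity" reported above is roughly half the unique
# data size.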
[global]
ioengine=libaio
iodepth=256
size=100m
dedupe_mode=working_set
dedupe_global=1
dedupe_percentage=50
blocksize=4k
rw=write
buffer_compress_percentage=50
dedupe_working_set_percentage=50

[a]

[b]