mm/damon: use damon_sz_region() in appropriate place
authorXin Hao <xhao@linux.alibaba.com>
Tue, 27 Sep 2022 00:19:46 +0000 (08:19 +0800)
committerAndrew Morton <akpm@linux-foundation.org>
Thu, 13 Oct 2022 01:51:49 +0000 (18:51 -0700)
In many places we can use damon_sz_region() instead of "r->ar.end -
r->ar.start".

Link: https://lkml.kernel.org/r/20220927001946.85375-2-xhao@linux.alibaba.com
Signed-off-by: Xin Hao <xhao@linux.alibaba.com>
Suggested-by: SeongJae Park <sj@kernel.org>
Reviewed-by: SeongJae Park <sj@kernel.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
mm/damon/core.c
mm/damon/vaddr.c

index 5b9e0d585aef2066242a3d13a4312b24125baa05..515ac4e52a113f1053913d7e4fb2fe2974e0e879 100644 (file)
@@ -490,7 +490,7 @@ static unsigned long damon_region_sz_limit(struct damon_ctx *ctx)
 
        damon_for_each_target(t, ctx) {
                damon_for_each_region(r, t)
-                       sz += r->ar.end - r->ar.start;
+                       sz += damon_sz_region(r);
        }
 
        if (ctx->attrs.min_nr_regions)
@@ -673,7 +673,7 @@ static bool __damos_valid_target(struct damon_region *r, struct damos *s)
 {
        unsigned long sz;
 
-       sz = r->ar.end - r->ar.start;
+       sz = damon_sz_region(r);
        return s->pattern.min_sz_region <= sz &&
                sz <= s->pattern.max_sz_region &&
                s->pattern.min_nr_accesses <= r->nr_accesses &&
@@ -701,7 +701,7 @@ static void damon_do_apply_schemes(struct damon_ctx *c,
 
        damon_for_each_scheme(s, c) {
                struct damos_quota *quota = &s->quota;
-               unsigned long sz = r->ar.end - r->ar.start;
+               unsigned long sz = damon_sz_region(r);
                struct timespec64 begin, end;
                unsigned long sz_applied = 0;
 
@@ -730,14 +730,14 @@ static void damon_do_apply_schemes(struct damon_ctx *c,
                                sz = ALIGN_DOWN(quota->charge_addr_from -
                                                r->ar.start, DAMON_MIN_REGION);
                                if (!sz) {
-                                       if (r->ar.end - r->ar.start <=
-                                                       DAMON_MIN_REGION)
+                                       if (damon_sz_region(r) <=
+                                           DAMON_MIN_REGION)
                                                continue;
                                        sz = DAMON_MIN_REGION;
                                }
                                damon_split_region_at(t, r, sz);
                                r = damon_next_region(r);
-                               sz = r->ar.end - r->ar.start;
+                               sz = damon_sz_region(r);
                        }
                        quota->charge_target_from = NULL;
                        quota->charge_addr_from = 0;
@@ -842,8 +842,7 @@ static void kdamond_apply_schemes(struct damon_ctx *c)
                                        continue;
                                score = c->ops.get_scheme_score(
                                                c, t, r, s);
-                               quota->histogram[score] +=
-                                       r->ar.end - r->ar.start;
+                               quota->histogram[score] += damon_sz_region(r);
                                if (score > max_score)
                                        max_score = score;
                        }
@@ -957,7 +956,7 @@ static void damon_split_regions_of(struct damon_target *t, int nr_subs)
        int i;
 
        damon_for_each_region_safe(r, next, t) {
-               sz_region = r->ar.end - r->ar.start;
+               sz_region = damon_sz_region(r);
 
                for (i = 0; i < nr_subs - 1 &&
                                sz_region > 2 * DAMON_MIN_REGION; i++) {
index ea94e0b2c31132896b249a1b8ef281b4702ec7d4..15f03df66db60e5db517bc73413a72e834f53563 100644 (file)
@@ -72,7 +72,7 @@ static int damon_va_evenly_split_region(struct damon_target *t,
                return -EINVAL;
 
        orig_end = r->ar.end;
-       sz_orig = r->ar.end - r->ar.start;
+       sz_orig = damon_sz_region(r);
        sz_piece = ALIGN_DOWN(sz_orig / nr_pieces, DAMON_MIN_REGION);
 
        if (!sz_piece)
@@ -618,7 +618,7 @@ static unsigned long damos_madvise(struct damon_target *target,
 {
        struct mm_struct *mm;
        unsigned long start = PAGE_ALIGN(r->ar.start);
-       unsigned long len = PAGE_ALIGN(r->ar.end - r->ar.start);
+       unsigned long len = PAGE_ALIGN(damon_sz_region(r));
        unsigned long applied;
 
        mm = damon_get_mm(target);