author     Kinsey Ho <[email protected]>        2024-09-05 00:30:53 +0000
committer  Andrew Morton <[email protected]>    2024-09-09 16:39:16 -0700
commit     ec0db74b4b1f249ffca4df450f54c17573114045 (patch)
tree       70c06f61013d633a5de5a2b532c4e7f34597b270 /include/linux
parent     3d150e31a1f62f0b39bdd8b968f563f8f3915d7a (diff)
mm: restart if multiple traversals raced
Currently, if multiple reclaimers race on the same position, the reclaimers which detect the race still reclaim from the same memcg. Instead, a reclaimer which detects the race should move on to the next memcg in the hierarchy.

So, in the case where multiple traversals race, jump back to the start of the mem_cgroup_iter() function to find the next memcg in the hierarchy to reclaim from.

Link: https://lkml.kernel.org/r/[email protected]
Reported-by: [email protected]
Closes: https://lore.kernel.org/[email protected]/
Signed-off-by: Kinsey Ho <[email protected]>
Reviewed-by: T.J. Mercier <[email protected]>
Cc: Johannes Weiner <[email protected]>
Cc: Michal Hocko <[email protected]>
Cc: Michal Koutný <[email protected]>
Cc: Muchun Song <[email protected]>
Cc: Roman Gushchin <[email protected]>
Cc: Shakeel Butt <[email protected]>
Cc: Tejun Heo <[email protected]>
Cc: Yosry Ahmed <[email protected]>
Cc: Zefan Li <[email protected]>
Cc: Hugh Dickins <[email protected]>
Signed-off-by: Andrew Morton <[email protected]>
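
The mm/memcontrol.c half of this fix is outside the include/linux diffstat shown below. As a rough illustration of the restart-on-race behaviour the message describes, here is a minimal userspace C model; the names (iter_position, iter_next, NR_MEMCGS) are hypothetical and C11 atomics stand in for the kernel's cmpxchg(). It is a sketch of the idea, not the actual kernel code.

/*
 * Sketch only -- not the real mm/memcontrol.c change. A shared iterator
 * position is advanced with compare-and-swap; a "reclaimer" that loses the
 * race restarts from the top and re-reads the position instead of reclaiming
 * from the memcg it had already picked.
 */
#include <stdatomic.h>
#include <stdio.h>

#define NR_MEMCGS 4

static _Atomic int iter_position = 0;   /* stands in for iter->position */

/* Return the index of the memcg this caller should reclaim from. */
static int iter_next(void)
{
	int pos, next;

restart:
	pos = atomic_load(&iter_position);   /* current shared position */
	next = (pos + 1) % NR_MEMCGS;         /* hierarchy successor (simplified) */

	/*
	 * Try to publish the new position. If another traversal raced and
	 * already moved it, do not reclaim from the same memcg again; go back
	 * to the start and pick the successor of the updated position.
	 */
	if (!atomic_compare_exchange_strong(&iter_position, &pos, next))
		goto restart;

	return next;
}

int main(void)
{
	for (int i = 0; i < 6; i++)
		printf("reclaim from memcg %d\n", iter_next());
	return 0;
}

The key point mirrors the commit message: a loser of the compare-and-swap re-reads the shared position and advances from there, rather than reclaiming from the memcg it had already chosen.
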
Diffstat (limited to 'include/linux')
-rw-r--r--  include/linux/memcontrol.h  4
1 file changed, 2 insertions, 2 deletions
diff --git a/include/linux/memcontrol.h b/include/linux/memcontrol.h
index fe05fdb92779..2ef94c74847d 100644
--- a/include/linux/memcontrol.h
+++ b/include/linux/memcontrol.h
@@ -57,7 +57,7 @@ enum memcg_memory_event {
 struct mem_cgroup_reclaim_cookie {
 	pg_data_t *pgdat;
-	unsigned int generation;
+	int generation;
 };
#ifdef CONFIG_MEMCG
@@ -78,7 +78,7 @@ struct lruvec_stats;
 struct mem_cgroup_reclaim_iter {
 	struct mem_cgroup *position;
 	/* scan generation, increased every round-trip */
-	unsigned int generation;
+	atomic_t generation;
 };
/*