author    Bart Van Assche <[email protected]>  2019-02-14 15:00:40 -0800
committer Ingo Molnar <[email protected]>        2019-02-28 07:55:40 +0100
commit    09329d1c2024522308ca4de977fc6bba753bab1a (patch)
tree      25c46670faa60ed60830c24deb4ed9cd49494732
parent    523b113bace5e64e860d8c61d7aa25057d274753 (diff)
locking/lockdep: Reorder struct lock_class members
This patch does not change any functionality but makes the patch that frees
lock classes that are no longer in use easier to read.

Signed-off-by: Bart Van Assche <[email protected]>
Signed-off-by: Peter Zijlstra (Intel) <[email protected]>
Cc: Andrew Morton <[email protected]>
Cc: Johannes Berg <[email protected]>
Cc: Linus Torvalds <[email protected]>
Cc: Paul E. McKenney <[email protected]>
Cc: Peter Zijlstra <[email protected]>
Cc: Thomas Gleixner <[email protected]>
Cc: Waiman Long <[email protected]>
Cc: Will Deacon <[email protected]>
Cc: [email protected]
Cc: [email protected]
Link: https://lkml.kernel.org/r/[email protected]
Signed-off-by: Ingo Molnar <[email protected]>
-rw-r--r--  include/linux/lockdep.h  14
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/include/linux/lockdep.h b/include/linux/lockdep.h
index c5335df2372f..0c38bade84b7 100644
--- a/include/linux/lockdep.h
+++ b/include/linux/lockdep.h
@@ -76,6 +76,13 @@ struct lock_class {
 	 */
 	struct list_head		lock_entry;
 
+	/*
+	 * These fields represent a directed graph of lock dependencies,
+	 * to every node we attach a list of "forward" and a list of
+	 * "backward" graph nodes.
+	 */
+	struct list_head		locks_after, locks_before;
+
 	struct lockdep_subclass_key	*key;
 	unsigned int			subclass;
 	unsigned int			dep_gen_id;
@@ -87,13 +94,6 @@ struct lock_class {
 	struct stack_trace		usage_traces[XXX_LOCK_USAGE_STATES];
 
 	/*
-	 * These fields represent a directed graph of lock dependencies,
-	 * to every node we attach a list of "forward" and a list of
-	 * "backward" graph nodes.
-	 */
-	struct list_head		locks_after, locks_before;
-
-	/*
 	 * Generation counter, when doing certain classes of graph walking,
 	 * to ensure that we check one node only once:
 	 */
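
For reference, a simplified sketch of the relevant part of struct lock_class
as it looks after this reordering. This is illustrative only: field alignment,
unrelated members, and configuration-dependent details are omitted, and the
comment above lock_entry is abridged rather than quoted from the source file.

	struct lock_class {
		/* ... */

		/* entry in the global list of all lock classes (comment abridged) */
		struct list_head		lock_entry;

		/*
		 * These fields represent a directed graph of lock dependencies,
		 * to every node we attach a list of "forward" and a list of
		 * "backward" graph nodes.
		 */
		struct list_head		locks_after, locks_before;

		struct lockdep_subclass_key	*key;
		unsigned int			subclass;
		unsigned int			dep_gen_id;

		/* ... remaining members unchanged ... */
	};

The reordering moves the two dependency-list heads next to lock_entry, so the
list heads sit together near the top of the structure; per the commit message,
this makes the follow-up patch that frees lock classes that are no longer in
use easier to read.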