@@ -312,6 +312,50 @@ EXPORT_SYMBOL_GPL(__rcu_read_unlock);
 
 #ifdef CONFIG_RCU_CPU_STALL_DETECTOR
 
+#ifdef CONFIG_RCU_CPU_STALL_VERBOSE
+
+/*
+ * Dump detailed information for all tasks blocking the current RCU
+ * grace period on the specified rcu_node structure.
+ */
+static void rcu_print_detail_task_stall_rnp(struct rcu_node *rnp)
+{
+	unsigned long flags;
+	struct list_head *lp;
+	int phase;
+	struct task_struct *t;
+
+	if (rcu_preempted_readers(rnp)) {
+		raw_spin_lock_irqsave(&rnp->lock, flags);
+		phase = rnp->gpnum & 0x1;
+		lp = &rnp->blocked_tasks[phase];
+		list_for_each_entry(t, lp, rcu_node_entry)
+			sched_show_task(t);
+		raw_spin_unlock_irqrestore(&rnp->lock, flags);
+	}
+}
+
+/*
+ * Dump detailed information for all tasks blocking the current RCU
+ * grace period.
+ */
+static void rcu_print_detail_task_stall(struct rcu_state *rsp)
+{
+	struct rcu_node *rnp = rcu_get_root(rsp);
+
+	rcu_print_detail_task_stall_rnp(rnp);
+	rcu_for_each_leaf_node(rsp, rnp)
+		rcu_print_detail_task_stall_rnp(rnp);
+}
+
+#else /* #ifdef CONFIG_RCU_CPU_STALL_VERBOSE */
+
+static void rcu_print_detail_task_stall(struct rcu_state *rsp)
+{
+}
+
+#endif /* #else #ifdef CONFIG_RCU_CPU_STALL_VERBOSE */
+
 /*
  * Scan the current list of tasks blocked within RCU read-side critical
  * sections, printing out the tid of each.
@@ -760,6 +804,14 @@ static void rcu_report_unblock_qs_rnp(struct rcu_node *rnp, unsigned long flags)
 
 #ifdef CONFIG_RCU_CPU_STALL_DETECTOR
 
+/*
+ * Because preemptable RCU does not exist, we never have to check for
+ * tasks blocked within RCU read-side critical sections.
+ */
+static void rcu_print_detail_task_stall(struct rcu_state *rsp)
+{
+}
+
 /*
  * Because preemptable RCU does not exist, we never have to check for
  * tasks blocked within RCU read-side critical sections.