@@ -7290,7 +7290,7 @@ static inline bool cfs_rq_has_blocked(struct cfs_rq *cfs_rq)
 	return false;
 }
 
-static inline bool others_rqs_have_blocked(struct rq *rq)
+static inline bool others_have_blocked(struct rq *rq)
 {
 	if (READ_ONCE(rq->avg_rt.util_avg))
 		return true;
@@ -7298,6 +7298,11 @@ static inline bool others_rqs_have_blocked(struct rq *rq)
 	if (READ_ONCE(rq->avg_dl.util_avg))
 		return true;
 
+#if defined(CONFIG_IRQ_TIME_ACCOUNTING) || defined(CONFIG_PARAVIRT_TIME_ACCOUNTING)
+	if (READ_ONCE(rq->avg_irq.util_avg))
+		return true;
+#endif
+
 	return false;
 }
 
@@ -7362,8 +7367,9 @@ static void update_blocked_averages(int cpu)
 	}
 	update_rt_rq_load_avg(rq_clock_task(rq), rq, 0);
 	update_dl_rq_load_avg(rq_clock_task(rq), rq, 0);
+	update_irq_load_avg(rq, 0);
 	/* Don't need periodic decay once load/util_avg are null */
-	if (others_rqs_have_blocked(rq))
+	if (others_have_blocked(rq))
 		done = false;
 
 #ifdef CONFIG_NO_HZ_COMMON
@@ -7432,9 +7438,10 @@ static inline void update_blocked_averages(int cpu)
 	update_cfs_rq_load_avg(cfs_rq_clock_task(cfs_rq), cfs_rq);
 	update_rt_rq_load_avg(rq_clock_task(rq), rq, 0);
 	update_dl_rq_load_avg(rq_clock_task(rq), rq, 0);
+	update_irq_load_avg(rq, 0);
 #ifdef CONFIG_NO_HZ_COMMON
 	rq->last_blocked_load_update_tick = jiffies;
-	if (!cfs_rq_has_blocked(cfs_rq) && !others_rqs_have_blocked(rq))
+	if (!cfs_rq_has_blocked(cfs_rq) && !others_have_blocked(rq))
 		rq->has_blocked_load = 0;
 #endif
 	rq_unlock_irqrestore(rq, &rf);