@@ -638,6 +638,11 @@ static u64 div64_likely32(u64 divident, unsigned long divisor)
 
 #define WMULT_SHIFT	32
 
+/*
+ * Shift right and round:
+ */
+#define RSR(x, y) (((x) + (1UL << ((y) - 1))) >> (y))
+
 static unsigned long
 calc_delta_mine(unsigned long delta_exec, unsigned long weight,
 		struct load_weight *lw)
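
For reference, a minimal stand-alone sketch (not part of the patch) of what RSR computes: it adds half of 2^y to x before shifting, so the result is x/2^y rounded to the nearest integer rather than truncated toward zero.

/* rsr_demo.c: hypothetical user-space demo of the RSR macro above */
#include <stdio.h>

#define RSR(x, y) (((x) + (1UL << ((y) - 1))) >> (y))

int main(void)
{
	unsigned long x = 7;			/* 7 / 4 = 1.75 */

	printf("truncated: %lu\n", x >> 2);	/* 7 >> 2 == 1 */
	printf("rounded:   %lu\n", RSR(x, 2));	/* (7 + 2) >> 2 == 2 */
	return 0;
}
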
@@ -645,18 +650,17 @@ calc_delta_mine(unsigned long delta_exec, unsigned long weight,
 {
 	u64 tmp;
 
 	if (unlikely(!lw->inv_weight))
-		lw->inv_weight = WMULT_CONST / lw->weight;
+		lw->inv_weight = (WMULT_CONST - lw->weight/2) / lw->weight + 1;
 
 	tmp = (u64)delta_exec * weight;
 	/*
 	 * Check whether we'd overflow the 64-bit multiplication:
 	 */
-	if (unlikely(tmp > WMULT_CONST)) {
-		tmp = ((tmp >> WMULT_SHIFT/2) * lw->inv_weight)
-			>> (WMULT_SHIFT/2);
-	} else {
-		tmp = (tmp * lw->inv_weight) >> WMULT_SHIFT;
-	}
+	if (unlikely(tmp > WMULT_CONST))
+		tmp = RSR(RSR(tmp, WMULT_SHIFT/2) * lw->inv_weight,
+			WMULT_SHIFT/2);
+	else
+		tmp = RSR(tmp * lw->inv_weight, WMULT_SHIFT);
 
 	return (unsigned long)min(tmp, (u64)(unsigned long)LONG_MAX);
 }
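
A user-space sketch of why the rounded reciprocal matters. Assumptions not shown in this hunk: WMULT_CONST is roughly 2^32 - 1, and weight is the nice-0 load of 1024. With the old truncated inv_weight, scaling a 1 ms delta by the nice-0 weight comes back one nanosecond short; the rounded reciprocal plus rounded shift returns it exactly.

/* inv_demo.c: hypothetical demo; the WMULT_CONST value is an assumption */
#include <stdio.h>
#include <inttypes.h>

#define WMULT_CONST	0xffffffffUL	/* assumed ~2^32 - 1 */
#define WMULT_SHIFT	32
#define RSR(x, y) (((x) + (1UL << ((y) - 1))) >> (y))

int main(void)
{
	unsigned long weight = 1024;		/* assumed nice-0 load */
	unsigned long inv_old = WMULT_CONST / weight;
	unsigned long inv_new = (WMULT_CONST - weight/2) / weight + 1;
	uint64_t tmp = (uint64_t)1000000 * weight;	/* 1 ms * weight */

	printf("inv_weight: old=%lu new=%lu\n", inv_old, inv_new);
	printf("delta: old=%" PRIu64 " new=%" PRIu64 " (exact: 1000000)\n",
	       (tmp * inv_old) >> WMULT_SHIFT,		/* truncates: 999999  */
	       RSR(tmp * inv_new, WMULT_SHIFT));	/* rounds:   1000000 */
	return 0;
}
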