@@ -75,6 +75,7 @@ static unsigned int freeze_events_kernel = MMCR0_FCS;
 #define MMCR0_FCHV		0
 #define MMCR0_PMCjCE		MMCR0_PMCnCE
+#define MMCR0_PMAO		0
 #define SPRN_MMCRA		SPRN_MMCR2
 #define MMCRA_SAMPLE_ENABLE	0
@@ -852,7 +853,7 @@ static void write_mmcr0(struct cpu_hw_events *cpuhw, unsigned long mmcr0)
 static void power_pmu_disable(struct pmu *pmu)
 {
 	struct cpu_hw_events *cpuhw;
-	unsigned long flags;
+	unsigned long flags, val;
 
 	if (!ppmu)
 		return;
@@ -860,9 +861,6 @@ static void power_pmu_disable(struct pmu *pmu)
 	cpuhw = &__get_cpu_var(cpu_hw_events);
 
 	if (!cpuhw->disabled) {
-		cpuhw->disabled = 1;
-		cpuhw->n_added = 0;
-
 		/*
 		 * Check if we ever enabled the PMU on this cpu.
 		 */
@@ -871,6 +869,21 @@ static void power_pmu_disable(struct pmu *pmu)
|
|
|
cpuhw->pmcs_enabled = 1;
|
|
|
}
|
|
|
|
|
|
+ /*
|
|
|
+ * Set the 'freeze counters' bit, clear PMAO.
|
|
|
+ */
|
|
|
+ val = mfspr(SPRN_MMCR0);
|
|
|
+ val |= MMCR0_FC;
|
|
|
+ val &= ~MMCR0_PMAO;
|
|
|
+
|
|
|
+ /*
|
|
|
+ * The barrier is to make sure the mtspr has been
|
|
|
+ * executed and the PMU has frozen the events etc.
|
|
|
+ * before we return.
|
|
|
+ */
|
|
|
+ write_mmcr0(cpuhw, val);
|
|
|
+ mb();
|
|
|
+
|
|
|
/*
|
|
|
* Disable instruction sampling if it was enabled
|
|
|
*/
|
|
@@ -880,14 +893,8 @@ static void power_pmu_disable(struct pmu *pmu)
 			mb();
 		}
 
-		/*
-		 * Set the 'freeze counters' bit.
-		 * The barrier is to make sure the mtspr has been
-		 * executed and the PMU has frozen the events
-		 * before we return.
-		 */
-		write_mmcr0(cpuhw, mfspr(SPRN_MMCR0) | MMCR0_FC);
-		mb();
+		cpuhw->disabled = 1;
+		cpuhw->n_added = 0;
 	}
 	local_irq_restore(flags);
 }