@@ -18,39 +18,31 @@
 #define MSR_ARCH_PERFMON_EVENTSEL0 0x186
 #define MSR_ARCH_PERFMON_EVENTSEL1 0x187
 
-#define ARCH_PERFMON_EVENTSEL_ENABLE (1 << 22)
-#define ARCH_PERFMON_EVENTSEL_ANY (1 << 21)
-#define ARCH_PERFMON_EVENTSEL_INT (1 << 20)
-#define ARCH_PERFMON_EVENTSEL_OS (1 << 17)
-#define ARCH_PERFMON_EVENTSEL_USR (1 << 16)
-
-/*
- * Includes eventsel and unit mask as well:
- */
-
-
-#define INTEL_ARCH_EVTSEL_MASK 0x000000FFULL
-#define INTEL_ARCH_UNIT_MASK 0x0000FF00ULL
-#define INTEL_ARCH_EDGE_MASK 0x00040000ULL
-#define INTEL_ARCH_INV_MASK 0x00800000ULL
-#define INTEL_ARCH_CNT_MASK 0xFF000000ULL
-#define INTEL_ARCH_EVENT_MASK (INTEL_ARCH_UNIT_MASK|INTEL_ARCH_EVTSEL_MASK)
-
-/*
- * filter mask to validate fixed counter events.
- * the following filters disqualify for fixed counters:
- *  - inv
- *  - edge
- *  - cnt-mask
- * The other filters are supported by fixed counters.
- * The any-thread option is supported starting with v3.
- */
-#define INTEL_ARCH_FIXED_MASK \
-	(INTEL_ARCH_CNT_MASK| \
-	 INTEL_ARCH_INV_MASK| \
-	 INTEL_ARCH_EDGE_MASK|\
-	 INTEL_ARCH_UNIT_MASK|\
-	 INTEL_ARCH_EVENT_MASK)
+#define ARCH_PERFMON_EVENTSEL_EVENT 0x000000FFULL
+#define ARCH_PERFMON_EVENTSEL_UMASK 0x0000FF00ULL
+#define ARCH_PERFMON_EVENTSEL_USR (1ULL << 16)
+#define ARCH_PERFMON_EVENTSEL_OS (1ULL << 17)
+#define ARCH_PERFMON_EVENTSEL_EDGE (1ULL << 18)
+#define ARCH_PERFMON_EVENTSEL_INT (1ULL << 20)
+#define ARCH_PERFMON_EVENTSEL_ANY (1ULL << 21)
+#define ARCH_PERFMON_EVENTSEL_ENABLE (1ULL << 22)
+#define ARCH_PERFMON_EVENTSEL_INV (1ULL << 23)
+#define ARCH_PERFMON_EVENTSEL_CMASK 0xFF000000ULL
+
+#define AMD64_EVENTSEL_EVENT \
+	(ARCH_PERFMON_EVENTSEL_EVENT | (0x0FULL << 32))
+#define INTEL_ARCH_EVENT_MASK \
+	(ARCH_PERFMON_EVENTSEL_UMASK | ARCH_PERFMON_EVENTSEL_EVENT)
+
+#define X86_RAW_EVENT_MASK \
+	(ARCH_PERFMON_EVENTSEL_EVENT | \
+	 ARCH_PERFMON_EVENTSEL_UMASK | \
+	 ARCH_PERFMON_EVENTSEL_EDGE | \
+	 ARCH_PERFMON_EVENTSEL_INV | \
+	 ARCH_PERFMON_EVENTSEL_CMASK)
+#define AMD64_RAW_EVENT_MASK \
+	(X86_RAW_EVENT_MASK | \
+	 AMD64_EVENTSEL_EVENT)
 
 #define ARCH_PERFMON_UNHALTED_CORE_CYCLES_SEL 0x3c
 #define ARCH_PERFMON_UNHALTED_CORE_CYCLES_UMASK (0x00 << 8)
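
For readers wiring these masks up outside the kernel tree: the new ARCH_PERFMON_EVENTSEL_* constants are plain bit fields of the 64-bit IA32_PERFEVTSELx (MSR 0x186/0x187) layout, so an event-select value is just the OR of an event code, a unit mask and the control bits, and X86_RAW_EVENT_MASK picks the raw event/umask/edge/inv/cmask bits out of such a value. The snippet below is an illustrative userspace sketch, not part of the patch; it copies the macro values from the hunk above and uses the architectural unhalted-core-cycles encoding (event 0x3c, umask 0x00) already defined in this header. The file name and printf output are arbitrary.

/*
 * Illustrative only -- mirrors the macros added above; not kernel code.
 * Build: gcc -o evtsel-demo evtsel-demo.c   (file name is arbitrary)
 */
#include <stdio.h>
#include <stdint.h>

#define ARCH_PERFMON_EVENTSEL_EVENT	0x000000FFULL
#define ARCH_PERFMON_EVENTSEL_UMASK	0x0000FF00ULL
#define ARCH_PERFMON_EVENTSEL_USR	(1ULL << 16)
#define ARCH_PERFMON_EVENTSEL_OS	(1ULL << 17)
#define ARCH_PERFMON_EVENTSEL_EDGE	(1ULL << 18)
#define ARCH_PERFMON_EVENTSEL_ENABLE	(1ULL << 22)
#define ARCH_PERFMON_EVENTSEL_INV	(1ULL << 23)
#define ARCH_PERFMON_EVENTSEL_CMASK	0xFF000000ULL

#define X86_RAW_EVENT_MASK \
	(ARCH_PERFMON_EVENTSEL_EVENT | \
	 ARCH_PERFMON_EVENTSEL_UMASK | \
	 ARCH_PERFMON_EVENTSEL_EDGE | \
	 ARCH_PERFMON_EVENTSEL_INV | \
	 ARCH_PERFMON_EVENTSEL_CMASK)

int main(void)
{
	/* Architectural "unhalted core cycles": event 0x3c, umask 0x00. */
	uint64_t evtsel = 0x3cULL		/* event select field  */
		| (0x00ULL << 8)		/* unit mask field     */
		| ARCH_PERFMON_EVENTSEL_USR	/* count in user mode  */
		| ARCH_PERFMON_EVENTSEL_OS	/* count in kernel mode */
		| ARCH_PERFMON_EVENTSEL_ENABLE;	/* enable the counter  */

	/*
	 * X86_RAW_EVENT_MASK keeps only event/umask/edge/inv/cmask;
	 * control bits such as USR, OS, ENABLE, INT and ANY fall outside it.
	 */
	uint64_t raw = evtsel & X86_RAW_EVENT_MASK;

	printf("eventsel = 0x%016llx, raw bits = 0x%016llx\n",
	       (unsigned long long)evtsel, (unsigned long long)raw);
	return 0;
}

In-kernel, bits like ENABLE, INT, USR and OS are set by the perf code itself rather than taken from a user-supplied raw config, which is consistent with the raw masks stopping at the cmask (and, for AMD64, extended event) fields.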