diff --git a/arch/arm64/kernel/perf_event.c b/arch/arm64/kernel/perf_event.c
index 5dcdbffa28e7e47540e90727927fb9c44488c2b9..838ccf123307df40b97f776ba82fa1b7e8182b25 100644
--- a/arch/arm64/kernel/perf_event.c
+++ b/arch/arm64/kernel/perf_event.c
@@ -21,6 +21,7 @@
 
 #include <asm/irq_regs.h>
 #include <asm/perf_event.h>
+#include <asm/sysreg.h>
 #include <asm/virt.h>
 
 #include <linux/of.h>
 #define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR                      0x41
 #define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD               0x42
 #define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR               0x43
+#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_INNER            0x44
+#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_OUTER            0x45
+#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WB_VICTIM               0x46
+#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WB_CLEAN                        0x47
+#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_INVAL                   0x48
+
 #define ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD                 0x4C
 #define ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR                 0x4D
 #define ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD                                0x4E
 #define ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR                                0x4F
+#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_RD                      0x50
+#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WR                      0x51
+#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_REFILL_RD               0x52
+#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_REFILL_WR               0x53
+
+#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WB_VICTIM               0x56
+#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WB_CLEAN                        0x57
+#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_INVAL                   0x58
+
+#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_REFILL_RD                 0x5C
+#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_REFILL_WR                 0x5D
+#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_RD                                0x5E
+#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_WR                                0x5F
+
+#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_RD                     0x60
+#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_WR                     0x61
+#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_SHARED                 0x62
+#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_NOT_SHARED             0x63
+#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_NORMAL                 0x64
+#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_PERIPH                 0x65
+
+#define ARMV8_IMPDEF_PERFCTR_MEM_ACCESS_RD                     0x66
+#define ARMV8_IMPDEF_PERFCTR_MEM_ACCESS_WR                     0x67
+#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_LD_SPEC                 0x68
+#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_ST_SPEC                 0x69
+#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_LDST_SPEC               0x6A
+
+#define ARMV8_IMPDEF_PERFCTR_LDREX_SPEC                                0x6C
+#define ARMV8_IMPDEF_PERFCTR_STREX_PASS_SPEC                   0x6D
+#define ARMV8_IMPDEF_PERFCTR_STREX_FAIL_SPEC                   0x6E
+#define ARMV8_IMPDEF_PERFCTR_STREX_SPEC                                0x6F
+#define ARMV8_IMPDEF_PERFCTR_LD_SPEC                           0x70
+#define ARMV8_IMPDEF_PERFCTR_ST_SPEC                           0x71
+#define ARMV8_IMPDEF_PERFCTR_LDST_SPEC                         0x72
+#define ARMV8_IMPDEF_PERFCTR_DP_SPEC                           0x73
+#define ARMV8_IMPDEF_PERFCTR_ASE_SPEC                          0x74
+#define ARMV8_IMPDEF_PERFCTR_VFP_SPEC                          0x75
+#define ARMV8_IMPDEF_PERFCTR_PC_WRITE_SPEC                     0x76
+#define ARMV8_IMPDEF_PERFCTR_CRYPTO_SPEC                       0x77
+#define ARMV8_IMPDEF_PERFCTR_BR_IMMED_SPEC                     0x78
+#define ARMV8_IMPDEF_PERFCTR_BR_RETURN_SPEC                    0x79
+#define ARMV8_IMPDEF_PERFCTR_BR_INDIRECT_SPEC                  0x7A
+
+#define ARMV8_IMPDEF_PERFCTR_ISB_SPEC                          0x7C
+#define ARMV8_IMPDEF_PERFCTR_DSB_SPEC                          0x7D
+#define ARMV8_IMPDEF_PERFCTR_DMB_SPEC                          0x7E
+
+#define ARMV8_IMPDEF_PERFCTR_EXC_UNDEF                         0x81
+#define ARMV8_IMPDEF_PERFCTR_EXC_SVC                           0x82
+#define ARMV8_IMPDEF_PERFCTR_EXC_PABORT                                0x83
+#define ARMV8_IMPDEF_PERFCTR_EXC_DABORT                                0x84
+
+#define ARMV8_IMPDEF_PERFCTR_EXC_IRQ                           0x86
+#define ARMV8_IMPDEF_PERFCTR_EXC_FIQ                           0x87
+#define ARMV8_IMPDEF_PERFCTR_EXC_SMC                           0x88
+
+#define ARMV8_IMPDEF_PERFCTR_EXC_HVC                           0x8A
+#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_PABORT                   0x8B
+#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_DABORT                   0x8C
+#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_OTHER                    0x8D
+#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_IRQ                      0x8E
+#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_FIQ                      0x8F
+#define ARMV8_IMPDEF_PERFCTR_RC_LD_SPEC                                0x90
+#define ARMV8_IMPDEF_PERFCTR_RC_ST_SPEC                                0x91
+
+#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_RD                      0xA0
+#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WR                      0xA1
+#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_REFILL_RD               0xA2
+#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_REFILL_WR               0xA3
+
+#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WB_VICTIM               0xA6
+#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WB_CLEAN                        0xA7
+#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_INVAL                   0xA8
 
 /* ARMv8 Cortex-A53 specific event types. */
 #define ARMV8_A53_PERFCTR_PREF_LINEFILL                                0xC2
@@ -154,6 +234,20 @@ static const unsigned armv8_thunder_perf_map[PERF_COUNT_HW_MAX] = {
        [PERF_COUNT_HW_STALLED_CYCLES_BACKEND]  = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
 };
 
+/* Broadcom Vulcan events mapping */
+static const unsigned armv8_vulcan_perf_map[PERF_COUNT_HW_MAX] = {
+       PERF_MAP_ALL_UNSUPPORTED,
+       [PERF_COUNT_HW_CPU_CYCLES]              = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
+       [PERF_COUNT_HW_INSTRUCTIONS]            = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
+       [PERF_COUNT_HW_CACHE_REFERENCES]        = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
+       [PERF_COUNT_HW_CACHE_MISSES]            = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
+       [PERF_COUNT_HW_BRANCH_INSTRUCTIONS]     = ARMV8_PMUV3_PERFCTR_BR_RETIRED,
+       [PERF_COUNT_HW_BRANCH_MISSES]           = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
+       [PERF_COUNT_HW_BUS_CYCLES]              = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
+       [PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
+       [PERF_COUNT_HW_STALLED_CYCLES_BACKEND]  = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
+};
+
 static const unsigned armv8_pmuv3_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
                                                [PERF_COUNT_HW_CACHE_OP_MAX]
                                                [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
@@ -246,10 +340,51 @@ static const unsigned armv8_thunder_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
        [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
 };
 
+static const unsigned armv8_vulcan_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
+                                             [PERF_COUNT_HW_CACHE_OP_MAX]
+                                             [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
+       PERF_CACHE_MAP_ALL_UNSUPPORTED,
+
+       [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
+       [C(L1D)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
+       [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
+       [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR,
+
+       [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
+       [C(L1I)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,
+
+       [C(ITLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,
+       [C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB,
+
+       [C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD,
+       [C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR,
+       [C(DTLB)][C(OP_READ)][C(RESULT_MISS)]   = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
+       [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]  = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,
+
+       [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]  = ARMV8_PMUV3_PERFCTR_BR_PRED,
+       [C(BPU)][C(OP_READ)][C(RESULT_MISS)]    = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
+       [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
+       [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]   = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
+
+       [C(NODE)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_RD,
+       [C(NODE)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_WR,
+};
+
+static ssize_t
+armv8pmu_events_sysfs_show(struct device *dev,
+                          struct device_attribute *attr, char *page)
+{
+       struct perf_pmu_events_attr *pmu_attr;
+
+       pmu_attr = container_of(attr, struct perf_pmu_events_attr, attr);
+
+       return sprintf(page, "event=0x%03llx\n", pmu_attr->id);
+}
+
 #define ARMV8_EVENT_ATTR_RESOLVE(m) #m
 #define ARMV8_EVENT_ATTR(name, config) \
-       PMU_EVENT_ATTR_STRING(name, armv8_event_attr_##name, \
-                             "event=" ARMV8_EVENT_ATTR_RESOLVE(config))
+       PMU_EVENT_ATTR(name, armv8_event_attr_##name, \
+                      config, armv8pmu_events_sysfs_show)
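
ARMV8_EVENT_ATTR() now goes through PMU_EVENT_ATTR() with a show callback and the raw event number, rather than baking a fixed "event=..." string into each attribute. Keeping the numeric id is what lets armv8pmu_event_attr_is_visible() further down test the event against the PMCEID bitmap. As a rough sketch (illustrative expansion, not text from this patch), PMU_EVENT_ATTR() from <linux/perf_event.h> produces something like:

/* Approximate expansion of ARMV8_EVENT_ATTR(sw_incr, ARMV8_PMUV3_PERFCTR_SW_INCR) */
static struct perf_pmu_events_attr armv8_event_attr_sw_incr = {
	.attr = __ATTR(sw_incr, 0444, armv8pmu_events_sysfs_show, NULL),
	.id   = ARMV8_PMUV3_PERFCTR_SW_INCR,
};
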
 
 ARMV8_EVENT_ATTR(sw_incr, ARMV8_PMUV3_PERFCTR_SW_INCR);
 ARMV8_EVENT_ATTR(l1i_cache_refill, ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL);
@@ -281,7 +416,7 @@ ARMV8_EVENT_ATTR(memory_error, ARMV8_PMUV3_PERFCTR_MEMORY_ERROR);
 ARMV8_EVENT_ATTR(inst_spec, ARMV8_PMUV3_PERFCTR_INST_SPEC);
 ARMV8_EVENT_ATTR(ttbr_write_retired, ARMV8_PMUV3_PERFCTR_TTBR_WRITE_RETIRED);
 ARMV8_EVENT_ATTR(bus_cycles, ARMV8_PMUV3_PERFCTR_BUS_CYCLES);
-ARMV8_EVENT_ATTR(chain, ARMV8_PMUV3_PERFCTR_CHAIN);
+/* Don't expose the chain event in /sys, since it's useless in isolation */
 ARMV8_EVENT_ATTR(l1d_cache_allocate, ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE);
 ARMV8_EVENT_ATTR(l2d_cache_allocate, ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE);
 ARMV8_EVENT_ATTR(br_retired, ARMV8_PMUV3_PERFCTR_BR_RETIRED);
@@ -332,7 +467,6 @@ static struct attribute *armv8_pmuv3_event_attrs[] = {
        &armv8_event_attr_inst_spec.attr.attr,
        &armv8_event_attr_ttbr_write_retired.attr.attr,
        &armv8_event_attr_bus_cycles.attr.attr,
-       &armv8_event_attr_chain.attr.attr,
        &armv8_event_attr_l1d_cache_allocate.attr.attr,
        &armv8_event_attr_l2d_cache_allocate.attr.attr,
        &armv8_event_attr_br_retired.attr.attr,
@@ -354,9 +488,27 @@ static struct attribute *armv8_pmuv3_event_attrs[] = {
        NULL,
 };
 
+static umode_t
+armv8pmu_event_attr_is_visible(struct kobject *kobj,
+                              struct attribute *attr, int unused)
+{
+       struct device *dev = kobj_to_dev(kobj);
+       struct pmu *pmu = dev_get_drvdata(dev);
+       struct arm_pmu *cpu_pmu = container_of(pmu, struct arm_pmu, pmu);
+       struct perf_pmu_events_attr *pmu_attr;
+
+       pmu_attr = container_of(attr, struct perf_pmu_events_attr, attr.attr);
+
+       if (test_bit(pmu_attr->id, cpu_pmu->pmceid_bitmap))
+               return attr->mode;
+
+       return 0;
+}
+
 static struct attribute_group armv8_pmuv3_events_attr_group = {
        .name = "events",
        .attrs = armv8_pmuv3_event_attrs,
+       .is_visible = armv8pmu_event_attr_is_visible,
 };
 
 PMU_FORMAT_ATTR(event, "config:0-9");
@@ -397,16 +549,14 @@ static const struct attribute_group *armv8_pmuv3_attr_groups[] = {
 
 static inline u32 armv8pmu_pmcr_read(void)
 {
-       u32 val;
-       asm volatile("mrs %0, pmcr_el0" : "=r" (val));
-       return val;
+       return read_sysreg(pmcr_el0);
 }
 
 static inline void armv8pmu_pmcr_write(u32 val)
 {
        val &= ARMV8_PMU_PMCR_MASK;
        isb();
-       asm volatile("msr pmcr_el0, %0" :: "r" (val));
+       write_sysreg(val, pmcr_el0);
 }
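
The open-coded mrs/msr inline asm throughout this file is replaced with the read_sysreg()/write_sysreg() helpers, which is why <asm/sysreg.h> is now included at the top of the file. As an illustration only (simplified, not the exact kernel definition), the helpers are conceptually:

/* Simplified sketch of the <asm/sysreg.h> accessors used below */
#define read_sysreg(r) ({						\
	u64 __val;							\
	asm volatile("mrs %0, " __stringify(r) : "=r" (__val));	\
	__val;								\
})

#define write_sysreg(v, r) do {						\
	u64 __val = (u64)(v);						\
	asm volatile("msr " __stringify(r) ", %0" : : "r" (__val));	\
} while (0)

The generated instructions are unchanged; the helpers just centralise the stringification and constraints and make the register names easier to grep for.
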
 
 static inline int armv8pmu_has_overflowed(u32 pmovsr)
@@ -428,7 +578,7 @@ static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx)
 static inline int armv8pmu_select_counter(int idx)
 {
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
-       asm volatile("msr pmselr_el0, %0" :: "r" (counter));
+       write_sysreg(counter, pmselr_el0);
        isb();
 
        return idx;
@@ -445,9 +595,9 @@ static inline u32 armv8pmu_read_counter(struct perf_event *event)
                pr_err("CPU%u reading wrong counter %d\n",
                        smp_processor_id(), idx);
        else if (idx == ARMV8_IDX_CYCLE_COUNTER)
-               asm volatile("mrs %0, pmccntr_el0" : "=r" (value));
+               value = read_sysreg(pmccntr_el0);
        else if (armv8pmu_select_counter(idx) == idx)
-               asm volatile("mrs %0, pmxevcntr_el0" : "=r" (value));
+               value = read_sysreg(pmxevcntr_el0);
 
        return value;
 }
@@ -469,47 +619,47 @@ static inline void armv8pmu_write_counter(struct perf_event *event, u32 value)
                 */
                u64 value64 = 0xffffffff00000000ULL | value;
 
-               asm volatile("msr pmccntr_el0, %0" :: "r" (value64));
+               write_sysreg(value64, pmccntr_el0);
        } else if (armv8pmu_select_counter(idx) == idx)
-               asm volatile("msr pmxevcntr_el0, %0" :: "r" (value));
+               write_sysreg(value, pmxevcntr_el0);
 }
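
PMCCNTR_EL0 is a 64-bit counter, but perf programs 32-bit sample periods, so the upper 32 bits are forced to all-ones to make the counter overflow (and raise the PMU interrupt) after the intended number of cycles, just like the 32-bit event counters. A small illustrative sketch of the arithmetic (hypothetical helper, not part of the patch):

/* For a period of 256 cycles, perf writes value = (u32)-256 = 0xffffff00. */
static inline u64 cycle_counter_start(u32 value)
{
	/* 0xffffffffffffff00: wraps to zero after exactly 256 increments */
	return 0xffffffff00000000ULL | value;
}
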
 
 static inline void armv8pmu_write_evtype(int idx, u32 val)
 {
        if (armv8pmu_select_counter(idx) == idx) {
                val &= ARMV8_PMU_EVTYPE_MASK;
-               asm volatile("msr pmxevtyper_el0, %0" :: "r" (val));
+               write_sysreg(val, pmxevtyper_el0);
        }
 }
 
 static inline int armv8pmu_enable_counter(int idx)
 {
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
-       asm volatile("msr pmcntenset_el0, %0" :: "r" (BIT(counter)));
+       write_sysreg(BIT(counter), pmcntenset_el0);
        return idx;
 }
 
 static inline int armv8pmu_disable_counter(int idx)
 {
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
-       asm volatile("msr pmcntenclr_el0, %0" :: "r" (BIT(counter)));
+       write_sysreg(BIT(counter), pmcntenclr_el0);
        return idx;
 }
 
 static inline int armv8pmu_enable_intens(int idx)
 {
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
-       asm volatile("msr pmintenset_el1, %0" :: "r" (BIT(counter)));
+       write_sysreg(BIT(counter), pmintenset_el1);
        return idx;
 }
 
 static inline int armv8pmu_disable_intens(int idx)
 {
        u32 counter = ARMV8_IDX_TO_COUNTER(idx);
-       asm volatile("msr pmintenclr_el1, %0" :: "r" (BIT(counter)));
+       write_sysreg(BIT(counter), pmintenclr_el1);
        isb();
        /* Clear the overflow flag in case an interrupt is pending. */
-       asm volatile("msr pmovsclr_el0, %0" :: "r" (BIT(counter)));
+       write_sysreg(BIT(counter), pmovsclr_el0);
        isb();
 
        return idx;
@@ -520,11 +670,11 @@ static inline u32 armv8pmu_getreset_flags(void)
        u32 value;
 
        /* Read */
-       asm volatile("mrs %0, pmovsclr_el0" : "=r" (value));
+       value = read_sysreg(pmovsclr_el0);
 
        /* Write to clear flags */
        value &= ARMV8_PMU_OVSR_MASK;
-       asm volatile("msr pmovsclr_el0, %0" :: "r" (value));
+       write_sysreg(value, pmovsclr_el0);
 
        return value;
 }
@@ -781,22 +931,38 @@ static int armv8_thunder_map_event(struct perf_event *event)
                                ARMV8_PMU_EVTYPE_EVENT);
 }
 
-static void armv8pmu_read_num_pmnc_events(void *info)
+static int armv8_vulcan_map_event(struct perf_event *event)
+{
+       return armpmu_map_event(event, &armv8_vulcan_perf_map,
+                               &armv8_vulcan_perf_cache_map,
+                               ARMV8_PMU_EVTYPE_EVENT);
+}
+
+static void __armv8pmu_probe_pmu(void *info)
 {
-       int *nb_cnt = info;
+       struct arm_pmu *cpu_pmu = info;
+       u32 pmceid[2];
 
        /* Read the nb of CNTx counters supported from PMNC */
-       *nb_cnt = (armv8pmu_pmcr_read() >> ARMV8_PMU_PMCR_N_SHIFT) & ARMV8_PMU_PMCR_N_MASK;
+       cpu_pmu->num_events = (armv8pmu_pmcr_read() >> ARMV8_PMU_PMCR_N_SHIFT)
+               & ARMV8_PMU_PMCR_N_MASK;
 
        /* Add the CPU cycles counter */
-       *nb_cnt += 1;
+       cpu_pmu->num_events += 1;
+
+       pmceid[0] = read_sysreg(pmceid0_el0);
+       pmceid[1] = read_sysreg(pmceid1_el0);
+
+       bitmap_from_u32array(cpu_pmu->pmceid_bitmap,
+                            ARMV8_PMUV3_MAX_COMMON_EVENTS, pmceid,
+                            ARRAY_SIZE(pmceid));
 }
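
PMCEID0_EL0 and PMCEID1_EL0 together advertise which of the first 64 architected (common) events the CPU implements, one bit per event number. bitmap_from_u32array() packs those two 32-bit words into cpu_pmu->pmceid_bitmap so the sysfs is_visible() callback can test individual event numbers. An assumed-equivalent sketch of that conversion (hypothetical helper name):

static void pmceid_words_to_bitmap(unsigned long *bitmap, const u32 pmceid[2])
{
	int n;

	bitmap_zero(bitmap, ARMV8_PMUV3_MAX_COMMON_EVENTS);
	for (n = 0; n < ARMV8_PMUV3_MAX_COMMON_EVENTS; n++)
		if (pmceid[n / 32] & BIT(n % 32))	/* event n implemented? */
			set_bit(n, bitmap);
}
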
 
-static int armv8pmu_probe_num_events(struct arm_pmu *arm_pmu)
+static int armv8pmu_probe_pmu(struct arm_pmu *cpu_pmu)
 {
-       return smp_call_function_any(&arm_pmu->supported_cpus,
-                                   armv8pmu_read_num_pmnc_events,
-                                   &arm_pmu->num_events, 1);
+       return smp_call_function_any(&cpu_pmu->supported_cpus,
+                                   __armv8pmu_probe_pmu,
+                                   cpu_pmu, 1);
 }
 
 static void armv8_pmu_init(struct arm_pmu *cpu_pmu)
@@ -819,7 +985,8 @@ static int armv8_pmuv3_init(struct arm_pmu *cpu_pmu)
        armv8_pmu_init(cpu_pmu);
        cpu_pmu->name                   = "armv8_pmuv3";
        cpu_pmu->map_event              = armv8_pmuv3_map_event;
-       return armv8pmu_probe_num_events(cpu_pmu);
+       cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
+       return armv8pmu_probe_pmu(cpu_pmu);
 }
 
 static int armv8_a53_pmu_init(struct arm_pmu *cpu_pmu)
@@ -828,7 +995,7 @@ static int armv8_a53_pmu_init(struct arm_pmu *cpu_pmu)
        cpu_pmu->name                   = "armv8_cortex_a53";
        cpu_pmu->map_event              = armv8_a53_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
-       return armv8pmu_probe_num_events(cpu_pmu);
+       return armv8pmu_probe_pmu(cpu_pmu);
 }
 
 static int armv8_a57_pmu_init(struct arm_pmu *cpu_pmu)
@@ -837,7 +1004,7 @@ static int armv8_a57_pmu_init(struct arm_pmu *cpu_pmu)
        cpu_pmu->name                   = "armv8_cortex_a57";
        cpu_pmu->map_event              = armv8_a57_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
-       return armv8pmu_probe_num_events(cpu_pmu);
+       return armv8pmu_probe_pmu(cpu_pmu);
 }
 
 static int armv8_a72_pmu_init(struct arm_pmu *cpu_pmu)
@@ -846,7 +1013,7 @@ static int armv8_a72_pmu_init(struct arm_pmu *cpu_pmu)
        cpu_pmu->name                   = "armv8_cortex_a72";
        cpu_pmu->map_event              = armv8_a57_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
-       return armv8pmu_probe_num_events(cpu_pmu);
+       return armv8pmu_probe_pmu(cpu_pmu);
 }
 
 static int armv8_thunder_pmu_init(struct arm_pmu *cpu_pmu)
@@ -855,7 +1022,16 @@ static int armv8_thunder_pmu_init(struct arm_pmu *cpu_pmu)
        cpu_pmu->name                   = "armv8_cavium_thunder";
        cpu_pmu->map_event              = armv8_thunder_map_event;
        cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
-       return armv8pmu_probe_num_events(cpu_pmu);
+       return armv8pmu_probe_pmu(cpu_pmu);
+}
+
+static int armv8_vulcan_pmu_init(struct arm_pmu *cpu_pmu)
+{
+       armv8_pmu_init(cpu_pmu);
+       cpu_pmu->name                   = "armv8_brcm_vulcan";
+       cpu_pmu->map_event              = armv8_vulcan_map_event;
+       cpu_pmu->pmu.attr_groups        = armv8_pmuv3_attr_groups;
+       return armv8pmu_probe_pmu(cpu_pmu);
 }
 
 static const struct of_device_id armv8_pmu_of_device_ids[] = {
@@ -864,6 +1040,7 @@ static const struct of_device_id armv8_pmu_of_device_ids[] = {
        {.compatible = "arm,cortex-a57-pmu",    .data = armv8_a57_pmu_init},
        {.compatible = "arm,cortex-a72-pmu",    .data = armv8_a72_pmu_init},
        {.compatible = "cavium,thunder-pmu",    .data = armv8_thunder_pmu_init},
+       {.compatible = "brcm,vulcan-pmu",       .data = armv8_vulcan_pmu_init},
        {},
 };
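
The new "brcm,vulcan-pmu" compatible string routes device-tree probing to armv8_vulcan_pmu_init(). A simplified, assumed sketch of how the shared ARM PMU driver consumes this table (example_pmu_probe() is hypothetical; the real matching lives in the common arm_pmu platform driver):

static int example_pmu_probe(struct platform_device *pdev, struct arm_pmu *pmu)
{
	const struct of_device_id *of_id =
		of_match_node(armv8_pmu_of_device_ids, pdev->dev.of_node);
	int (*init_fn)(struct arm_pmu *);

	if (!of_id)
		return -ENODEV;

	init_fn = of_id->data;
	return init_fn(pmu);		/* e.g. armv8_vulcan_pmu_init() */
}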