From 44dbdd613cf64ad9df4760951a582e4e35c3e8f9 Mon Sep 17 00:00:00 2001 From: Kristine Dosvik Date: Thu, 30 Nov 2023 23:07:04 +0100 Subject: [PATCH 1/4] counter test Signed-off-by: Kristine Dosvik --- .../mhpmcounter_write_test.c | 241 ++++++++++++++++++ .../custom/mhpmcounter_write_test/test.yaml | 4 + 2 files changed, 245 insertions(+) create mode 100644 cv32e40s/tests/programs/custom/mhpmcounter_write_test/mhpmcounter_write_test.c create mode 100644 cv32e40s/tests/programs/custom/mhpmcounter_write_test/test.yaml diff --git a/cv32e40s/tests/programs/custom/mhpmcounter_write_test/mhpmcounter_write_test.c b/cv32e40s/tests/programs/custom/mhpmcounter_write_test/mhpmcounter_write_test.c new file mode 100644 index 0000000000..9e8b614552 --- /dev/null +++ b/cv32e40s/tests/programs/custom/mhpmcounter_write_test/mhpmcounter_write_test.c @@ -0,0 +1,241 @@ +// Copyright 2023 Silicon Labs, Inc. +// +// SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1 +// +// Licensed under the Solderpad Hardware License v 2.1 (the "License"); you may +// not use this file except in compliance with the License, or, at your option, +// the Apache License version 2.0. +// +// You may obtain a copy of the License at +// https://solderpad.org/licenses/SHL-2.1/ +// +// Unless required by applicable law or agreed to in writing, any work +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include <stdio.h> +#include <stdlib.h> +#include <stdint.h> + +int write_mhpmcounters() +{ + uint32_t reg = 0; + __asm__ volatile("mv %0, x0" : "=r"(reg)); + __asm__ volatile("not %0, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB03, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB04, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB05, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB06, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB07, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB08, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB09, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB0A, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB0B, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB0C, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB0D, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB0E, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB0F, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB10, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB11, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB12, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB13, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB14, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB15, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB16, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB17, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB18, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB19, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB1A, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB1B, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB1C, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB1D, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB1E, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB1F, %0" :: "r"(reg)); + + return 0; +} + +int write_mhpmcounterhs() +{ + uint32_t reg = 0; + __asm__ volatile("csrrs x0, 0xB83, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB84, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB85, %0" :: 
"r"(reg)); + __asm__ volatile("csrrs x0, 0xB86, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB87, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB88, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB89, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB8A, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB8B, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB8C, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB8D, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB8E, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB8F, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB90, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB91, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB92, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB93, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB94, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB95, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB96, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB97, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB98, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB99, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB9A, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB9B, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB9C, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB9D, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB9E, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 0xB9F, %0" :: "r"(reg)); + + return 0; +} + +int check_mhpmcounters_are_zero() +{ + uint32_t reg = 0; + __asm__ volatile("csrr %0, 0xB03" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB04" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB05" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB06" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB07" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB08" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 
0xB09" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB0A" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB0B" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB0C" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB0D" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB0E" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB0F" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB10" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB11" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB12" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB13" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB14" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB15" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB16" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB17" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB18" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB19" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB1A" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB1B" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB1C" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB1D" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB1E" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB1F" : "=r"(reg)); + if(reg != 0) return 1; + + return 0; +} + +int check_mhpmcounterhs_are_zero() +{ + uint32_t reg = 0; + __asm__ volatile("csrr %0, 0xB83" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB84" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB85" : "=r"(reg)); + if(reg != 0) return 1; 
+ __asm__ volatile("csrr %0, 0xB86" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB87" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB88" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB89" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB8A" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB8B" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB8C" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB8D" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB8E" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB8F" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB90" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB91" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB92" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB93" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB94" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB95" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB96" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB97" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB98" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB99" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB9A" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB9B" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB9C" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB9D" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB9E" : "=r"(reg)); + if(reg != 0) return 1; + __asm__ volatile("csrr %0, 0xB9F" : "=r"(reg)); + if(reg != 0) return 1; + + return 0; +} + + +int main() +{ + uint32_t 
is_failure = 0; + + write_mhpmcounters(); + write_mhpmcounterhs(); + + is_failure += check_mhpmcounters_are_zero(); + is_failure += check_mhpmcounterhs_are_zero(); + + if (is_failure) return EXIT_FAILURE; + return EXIT_SUCCESS; + +} diff --git a/cv32e40s/tests/programs/custom/mhpmcounter_write_test/test.yaml b/cv32e40s/tests/programs/custom/mhpmcounter_write_test/test.yaml new file mode 100644 index 0000000000..9f061158d5 --- /dev/null +++ b/cv32e40s/tests/programs/custom/mhpmcounter_write_test/test.yaml @@ -0,0 +1,4 @@ +name: mhpmcounter_write_test +uvm_test: uvmt_$(CV_CORE_LC)_firmware_test_c +description: > + Write to mhpmcounters. Is only verified by ISS, should be verified that we get expected behaviour? From 38eebfce499ca15f53cda1f9bdd22c23e2c38870 Mon Sep 17 00:00:00 2001 From: Kristine Dosvik Date: Thu, 30 Nov 2023 23:10:07 +0100 Subject: [PATCH 2/4] add to regression Signed-off-by: Kristine Dosvik --- cv32e40s/regress/cv32e40s_full.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/cv32e40s/regress/cv32e40s_full.yaml b/cv32e40s/regress/cv32e40s_full.yaml index 66ba231d96..b00d1cc985 100644 --- a/cv32e40s/regress/cv32e40s_full.yaml +++ b/cv32e40s/regress/cv32e40s_full.yaml @@ -576,3 +576,10 @@ tests: builds: [ uvmt_cv32e40s_clic ] dir: cv32e40s/sim/uvmt cmd: make test TEST=zcmt_test + + mhpmcounter_write_test: + description: test writing to mhpm counters + builds: [ uvmt_cv32e40s ] + dir: cv32e40s/sim/uvmt + cmd: make test TEST=mhpmcounter_write_test + From d16440a2d06b0e7ff06b84c0192e8aa9c1726116 Mon Sep 17 00:00:00 2001 From: Kristine Dosvik Date: Fri, 1 Dec 2023 12:50:49 +0100 Subject: [PATCH 3/4] CSR names instead of registers Signed-off-by: Kristine Dosvik --- .../mhpmcounter_write_test.c | 233 +++++++++--------- 1 file changed, 117 insertions(+), 116 deletions(-) diff --git a/cv32e40s/tests/programs/custom/mhpmcounter_write_test/mhpmcounter_write_test.c b/cv32e40s/tests/programs/custom/mhpmcounter_write_test/mhpmcounter_write_test.c index 
9e8b614552..e11414218c 100644 --- a/cv32e40s/tests/programs/custom/mhpmcounter_write_test/mhpmcounter_write_test.c +++ b/cv32e40s/tests/programs/custom/mhpmcounter_write_test/mhpmcounter_write_test.c @@ -20,40 +20,41 @@ #include #include + int write_mhpmcounters() { uint32_t reg = 0; __asm__ volatile("mv %0, x0" : "=r"(reg)); __asm__ volatile("not %0, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB03, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB04, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB05, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB06, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB07, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB08, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB09, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB0A, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB0B, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB0C, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB0D, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB0E, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB0F, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB10, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB11, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB12, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB13, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB14, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB15, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB16, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB17, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB18, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB19, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB1A, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB1B, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB1C, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB1D, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB1E, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB1F, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, 
mhpmcounter3, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter4, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter5, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter6, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter7, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter8, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter9, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter10, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter11, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter12, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter13, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter14, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter15, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter16, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter17, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter18, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter19, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter20, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter21, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter22, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter23, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter24, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter25, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter26, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter27, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter28, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter29, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter30, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter31, %0" :: "r"(reg)); return 0; } @@ -61,35 +62,35 @@ int write_mhpmcounters() int write_mhpmcounterhs() { uint32_t reg = 0; - __asm__ volatile("csrrs x0, 0xB83, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB84, %0" :: 
"r"(reg)); - __asm__ volatile("csrrs x0, 0xB85, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB86, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB87, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB88, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB89, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB8A, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB8B, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB8C, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB8D, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB8E, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB8F, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB90, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB91, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB92, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB93, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB94, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB95, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB96, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB97, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB98, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB99, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB9A, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB9B, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB9C, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB9D, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB9E, %0" :: "r"(reg)); - __asm__ volatile("csrrs x0, 0xB9F, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter3h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter4h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter5h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter6h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter7h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter8h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter9h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter10h, %0" :: 
"r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter11h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter12h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter13h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter14h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter15h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter16h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter17h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter18h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter19h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter20h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter21h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter22h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter23h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter24h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter25h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter26h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter27h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter28h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter29h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter30h, %0" :: "r"(reg)); + __asm__ volatile("csrrs x0, mhpmcounter31h, %0" :: "r"(reg)); return 0; } @@ -97,63 +98,63 @@ int write_mhpmcounterhs() int check_mhpmcounters_are_zero() { uint32_t reg = 0; - __asm__ volatile("csrr %0, 0xB03" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter3" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB04" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter4" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB05" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter5" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB06" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter6" : "=r"(reg)); if(reg != 0) return 1; - __asm__ 
volatile("csrr %0, 0xB07" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter7" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB08" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter8" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB09" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter9" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB0A" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter10" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB0B" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter11" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB0C" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter12" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB0D" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter13" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB0E" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter14" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB0F" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter15" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB10" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter16" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB11" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter17" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB12" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter18" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB13" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter19" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB14" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter20" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB15" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter21" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB16" : 
"=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter22" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB17" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter23" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB18" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter24" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB19" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter25" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB1A" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter26" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB1B" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter27" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB1C" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter28" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB1D" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter29" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB1E" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter30" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB1F" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter31" : "=r"(reg)); if(reg != 0) return 1; return 0; @@ -162,63 +163,63 @@ int check_mhpmcounters_are_zero() int check_mhpmcounterhs_are_zero() { uint32_t reg = 0; - __asm__ volatile("csrr %0, 0xB83" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter3h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB84" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter4h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB85" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter5h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB86" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter6h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB87" : "=r"(reg)); + __asm__ volatile("csrr %0, 
mhpmcounter7h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB88" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter8h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB89" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter9h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB8A" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter10h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB8B" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter11h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB8C" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter12h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB8D" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter13h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB8E" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter14h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB8F" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter15h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB90" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter16h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB91" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter17h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB92" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter18h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB93" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter19h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB94" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter20h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB95" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter21h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB96" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter22h" : 
"=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB97" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter23h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB98" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter24h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB99" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter25h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB9A" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter26h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB9B" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter27h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB9C" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter28h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB9D" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter29h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB9E" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter30h" : "=r"(reg)); if(reg != 0) return 1; - __asm__ volatile("csrr %0, 0xB9F" : "=r"(reg)); + __asm__ volatile("csrr %0, mhpmcounter31h" : "=r"(reg)); if(reg != 0) return 1; return 0; From 08e83a81f92c9e8a9fdffdd4d037d04f248e4afa Mon Sep 17 00:00:00 2001 From: Kristine Dosvik Date: Fri, 1 Dec 2023 13:01:35 +0100 Subject: [PATCH 4/4] change config to clic_default Signed-off-by: Kristine Dosvik --- cv32e40s/regress/cv32e40s_full.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cv32e40s/regress/cv32e40s_full.yaml b/cv32e40s/regress/cv32e40s_full.yaml index b00d1cc985..4ce0a40bd0 100644 --- a/cv32e40s/regress/cv32e40s_full.yaml +++ b/cv32e40s/regress/cv32e40s_full.yaml @@ -579,7 +579,7 @@ tests: mhpmcounter_write_test: description: test writing to mhpm counters - builds: [ uvmt_cv32e40s ] + builds: [ uvmt_cv32e40s_clic ] dir: cv32e40s/sim/uvmt cmd: make test TEST=mhpmcounter_write_test