diff --git a/coverage/header_file.yaml b/coverage/header_file.yaml index b7386d8f0..5769a15c4 100644 --- a/coverage/header_file.yaml +++ b/coverage/header_file.yaml @@ -67,6 +67,7 @@ common: SATP64_ASID: 0x0FFFF00000000000 SATP64_PPN: 0x00000FFFFFFFFFFF SATP_MODE_OFF: 0 + SATP_MODE_SV32: 1 SATP_MODE_SV39: 8 SATP_MODE_SV48: 9 @@ -922,6 +923,13 @@ PMP_MACROS: PMPCFG_RW_BIT: 0x60 PMPCFG_L_BIT: 0x80 PMPCFG_ALL_BIT: 0xFF + +PMM_MACROS: + PMM_MASK: 0x300000000 + PMM_MASK_SV57: 0x200000000 + PMM_MASK_SV48: 0x300000000 + PMM_MASK_DISABLED: 0x000000000 + PMP_helper_Coverpoints: NAPOT_REGION_ADDRESS_MATCH: ((rs1_val + imm_val) ^ (pmpaddr1<<2)) & ~(((pmpaddr1 ^ (pmpaddr1+1))<<2) | 3) ==0 and ((rs1_val+imm_val+access_len-1 ) ^ (pmpaddr1<<2)) & ~(((pmpaddr1 ^ (pmpaddr1+1))<<2) | 3) ==0 @@ -932,4 +940,4 @@ PMP_helper_Coverpoints: TOR_PRIORITY_2_REGION_MATCH: (rs1_val + imm_val >= (pmpaddr0 << 2)) and (rs1_val + imm_val < (pmpaddr1 << 2)) NA4_REGION_ADDRESS_MATCH: (rs1_val + imm_val == (pmpaddr1 << 2)) NA4_PRIORITY_REGION_MATCH: (rs1_val + imm_val == (pmpaddr3 << 2)) - NA4_PRIORITY_2_REGION_MATCH: (rs1_val + imm_val == (pmpaddr1 << 2)) \ No newline at end of file + NA4_PRIORITY_2_REGION_MATCH: (rs1_val + imm_val == (pmpaddr1 << 2)) diff --git a/coverage/rv64_pmm.cgf b/coverage/rv64_pmm.cgf new file mode 100644 index 000000000..273bc5b3d --- /dev/null +++ b/coverage/rv64_pmm.cgf @@ -0,0 +1,959 @@ +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_atomic_S_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of 
length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is at least a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_atomic_S_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is at least a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there were the expected page faults + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'lr.d', 'sc.w', 'sc.d'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_atomic_S_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is at least a page table walk of length 1 + "mode == 
'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'lr.d', 'sc.w', 'sc.d'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_atomic_S_mode_SV48_tag03: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_atomic_U_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & 
${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_atomic_U_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'lr.d', 'sc.w', 'sc.d'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_atomic_U_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == 
${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'lr.d', 'sc.w', 'sc.d'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_atomic_U_mode_SV48_tag03: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_basic_S_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == 
${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_basic_S_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and mcause == {0xd, 0xf}{[$1/6]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_basic_S_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk 
of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and mcause == {0xd, 0xf}{[$1/6]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_basic_S_mode_SV48_tag03: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_basic_U_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no 
fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_basic_U_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and mcause == {0xd, 0xf}{[$1/6]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_basic_U_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + 
val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and mcause == {0xd, 0xf}{[$1/6]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_basic_U_mode_SV48_tag03: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'sb', 'sh', 'sw'} and (len_dptw == 1)": 0 + +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_compressed_S_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, c.lw, c.sw, c.ld, c.sd, c.lwsp, c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is 
enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_compressed_S_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, c.lw, c.sw, c.ld, c.sd, c.lwsp, c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and mcause == {0xd, 0xf}{[$1/4]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_compressed_S_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def 
rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, c.lw, c.sw, c.ld, c.sd, c.lwsp, c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and mcause == {0xd, 0xf}{[$1/4]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_compressed_S_mode_SV48_tag03: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, c.lw, c.sw, c.ld, c.sd, c.lwsp, c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 
+ +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_compressed_U_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, c.lw, c.sw, c.ld, c.sd, c.lwsp, c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_compressed_U_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, c.lw, c.sw, c.ld, c.sd, c.lwsp, c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was 
expected page faults + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and mcause == {0xd, 0xf}{[$1/4]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_compressed_U_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, c.lw, c.sw, c.ld, c.sd, c.lwsp, c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and mcause == {0xd, 0xf}{[$1/4]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_compressed_U_mode_SV48_tag03: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, c.lw, c.sw, c.ld, c.sd, c.lwsp, c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == 
${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_float_S_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_float_S_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is 
at least a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there were the expected page faults + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_float_S_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #Floating-point load/store instructions + "{csrrw, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is at least a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there were the expected page faults + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_float_S_mode_SV48_tag03: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #Floating-point load/store instructions + "{csrrw, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((menvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is at least a page table walk of length 1 + "mode == 'S' and ((menvcfg & 
${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'S' and ((menvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_float_U_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_float_U_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & 
${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_float_U_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_float_U_mode_SV48_tag03: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} 
and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'U' and ((senvcfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_atomic_M_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrw, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_atomic_M_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 
'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'lr.d', 'sc.w', 'sc.d'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_atomic_M_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'lr.d', 'sc.w', 'sc.d'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_atomic_M_mode_SV48_tag03: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, lr.w, sc.w, lr.d, sc.d}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + #Disabled 
Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lr.w', 'sc.w', 'lr.d', 'sc.d'} and (len_dptw == 1)": 0 + +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_basic_M_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_basic_M_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) 
== ${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and mcause == {0xd, 0xf}{[$1/6]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_basic_M_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and mcause == {0xd, 0xf}{[$1/6]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_basic_M_mode_SV48_tag03: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, lb, lh, lw, lbu, lhu, lwu, sb, sh, sw, sd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by 
confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'lb', 'lh', 'lw', 'lbu', 'lhu', 'lwu', 'sb', 'sh', 'sw', 'sd'} and (len_dptw == 1)": 0 + +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_compressed_M_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, c.lw, c.sw, c.ld, c.sd, c.lwsp, c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_compressed_M_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, c.lw, c.sw, c.ld, c.sd, c.lwsp, 
c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and mcause == {0xd, 0xf}{[$1/4]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_compressed_M_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, c.lw, c.sw, c.ld, c.sd, c.lwsp, c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and mcause == {0xd, 0xf}{[$1/4]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_compressed_M_mode_SV48_tag03: + config: + - check ISA:=regex(.*64.*); check 
ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, c.lw, c.sw, c.ld, c.sd, c.lwsp, c.swsp, c.ldsp, c.sdsp}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'c.lw', 'c.ld', 'c.lwsp', 'c.ldsp', 'c.sw', 'c.sd', 'c.swsp', 'c.sdsp'} and (len_dptw == 1)": 0 + +#Test Case 1 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_float_M_mode_SV48_tag00: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + +#Test Case 2 +#Pointer Masking Disabled -> Page 
faults +#Pointer Masking Enabled -> No fault +pm_float_M_mode_SV48_tag01: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 3 +#Pointer Masking Disabled -> Page faults +#Pointer Masking Enabled -> No fault +pm_float_M_mode_SV48_tag02: + config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was expected page faults + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and mcause == {0xd, 0xf}{[$1/2]}": 0 + +#Test Case 4 +#Pointer Masking Disabled -> No fault +#Pointer Masking Enabled -> No fault +pm_float_M_mode_SV48_tag03: + 
config: + - check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; + mnemonics: + #LR/SC instructions for atomic + "{csrrs, flw, fsw, fld, fsd}": 0 + csr_comb: + #check whether the Pointer Masking is enabled for SV-48 + "((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48})": 0 + val_comb: + #Enabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_SV48}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 + #Disabled Pointer Masking + #Verify that there was no fault by confirming there is atleast a page table walk of length 1 + "mode == 'M' and ((mseccfg & ${PMM_MASK}) == ${PMM_MASK_DISABLED}) and mnemonic == {'flw', 'fld', 'fsw', 'fsd'} and (len_dptw == 1)": 0 \ No newline at end of file diff --git a/riscv-test-suite/env/arch_test.h b/riscv-test-suite/env/arch_test.h index 80a150505..045bb2226 100644 --- a/riscv-test-suite/env/arch_test.h +++ b/riscv-test-suite/env/arch_test.h @@ -1418,7 +1418,7 @@ adj_\__MODE__\()epc_rtn: // adj mepc so there is at least 4B of p /**** FIXME: if in Mmode and mode!=bare & MPRV=1, then T4 be altered to point to the mode of the mstatus.mpp that is stored in Xtrampend_sv ****/ - +#ifndef PMM_EXT_ENAB csrr T2, CSR_XTVAL chk_\__MODE__\()tval: @@ -1461,7 +1461,7 @@ adj_\__MODE__\()tval: sv_\__MODE__\()tval: SREG T3, 3*REGWIDTH(T1) // save 4th sig value, (rel tval) - +#endif skp_\__MODE__\()tval: .ifc \__MODE__ , M diff --git a/riscv-test-suite/env/test_macros.h b/riscv-test-suite/env/test_macros.h index 4ed2bc010..f7bb34ae1 100644 --- a/riscv-test-suite/env/test_macros.h +++ b/riscv-test-suite/env/test_macros.h @@ -30,6 +30,16 @@ #define SIG sig_bgn_off #define VMEM vmem_bgn_off +//This macro is used to go from M -> M with virtualization enabled +#define ENABLE_VIRT_MMODE() ;\ + LI (s7, MSTATUS_MPRV) ;\ + csrs mstatus,s7 ;\ + LI (s7, 
MSTATUS_MPP) ;\ + csrs mstatus,s7 ;\ + auipc t0, 0 ;\ + addi t0, t0, 16 ;\ + csrw mepc, t0 ;\ + mret; #define SATP_SETUP(_TR0, _TR1, MODE);\ LA(_TR0, rvtest_Sroot_pg_tbl) ;\ diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag00.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag00.S new file mode 100644 index 000000000..db9418449 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag00.S @@ -0,0 +1,237 @@ +/* +Verification Goal: Set PMM = 00 in the msecccfg and tag bits are 0x0000 with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exception will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_M_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 
56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, 
sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and 
Exit M Mode ---------------------------------- + + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + nop + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + sfence.vma + lr.w a2, 0(x8) // Test load access + nop + sfence.vma + sc.w t1, a2, 0(x8) // Test store access + nop + sfence.vma + lr.d a2, 0(x8) // Test load access + nop + sfence.vma + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + sfence.vma + lr.w a2, 0(x8) // Test load access + nop + sfence.vma + sc.w t1, a2, 0(x8) // Test store access + nop + sfence.vma + lr.d a2, 0(x8) // Test load access + nop + sfence.vma + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Disable pointer masking (clear mseccfg.PMM) + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI 
(a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag01.S new file mode 100644 index 000000000..0603d93d3 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag01.S @@ -0,0 +1,237 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init 
+.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_M_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address 
of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| 
PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + nop + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + sfence.vma + lr.w a2, 0(x8) // Test load access + nop + sfence.vma + sc.w t1, a2, 0(x8) // Test store access + nop + sfence.vma + lr.d a2, 0(x8) // Test load access + nop + sfence.vma + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + sfence.vma + lr.w a2, 
0(x8) // Test load access + nop + sfence.vma + sc.w t1, a2, 0(x8) // Test store access + nop + sfence.vma + lr.d a2, 0(x8) // Test load access + nop + sfence.vma + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Disable pointer masking (clear mseccfg.PMM) + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git 
a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag02.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag02.S new file mode 100644 index 000000000..f4233a5e9 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag02.S @@ -0,0 +1,237 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xABAB with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_M_mode_SV48_tag02) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 
0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address 
+ + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + nop + LI (x8, 
va_rvtest_data_begin) // Load virtual address of data + nop + sfence.vma + lr.w a2, 0(x8) // Test load access + nop + sfence.vma + sc.w t1, a2, 0(x8) // Test store access + nop + sfence.vma + lr.d a2, 0(x8) // Test load access + nop + sfence.vma + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + sfence.vma + lr.w a2, 0(x8) // Test load access + nop + sfence.vma + sc.w t1, a2, 0(x8) // Test store access + nop + sfence.vma + lr.d a2, 0(x8) // Test load access + nop + sfence.vma + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Disable pointer masking (clear mseccfg.PMM) + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + 
+RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag03.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag03.S new file mode 100644 index 000000000..dc11fa6eb --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_M_sv48_tag03.S @@ -0,0 +1,237 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); 
def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_M_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, 
code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // 
Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + nop + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + sfence.vma + lr.w a2, 0(x8) // Test load access + nop + sfence.vma + sc.w t1, a2, 0(x8) // Test store access + nop + sfence.vma + lr.d a2, 0(x8) // Test load access + nop + sfence.vma + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + sfence.vma + lr.w a2, 0(x8) // Test load access + nop + sfence.vma + sc.w t1, a2, 0(x8) // Test store access + nop + sfence.vma + lr.d a2, 0(x8) // Test load access + nop + sfence.vma + 
sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Disable pointer masking (clear mseccfg.PMM) + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag00.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag00.S new file mode 100644 index 
000000000..8aa5aec31 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag00.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exception will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_S_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000500000006218 // 48-bit virtual address of the signature section + .set 
va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate 
(VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // 
Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + 
RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag01.S new file mode 100644 index 000000000..a6fd7d7ff --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag01.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_S_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# 
------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address 
of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // 
Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, 
va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag10.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag10.S new file mode 100644 index 000000000..9e83bf038 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag10.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xABAB with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer 
Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_S_mode_SV48_tag02) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // 
Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | 
PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + 
sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef 
+#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag11.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag11.S new file mode 100644 index 000000000..7c75c4efc --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_S_sv48_tag11.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will still be valid, so no exceptions will be created +since the Virtual Address remains canonical, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_S_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set
va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + 
add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to 
Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + 
+RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag00.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag00.S new file mode 100644 index 000000000..6900ce4b9 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag00.S @@ -0,0 +1,225 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0x0000 with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in U-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will still be valid, so no exception will be created +since the Virtual Address remains canonical, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True;
def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_U_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address 
of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, 
pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test 
store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag01.S new file mode 100644 index 000000000..582d91255 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag01.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0x0000 
with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in U-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_U_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic
------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# 
--------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // 
Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif 
+RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag10.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag10.S new file mode 100644 index 000000000..a16c7b29e --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag10.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0xABAB with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_U_mode_SV48_tag02) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set 
pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save 
adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# 
-------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi 
x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag11.S b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag11.S new file mode 100644 index 000000000..66c8bb363 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_atomics/PMM_atomic_01_U_sv48_tag11.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + 
+RVTEST_ISA("RV64IA_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*A.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_atomic_U_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value 
to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, 
pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking 
Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) // Load virtual address of data + nop + lr.w a2, 0(x8) // Test load access + nop + sc.w t1, a2, 0(x8) // Test store access + nop + lr.d a2, 0(x8) // Test load access + nop + sc.d t1, a2, 0(x8) // Test store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag00.S 
b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag00.S new file mode 100644 index 000000000..82fcc69a2 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag00.S @@ -0,0 +1,278 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exception will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_M_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 
0x0000500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, 
rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + sfence.vma + lb x9, 0(x8) // test the load access + nop + sfence.vma + lh x9, 
0(x8) // test the load access + nop + sfence.vma + lw x9, 0(x8) // test the load access + nop + sfence.vma + ld x9, 0(x8) // test the load access + nop + sfence.vma + sb x9, 0(x8) // test the store access + nop + sfence.vma + sh x9, 0(x8) // test the store access + nop + sfence.vma + sw x9, 0(x8) // test the store access + nop + sfence.vma + sd x9, 0(x8) // test the store access + nop + sfence.vma + lbu x9, 0(x8) // test the load access + nop + sfence.vma + lhu x9, 0(x8) // test the load access + nop + sfence.vma + lwu x9, 0(x8) + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + sfence.vma + lb x9, 0(x8) // test the load access + nop + sfence.vma + lh x9, 0(x8) // test the load access + nop + sfence.vma + lw x9, 0(x8) // test the load access + nop + sfence.vma + ld x9, 0(x8) // test the load access + nop + sfence.vma + sb x9, 0(x8) // test the store access + nop + sfence.vma + sh x9, 0(x8) // test the store access + nop + sfence.vma + sw x9, 0(x8) // test the store access + nop + sfence.vma + sd x9, 0(x8) // test the store access + nop + sfence.vma + lbu x9, 0(x8) // test the load access + nop + sfence.vma + lhu x9, 0(x8) // test the load access + nop + sfence.vma + lwu x9, 0(x8) + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Disable Virtualization --------------------------------- + + 
RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag01.S new file mode 100644 index 000000000..040c17317 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag01.S @@ -0,0 +1,278 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 
+Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_M_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, 
va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 
0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + sfence.vma + lb x9, 0(x8) // test the load access + nop + sfence.vma + lh x9, 0(x8) // test the load access + nop + sfence.vma + lw x9, 0(x8) // test the load access + nop + sfence.vma + ld x9, 0(x8) // test the load access + nop + sfence.vma + sb x9, 0(x8) // test the store access + nop + sfence.vma + sh x9, 0(x8) // test the store access + nop + sfence.vma + sw x9, 0(x8) // test the store access + nop + sfence.vma + sd x9, 0(x8) // test the store access + nop + sfence.vma + lbu x9, 
0(x8) // test the load access + nop + sfence.vma + lhu x9, 0(x8) // test the load access + nop + sfence.vma + lwu x9, 0(x8) + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + sfence.vma + lb x9, 0(x8) // test the load access + nop + sfence.vma + lh x9, 0(x8) // test the load access + nop + sfence.vma + lw x9, 0(x8) // test the load access + nop + sfence.vma + ld x9, 0(x8) // test the load access + nop + sfence.vma + sb x9, 0(x8) // test the store access + nop + sfence.vma + sh x9, 0(x8) // test the store access + nop + sfence.vma + sw x9, 0(x8) // test the store access + nop + sfence.vma + sd x9, 0(x8) // test the store access + nop + sfence.vma + lbu x9, 0(x8) // test the load access + nop + sfence.vma + lhu x9, 0(x8) // test the load access + nop + sfence.vma + lwu x9, 0(x8) + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Disable pointer masking (clear mseccfg.PMM) + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + 
nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag10.S b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag10.S new file mode 100644 index 000000000..8b6f23327 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag10.S @@ -0,0 +1,278 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xABAB with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init 
+.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_M_mode_SV48_tag02) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of 
code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) 
+ PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + sfence.vma + lb x9, 0(x8) // test the load access + nop + sfence.vma + lh x9, 0(x8) // test the load access + nop + sfence.vma + lw x9, 0(x8) // test the load access + nop + sfence.vma + ld x9, 0(x8) // test the load access + nop + sfence.vma + sb x9, 0(x8) // test the store access + nop + sfence.vma + sh x9, 0(x8) // test the store access + nop + sfence.vma + sw x9, 0(x8) // test the store access + nop + sfence.vma + sd x9, 0(x8) // test the store access + nop + sfence.vma + lbu x9, 0(x8) // test the load access + nop + sfence.vma + lhu x9, 0(x8) // test the load access + nop + sfence.vma + lwu x9, 0(x8) + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with 
PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + sfence.vma + lb x9, 0(x8) // test the load access + nop + sfence.vma + lh x9, 0(x8) // test the load access + nop + sfence.vma + lw x9, 0(x8) // test the load access + nop + sfence.vma + ld x9, 0(x8) // test the load access + nop + sfence.vma + sb x9, 0(x8) // test the store access + nop + sfence.vma + sh x9, 0(x8) // test the store access + nop + sfence.vma + sw x9, 0(x8) // test the store access + nop + sfence.vma + sd x9, 0(x8) // test the store access + nop + sfence.vma + lbu x9, 0(x8) // test the load access + nop + sfence.vma + lhu x9, 0(x8) // test the load access + nop + sfence.vma + lwu x9, 0(x8) + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Disable pointer masking (clear mseccfg.PMM) + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 
+rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag11.S b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag11.S new file mode 100644 index 000000000..017cede5f --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_M_sv48_tag11.S @@ -0,0 +1,278 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_M_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions 
----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, 
rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, 
LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + sfence.vma + lb x9, 0(x8) // test the load access + nop + sfence.vma + lh x9, 0(x8) // test the load access + nop + sfence.vma + lw x9, 0(x8) // test the load access + nop + sfence.vma + ld x9, 0(x8) // test the load access + nop + sfence.vma + sb x9, 0(x8) // test the store access + nop + sfence.vma + sh x9, 0(x8) // test the store access + nop + sfence.vma + sw x9, 0(x8) // test the store access + nop + sfence.vma + sd x9, 0(x8) // test the store access + nop + sfence.vma + lbu x9, 0(x8) // test the load access + nop + sfence.vma + lhu x9, 0(x8) // test the load access + nop + sfence.vma + lwu x9, 0(x8) + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + sfence.vma + lb x9, 0(x8) // test the load access + nop + sfence.vma + lh x9, 0(x8) // test the load access + nop + sfence.vma 
+ lw x9, 0(x8) // test the load access + nop + sfence.vma + ld x9, 0(x8) // test the load access + nop + sfence.vma + sb x9, 0(x8) // test the store access + nop + sfence.vma + sh x9, 0(x8) // test the store access + nop + sfence.vma + sw x9, 0(x8) // test the store access + nop + sfence.vma + sd x9, 0(x8) // test the store access + nop + sfence.vma + lbu x9, 0(x8) // test the load access + nop + sfence.vma + lhu x9, 0(x8) // test the load access + nop + sfence.vma + lwu x9, 0(x8) + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Disable pointer masking (clear mseccfg.PMM) + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization 
+signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag00.S b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag00.S new file mode 100644 index 000000000..964f7d9f4 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag00.S @@ -0,0 +1,250 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exception will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_S_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit 
physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address 
of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 
mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- 
+ + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag01.S new file mode 100644 index 000000000..379ba890f --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag01.S @@ -0,0 +1,250 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 
+Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_S_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, 
va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 
0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) 
+ nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 
0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag10.S b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag10.S new file mode 100644 index 000000000..7de3aa95e --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag10.S @@ -0,0 +1,250 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xABAB with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_S_mode_SV48_tag02) + +RVTEST_SIGBASE( 
x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted 
code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set 
permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // 
test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end 
of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag11.S b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag11.S new file mode 100644 index 000000000..3768734f4 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_S_sv48_tag11.S @@ -0,0 +1,250 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_S_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 
0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address 
+ + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, 
va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical 
address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag00.S b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag00.S new file mode 100644 index 000000000..b586e2e24 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag00.S @@ -0,0 +1,251 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0x0000 with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exception will be created +due to the invalid 
Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_U_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add 
t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, 
sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 
0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) 
+rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag01.S new file mode 100644 index 000000000..466c644a6 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag01.S @@ -0,0 +1,250 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0x0000 with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in U-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_U_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero //
Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base 
address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical 
address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9,
0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag10.S b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag10.S new file mode 100644 index 000000000..be309ec94 --- /dev/null 
+++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag10.S @@ -0,0 +1,250 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0xABAB with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in U-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_U_mode_SV48_tag02) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 48-bit virtual
address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load 
base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb 
x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify 
 signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag11.S b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag11.S new file mode 100644 index 000000000..d2a962c03 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_basic/PMM_basic_01_U_sv48_tag11.S @@ -0,0 +1,250 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in U-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, the Effective Address will still be valid and no exceptions will be created, +since the Virtual Address remains canonical, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64I_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point:
+RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_basic_U_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical 
address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, 
va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + 
SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + lb x9, 0(x8) // test the load access + nop + lh x9, 0(x8) // test the load access + nop + lw x9, 0(x8) // test the load access + nop + ld x9, 0(x8) // test the load access + nop + sb x9, 0(x8) // test the store access + nop + sh x9, 0(x8) // test the store access + nop + sw x9, 0(x8) // test the store access + nop + sd x9, 0(x8) // test the store access + nop + lbu x9, 0(x8) // test the load access + nop + lhu x9, 0(x8) // test the load access + nop + lwu x9, 0(x8) + nop +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: 
+sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag00.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag00.S new file mode 100644 index 000000000..08b96b1fa --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag00.S @@ -0,0 +1,301 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exception will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_compressed_M_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 
56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, 
va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for 
Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + # li x8, va_rvtest_data_begin + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + nop + li x8, va_rvtest_data_begin + nop + sfence.vma + c.lw x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sw x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + c.ld x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sd x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + LI (x2, va_rvtest_data_begin) + nop + sfence.vma + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + sfence.vma + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + nop + li x8, va_rvtest_data_begin + nop + sfence.vma + c.lw x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sw x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + c.ld x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sd x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + LI (x2, va_rvtest_data_begin) + nop + sfence.vma + c.lwsp x9, 
0(x2) // test the load access + nop + nop + nop + sfence.vma + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + sfence.vma + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: 
+RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag01.S new file mode 100644 index 000000000..1df7bfa3f --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag01.S @@ -0,0 +1,301 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_compressed_M_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 
48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + 
SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + # li x8, va_rvtest_data_begin + ENABLE_VIRT_MMODE() + +# 
-------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + nop + li x8, va_rvtest_data_begin + nop + sfence.vma + c.lw x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sw x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + c.ld x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sd x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + LI (x2, va_rvtest_data_begin) + nop + sfence.vma + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + sfence.vma + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + nop + li x8, va_rvtest_data_begin + nop + sfence.vma + c.lw x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sw x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + c.ld x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sd x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + LI (x2, va_rvtest_data_begin) + nop + sfence.vma + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + sfence.vma + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + + +# 
-------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Disable pointer masking (clear the PMM field in mseccfg) + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag10.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag10.S new file mode 100644 index 000000000..1f9be5611 --- /dev/null +++ 
b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag10.S @@ -0,0 +1,301 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xABAB with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_compressed_M_mode_SV48_tag02) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 
48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, 
vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + # li x8, va_rvtest_data_begin + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + nop + li x8, va_rvtest_data_begin + nop + sfence.vma + c.lw x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sw x9, 0(x8) // test the store access + nop + nop + nop + 
sfence.vma + c.ld x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sd x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + LI (x2, va_rvtest_data_begin) + nop + sfence.vma + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + sfence.vma + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + nop + li x8, va_rvtest_data_begin + nop + sfence.vma + c.lw x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sw x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + c.ld x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sd x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + LI (x2, va_rvtest_data_begin) + nop + sfence.vma + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + sfence.vma + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch 
back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag11.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag11.S new file mode 100644 index 000000000..45e745240 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_M_sv48_tag11.S @@ -0,0 +1,301 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 
+Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will still be valid, so no exceptions will be created, +since the Virtual Address is already canonical (tag bits 0xFFFF match bit[47]=1), +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_compressed_M_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + 
LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code 
area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + # li x8, va_rvtest_data_begin + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + nop + li x8, va_rvtest_data_begin + nop + sfence.vma + c.lw x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sw x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + c.ld x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sd x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + LI (x2, va_rvtest_data_begin) + nop + sfence.vma + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.swsp x9, 0(x2) 
// test the store access + nop + nop + nop + sfence.vma + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + nop + li x8, va_rvtest_data_begin + nop + sfence.vma + c.lw x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sw x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + c.ld x9, 0(x8) // test the load access + nop + nop + nop + sfence.vma + c.sd x9, 0(x8) // test the store access + nop + nop + nop + sfence.vma + LI (x2, va_rvtest_data_begin) + nop + sfence.vma + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + sfence.vma + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + sfence.vma + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // 
Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag00.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag00.S new file mode 100644 index 000000000..a3b04e59b --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag00.S @@ -0,0 +1,274 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exception will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB 
+#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_compressed_S_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset 
to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 
3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw 
menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + 
RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag01.S new file mode 100644 index 000000000..6369e83da --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag01.S @@ -0,0 +1,274 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_compressed_S_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# 
------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address 
of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // 
Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + 
nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git 
a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag10.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag10.S new file mode 100644 index 000000000..98e9485d5 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag10.S @@ -0,0 +1,274 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xABAB with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_compressed_S_mode_SV48_tag02) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set 
va_rvtest_data_begin, 0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save 
adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking 
-------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + 
LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag11.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag11.S new file mode 100644 index 000000000..161281569 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_S_sv48_tag11.S @@ -0,0 +1,274 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception 
will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_compressed_S_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical 
address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of 
code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization 
--------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 
0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag00.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag00.S new file mode 100644 index 000000000..5a6cc1a3c --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag00.S @@ -0,0 +1,275 @@ +/* +Verification Goal: Set PMM = 11 in the senvcfg and tag bits are 0x0000 with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in U-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exception will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac 
PMM_MACROS",pm_compressed_U_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for 
code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of 
signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + 
+vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization 
+#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag01.S new file mode 100644 index 000000000..a06eb64b4 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag01.S @@ -0,0 +1,274 @@ +/* +Verification Goal: Set PMM = 11 in the senvcfg and tag bits are 0x0000 with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in U-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_compressed_U_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set 
pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate 
(VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# 
-------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# 
-------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag10.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag10.S new file mode 100644 index 000000000..427208cd5 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag10.S @@ -0,0 +1,274 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0xABAB 
with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in U-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_compressed_U_mode_SV48_tag02) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic 
------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# 
--------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop 
+ c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# 
----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag11.S b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag11.S new file mode 100644 index 000000000..ac5e1ebb0 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_compressed/PMM_compressed_01_U_sv48_tag11.S @@ -0,0 +1,274 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IC_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN 
+#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*C.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_compressed_U_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) + + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub 
t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + 
+# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + 
SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + c.lw x9, 0(x8) // test the load access + nop + nop + c.sw x9, 0(x8) // test the store access + nop + nop + nop + c.ld x9, 0(x8) // test the load access + nop + nop + nop + c.sd x9, 0(x8) // test the store access + nop + nop + nop + LI (x2, va_rvtest_data_begin) + nop + c.lwsp x9, 0(x2) // test the load access + nop + nop + nop + c.swsp x9, 0(x2) // test the store access + nop + nop + nop + c.ldsp x9, 0(x2) // test the load access + nop + nop + nop + c.sdsp x9, 0(x2) // test the store access + nop + nop + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END 
+RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag00.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag00.S new file mode 100644 index 000000000..374ebf1dc --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag00.S @@ -0,0 +1,236 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in M-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_float_M_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set 
pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save 
adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# 
-------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + # li x8, va_rvtest_data_begin + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + sfence.vma + nop + flw f1, 0(x8) // test the load access + nop + sfence.vma + fsw f1, 0(x8) // test the store access + nop + sfence.vma + fld f1, 0(x8) // test the load access + nop + sfence.vma + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + sfence.vma + nop + flw f1, 0(x8) // test the load access + nop + sfence.vma + fsw f1, 0(x8) // test the store access + nop + sfence.vma + fld f1, 0(x8) // test the load access + nop + sfence.vma + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical 
address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag01.S new file mode 100644 index 000000000..f137b6de6 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag01.S @@ -0,0 +1,236 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid 
Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_float_M_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // 
Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, 
va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + # li x8, va_rvtest_data_begin + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + sfence.vma + nop + flw f1, 0(x8) // test the load access + nop + sfence.vma + fsw f1, 0(x8) // test the store access + nop + sfence.vma + fld f1, 0(x8) // test the load access + nop + sfence.vma + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# 
-------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + sfence.vma + nop + flw f1, 0(x8) // test the load access + nop + sfence.vma + fsw f1, 0(x8) // test the store access + nop + sfence.vma + fld f1, 0(x8) // test the load access + nop + sfence.vma + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine 
+mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag10.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag10.S new file mode 100644 index 000000000..67c4bde30 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag10.S @@ -0,0 +1,236 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xABAB with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_float_M_mode_SV48_tag02) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit 
physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, 
sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and 
 Exit M Mode ---------------------------------- + # li x8, va_rvtest_data_begin + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + sfence.vma + nop + flw f1, 0(x8) // test the load access + nop + sfence.vma + fsw f1, 0(x8) // test the store access + nop + sfence.vma + fld f1, 0(x8) // test the load access + nop + sfence.vma + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + sfence.vma + nop + flw f1, 0(x8) // test the load access + nop + sfence.vma + fsw f1, 0(x8) // test the store access + nop + sfence.vma + fld f1, 0(x8) // test the load access + nop + sfence.vma + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Disable pointer masking (clear mseccfg.PMM) + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, 
a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag11.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag11.S new file mode 100644 index 000000000..19ecce38f --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_M_SV48_tag11.S @@ -0,0 +1,236 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point 
+rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_float_M_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0x008000000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code 
+ LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) + 
PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X| PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + # li x8, va_rvtest_data_begin + ENABLE_VIRT_MMODE() + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + sfence.vma + nop + flw f1, 0(x8) // test the load access + nop + sfence.vma + fsw f1, 0(x8) // test the store access + nop + sfence.vma + fld f1, 0(x8) // test the load access + nop + sfence.vma + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrs mseccfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + + ENABLE_VIRT_MMODE() + csrr t0, satp +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + sfence.vma + nop + flw f1, 0(x8) // test the load access + nop + 
 sfence.vma + fsw f1, 0(x8) // test the store access + nop + sfence.vma + fld f1, 0(x8) // test the load access + nop + sfence.vma + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Pointer Masking (PMM) --------------------------- + + LI a3, 0x000000000 + csrw mseccfg, a3 // Disable pointer masking (clear mseccfg.PMM) + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git 
a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag00.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag00.S new file mode 100644 index 000000000..c27677ddc --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag00.S @@ -0,0 +1,223 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exception will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_float_S_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 
0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address 
+ + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, 
va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 
12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag01.S new file mode 100644 index 000000000..3d02899c7 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag01.S @@ -0,0 +1,225 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0x0000 with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_float_S_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set 
the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual 
address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, 
va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update 
--------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag10.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag10.S new file mode 100644 index 000000000..38fe57f20 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag10.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xABAB with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, 
no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_float_S_mode_SV48_tag02) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, 
rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, 
pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set 
SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + 
.fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag11.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag11.S new file mode 100644 index 000000000..0efa7413f --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_S_sv48_tag11.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the menvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_float_S_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the 
signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address 
of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE 
Smode // Go back to Supervisor mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw menvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Smode // Go back to Supervisor mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 
0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag00.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag00.S new file mode 100644 index 000000000..ec8131bdb --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag00.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0x0000 with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in U-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exception will be created +since the Virtual Address is valid, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac 
PMM_MACROS",pm_float_U_mode_SV48_tag00) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add 
offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical 
address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization 
--------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag01.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag01.S new file mode 100644 index 000000000..bb2c0460e --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag01.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0x0000 with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is 
enabled or not in U-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_float_U_mode_SV48_tag01) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0x0000900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0x0000900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0x0000900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses
of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 
------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) 
--------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization 
+signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag10.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag10.S new file mode 100644 index 000000000..9739ba629 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag10.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0xABAB with bit[47]=0, +test whether or not pointer masking with PMLEN = 16 is enabled or not in U-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be invalid, exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_float_U_mode_SV48_tag02) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 
0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xABAB500000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xABAB500000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xABAB500000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 // Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, 
rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# --------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush 
the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + 
+# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file diff --git a/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag11.S b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag11.S new file mode 100644 index 000000000..22efe1c48 --- /dev/null +++ b/riscv-test-suite/rv64i_m/pmm/pmm_float/PMM_float_01_U_sv48_tag11.S @@ -0,0 +1,224 @@ +/* +Verification Goal: Set PMM = 01 in the senvcfg and tag bits are 0xFFFF with bit[47]=1, +test whether or not pointer masking with PMLEN = 16 is enabled or not in S-Mode in sv48 +Description: +If Pointer Masking is enabled, then the Effective Address will be masked accordingly, no exception will be generated, +If Pointer Masking is disabled, then the Effective Address will be valid, no exceptions will be created +due to the invalid Virtual Address, +*/ + +#define PMM_EXT_ENAB +#include "model_test.h" +#include "arch_test.h" + +RVTEST_ISA("RV64IFD_Zicsr") + +# Test code region +.section .text.init +.globl rvtest_entry_point +rvtest_entry_point: +RVMODEL_BOOT +RVTEST_CODE_BEGIN +#ifdef TEST_CASE_1 + + 
RVTEST_CASE(1,"//check ISA:=regex(.*64.*); check ISA:=regex(.*I.*F.*D.*Zicsr.*); def rvtest_mtrap_routine=True; def rvtest_strap_routine=True; def TEST_CASE_1=True; mac PMM_MACROS",pm_float_U_mode_SV48_tag03) + +RVTEST_SIGBASE( x13,signature_x13_1) +RVTEST_FP_ENABLE() + +main: +# ------------------------ Set the PMP for all memory regions ----------------- + ALL_MEM_PMP +# ------------------------------------------------------------------------------ + + csrw satp, zero // Set satp to zero (bare mode) + +# --------------------------- Define Addresses ------------------------------- + .set pa_rvtest_code_begin, 0x8000000000039c // 56-bit physical address of the code section + .set pa_rvtest_data_begin, 0x80000000003530 // 56-bit physical address of the data section + .set pa_rvtest_sig_begin, 0x80000000006218 // 56-bit physical address of the signature section + .set va_rvtest_code_begin, 0xFFFF80000000039c // 48-bit virtual address of the code section + .set va_rvtest_data_begin, 0xFFFF900000000000 // 48-bit virtual address of the data section + .set va_rvtest_sig_begin, 0xFFFF900000006218 // 48-bit virtual address of the signature section + .set va_rvtest_vmem_begin, 0xFFFF900000000000 // 48-bit virtual address of vmem + +# ------------------------------------------------------------------------------ + +# ------------------------ Save Area Logic ------------------------------------ + + /* Save virtual addresses of code and data in the S-mode save area */ + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + addi t3, t0, sv_area_sz // Adjust for save area size + csrr sp, mscratch // Read from mscratch + add t1, sp, t3 // Add save area offset to stack pointer + csrw sscratch, t1 // Write new value to sscratch + + LI (t0, va_rvtest_code_begin) // Load virtual address of code + LA (t1, rvtest_code_begin) // Load physical address of code + sub t0, t0, t1 
// Calculate (VA - PA) + csrr sp, mscratch // Read from mscratch + LREG t1, code_bgn_off+0*sv_area_sz(sp) // Load base address of code save area + add t2, t1, t0 // Add offset for code + SREG t2, code_bgn_off+1*sv_area_sz(sp) // Save adjusted code address + + LI (t0, va_rvtest_data_begin) // Load virtual address of data + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, data_bgn_off+0*sv_area_sz(sp) // Load base address of data save area + add t2, t1, t0 // Add offset for data + SREG t2, data_bgn_off+1*sv_area_sz(sp) // Save adjusted data address + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, sig_bgn_off+0*sv_area_sz(sp) // Load base address of signature save area + add t2, t1, t0 // Add offset for signature + SREG t2, sig_bgn_off+1*sv_area_sz(sp) // Save adjusted signature address + + // vmem + LI (t0, va_rvtest_vmem_begin) // Load virtual address of vmem + LA (t1, rvtest_data_begin) // Load physical address of data + sub t0, t0, t1 // Calculate (VA - PA) + LREG t1, vmem_bgn_off+0*sv_area_sz(sp) // Load base address of vmem save area + add t2, t1, t0 // Add offset for vmem + SREG t2, vmem_bgn_off+1*sv_area_sz(sp) // Save adjusted vmem address + +# --------------------- Set the Level 3 PTE for SV48 ------------------------- + + // Set level 3 PTE for the code area (PA: 0x200000000000 -> VA: 0x80000000000000) + LI (a0, pa_rvtest_code_begin) // Load physical address of code + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_code_begin, LEVEL3, sv48) // Set up level 3 PTE + + // Set level 3 PTE for the data area + LI (a0, pa_rvtest_code_begin) + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_data_begin, LEVEL3, sv48) + +# 
--------------------- Set the Level 3 PTE for Signature --------------------- + // Set level 3 PTE for the signature area + LI (a0, pa_rvtest_code_begin) // Load physical address of signature area + LI (a1, (PTE_V | PTE_A | PTE_W | PTE_R | PTE_D | PTE_X | PTE_U)) // Set permission bits + PTE_SETUP_RV64(a0, a1, t0, t1, va_rvtest_sig_begin, LEVEL3, sv48) // PTE setup + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LI (t1, pa_rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + add x13, x13, t0 // Adjust signature register + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB +satp_setup: + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + +# -------------------- Enter and Exit M Mode ---------------------------------- + + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM (Pointer Masking) without Masking -------------- + +vm_en: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop + fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Enable Pointer Masking (PMM) --------------------------- + + LI a3, 0x300000000 + csrw senvcfg, a3 // Enable pointer masking with PMLEN = 16 + +# -------------------- Set SATP for Virtualization ---------------------------- + + sfence.vma // Flush the TLB + SATP_SETUP_RV64(sv48) // Set SATP for SV48 mode + RVTEST_GOTO_LOWER_MODE Umode // Go back to User mode + +# -------------------- Test PMM with Masking Enabled -------------------------- + +vm_en_with_pointer_masking: + LI (x8, va_rvtest_data_begin) + nop + flw f1, 0(x8) // test the load access + nop + fsw f1, 0(x8) // test the store access + nop 
+ fld f1, 0(x8) // test the load access + nop + fsd f1, 0(x8) // test the store access + nop + +# -------------------- Disable Virtualization --------------------------------- + + RVTEST_GOTO_MMODE // Switch back to Machine mode + +# -------------------- Signature Update --------------------------------------- + + LI (t0, va_rvtest_sig_begin) // Load virtual address of signature + LA (t1, rvtest_sig_begin) // Load physical address of signature + sub t0, t0, t1 // Calculate (VA - PA) + sub x13, x13, t0 // Update signature register + addi x13, x13, REGWIDTH // Adjust for register width + nop + + LI (a4, 0x123) + RVTEST_SIGUPD(x13, a4) // Verify signature after virtualization disabled + +#endif + +# ----------------------------------------------------------------------------- + +RVTEST_CODE_END +RVMODEL_HALT + +RVTEST_DATA_BEGIN +.align 12 +rvtest_data: + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef + .dword 0xdeadbeefdeadbeef +#ifdef rvtest_strap_routine +.align 12 +rvtest_slvl1_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl2_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl3_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +rvtest_slvl4_pg_tbl: + RVTEST_PTE_IDENT_MAP(0,LVLS,RVTEST_ALLPERMS) +#endif +RVTEST_DATA_END +RVMODEL_DATA_BEGIN +rvtest_sig_begin: +sig_begin_canary: +CANARY; +// test signatures initialization +signature_x13_1: + .fill 256*(XLEN/64),4,0xcafebeef + +// trap signatures initialization +#ifdef rvtest_mtrap_routine +mtrap_sigptr: + .fill 256*(XLEN/64),4,0xdeadbeef +#endif + +sig_end_canary: +CANARY; +rvtest_sig_end: +RVMODEL_DATA_END \ No newline at end of file