/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * AMD Memory Encryption Support
 *
 * Copyright (C) 2017 Advanced Micro Devices, Inc.
 *
 * Author: Tom Lendacky <thomas.lendacky@amd.com>
 */

#include <linux/linkage.h>

#include <asm/processor-flags.h>
#include <asm/msr.h>
#include <asm/asm-offsets.h>

	.text
	.code32
SYM_FUNC_START(get_sev_encryption_bit)
	xor	%eax, %eax

#ifdef CONFIG_AMD_MEM_ENCRYPT
	push	%ebx
	push	%ecx
	push	%edx

	/* Check if running under a hypervisor */
	movl	$1, %eax
	cpuid
	bt	$31, %ecx		/* Check the hypervisor bit */
	jnc	.Lno_sev

	movl	$0x80000000, %eax	/* CPUID to check the highest leaf */
	cpuid
	cmpl	$0x8000001f, %eax	/* See if 0x8000001f is available */
	jb	.Lno_sev

	/*
	 * Check for the SEV feature:
	 *   CPUID Fn8000_001F[EAX] - Bit 1
	 *   CPUID Fn8000_001F[EBX] - Bits 5:0
	 *     Pagetable bit position used to indicate encryption
	 */
	movl	$0x8000001f, %eax
	cpuid
	bt	$1, %eax		/* Check if SEV is available */
	jnc	.Lno_sev

	movl	$MSR_AMD64_SEV, %ecx	/* Read the SEV MSR */
	rdmsr
	bt	$MSR_AMD64_SEV_ENABLED_BIT, %eax	/* Check if SEV is active */
	jnc	.Lno_sev

	movl	%ebx, %eax
	andl	$0x3f, %eax		/* Return the encryption bit location */
	jmp	.Lsev_exit

.Lno_sev:
	xor	%eax, %eax

.Lsev_exit:
	pop	%edx
	pop	%ecx
	pop	%ebx

#endif	/* CONFIG_AMD_MEM_ENCRYPT */

	ret
SYM_FUNC_END(get_sev_encryption_bit)

	.code64

#include "../../kernel/sev_verify_cbit.S"

SYM_FUNC_START(set_sev_encryption_mask)
#ifdef CONFIG_AMD_MEM_ENCRYPT
	push	%rbp
	push	%rdx

	movq	%rsp, %rbp		/* Save current stack pointer */

	call	get_sev_encryption_bit	/* Get the encryption bit position */
	testl	%eax, %eax
	jz	.Lno_sev_mask

	bts	%rax, sme_me_mask(%rip)	/* Create the encryption mask */

	/*
	 * Read MSR_AMD64_SEV again and store it to sev_status. Can't do this
	 * in get_sev_encryption_bit() because that function is 32-bit code
	 * shared between the 64-bit and 32-bit boot paths.
	 */
	movl	$MSR_AMD64_SEV, %ecx	/* Read the SEV MSR */
	rdmsr

	/* Store MSR value in sev_status */
	shlq	$32, %rdx
	orq	%rdx, %rax
	movq	%rax, sev_status(%rip)

.Lno_sev_mask:
	movq	%rbp, %rsp		/* Restore original stack pointer */

	pop	%rdx
	pop	%rbp
#endif

	xor	%rax, %rax
	ret
SYM_FUNC_END(set_sev_encryption_mask)

	.data

#ifdef CONFIG_AMD_MEM_ENCRYPT
	.balign	8
SYM_DATA(sme_me_mask,		.quad 0)
SYM_DATA(sev_status,		.quad 0)
SYM_DATA(sev_check_data,	.quad 0)
#endif
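
/*
 * Illustrative sketch only (not part of the functions above): the same SEV
 * detection expressed as C, which may make the control flow easier to
 * follow.  The cpuid() and rdmsr() helpers below are hypothetical
 * stand-ins for whatever CPUID/RDMSR wrappers a given environment
 * provides; the leaf, bit and MSR numbers mirror the assembly above
 * (CPUID 0x8000001f EAX[1] = SEV supported, EBX[5:0] = C-bit position,
 * MSR 0xc0010131 bit 0 = SEV active).
 *
 *	u32 eax, ebx, ecx, edx;
 *	u64 me_mask = 0, status = 0;
 *
 *	cpuid(0x80000000, &eax, &ebx, &ecx, &edx);	// highest extended leaf
 *	if (eax >= 0x8000001f) {
 *		cpuid(0x8000001f, &eax, &ebx, &ecx, &edx);
 *		if (eax & (1u << 1)) {			// SEV supported
 *			rdmsr(0xc0010131, &eax, &edx);	// MSR_AMD64_SEV
 *			status = ((u64)edx << 32) | eax;	// -> sev_status
 *			if (status & 1)			// SEV active
 *				me_mask = 1ULL << (ebx & 0x3f);	// -> sme_me_mask
 *		}
 *	}
 */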