// SPDX-License-Identifier: GPL-2.0

#include <linux/export.h>
#include <linux/mm.h>
#include <asm/pgtable.h>
#include <asm/mem_encrypt.h>

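/*
 * Map each combination of the VM_READ, VM_WRITE, VM_EXEC and VM_SHARED
 * bits of vm_flags to a baseline page protection.  Private writable
 * mappings get the PAGE_COPY* protections: the hardware PTE stays
 * read-only, and the first write faults so the page can be copied.
 */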
static pgprot_t protection_map[16] __ro_after_init = {
        [VM_NONE]                                  = PAGE_NONE,
        [VM_READ]                                  = PAGE_READONLY,
        [VM_WRITE]                                 = PAGE_COPY,
        [VM_WRITE | VM_READ]                       = PAGE_COPY,
        [VM_EXEC]                                  = PAGE_READONLY_EXEC,
        [VM_EXEC | VM_READ]                        = PAGE_READONLY_EXEC,
        [VM_EXEC | VM_WRITE]                       = PAGE_COPY_EXEC,
        [VM_EXEC | VM_WRITE | VM_READ]             = PAGE_COPY_EXEC,
        [VM_SHARED]                                = PAGE_NONE,
        [VM_SHARED | VM_READ]                      = PAGE_READONLY,
        [VM_SHARED | VM_WRITE]                     = PAGE_SHARED,
        [VM_SHARED | VM_WRITE | VM_READ]           = PAGE_SHARED,
        [VM_SHARED | VM_EXEC]                      = PAGE_READONLY_EXEC,
        [VM_SHARED | VM_EXEC | VM_READ]            = PAGE_READONLY_EXEC,
        [VM_SHARED | VM_EXEC | VM_WRITE]           = PAGE_SHARED_EXEC,
        [VM_SHARED | VM_EXEC | VM_WRITE | VM_READ] = PAGE_SHARED_EXEC
};

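/*
 * Apply pgprot_encrypted() to every baseline protection so that new
 * mappings default to encrypted memory when memory encryption is active.
 */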
void add_encrypt_protection_map(void)
{
        unsigned int i;

        for (i = 0; i < ARRAY_SIZE(protection_map); i++)
                protection_map[i] = pgprot_encrypted(protection_map[i]);
}

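/*
 * Build the pgprot_t for a VMA from its vm_flags: look up the baseline
 * protection, then fold in protection key and memory encryption bits.
 */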
pgprot_t vm_get_page_prot(unsigned long vm_flags)
{
        unsigned long val = pgprot_val(protection_map[vm_flags &
                                (VM_READ|VM_WRITE|VM_EXEC|VM_SHARED)]);

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
        /*
         * Take the 4 protection key bits out of the vma->vm_flags value and
         * turn them into the bits that we can put into a pte.
         *
         * Only override these if Protection Keys are available (which is only
         * on 64-bit).
         */
        if (vm_flags & VM_PKEY_BIT0)
                val |= _PAGE_PKEY_BIT0;
        if (vm_flags & VM_PKEY_BIT1)
                val |= _PAGE_PKEY_BIT1;
        if (vm_flags & VM_PKEY_BIT2)
                val |= _PAGE_PKEY_BIT2;
        if (vm_flags & VM_PKEY_BIT3)
                val |= _PAGE_PKEY_BIT3;
#endif

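        /*
         * Set the encryption mask (a no-op when SME is not active) and,
         * for present entries, drop any bits the hardware does not
         * support.
         */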
        val = __sme_set(val);
        if (val & _PAGE_PRESENT)
                val &= __supported_pte_mask;
        return __pgprot(val);
}
EXPORT_SYMBOL(vm_get_page_prot);