12053bc57SKirill A. Shutemov // SPDX-License-Identifier: GPL-2.0-only
22053bc57SKirill A. Shutemov 
32053bc57SKirill A. Shutemov #include <linux/efi.h>
42053bc57SKirill A. Shutemov #include <linux/memblock.h>
52053bc57SKirill A. Shutemov #include <linux/spinlock.h>
62053bc57SKirill A. Shutemov #include <asm/unaccepted_memory.h>
72053bc57SKirill A. Shutemov 
82053bc57SKirill A. Shutemov /* Protects unaccepted memory bitmap */
92053bc57SKirill A. Shutemov static DEFINE_SPINLOCK(unaccepted_memory_lock);
102053bc57SKirill A. Shutemov 
112053bc57SKirill A. Shutemov /*
122053bc57SKirill A. Shutemov  * accept_memory() -- Consult bitmap and accept the memory if needed.
132053bc57SKirill A. Shutemov  *
142053bc57SKirill A. Shutemov  * Only memory that is explicitly marked as unaccepted in the bitmap requires
152053bc57SKirill A. Shutemov  * an action. All the remaining memory is implicitly accepted and doesn't need
162053bc57SKirill A. Shutemov  * acceptance.
172053bc57SKirill A. Shutemov  *
182053bc57SKirill A. Shutemov  * No need to accept:
192053bc57SKirill A. Shutemov  *  - anything if the system has no unaccepted table;
202053bc57SKirill A. Shutemov  *  - memory that is below phys_base;
 *  - memory that is above the memory that is addressable by the bitmap;
222053bc57SKirill A. Shutemov  */
void accept_memory(phys_addr_t start, phys_addr_t end)
{
	struct efi_unaccepted_memory *unaccepted;
	unsigned long range_start, range_end;
	unsigned long flags;
	u64 unit_size;

	/* No unaccepted table means all memory is implicitly accepted. */
	unaccepted = efi_get_unaccepted_table();
	if (!unaccepted)
		return;

	/* Bytes of physical memory covered by each bit in the bitmap */
	unit_size = unaccepted->unit_size;

	/*
	 * Only care for the part of the range that is represented
	 * in the bitmap.
	 */
	if (start < unaccepted->phys_base)
		start = unaccepted->phys_base;
	if (end < unaccepted->phys_base)
		return;

	/* Translate to offsets from the beginning of the bitmap */
	start -= unaccepted->phys_base;
	end -= unaccepted->phys_base;

	/*
	 * load_unaligned_zeropad() can lead to unwanted loads across page
	 * boundaries. The unwanted loads are typically harmless. But, they
	 * might be made to totally unrelated or even unmapped memory.
	 * load_unaligned_zeropad() relies on exception fixup (#PF, #GP and now
	 * #VE) to recover from these unwanted loads.
	 *
	 * But, this approach does not work for unaccepted memory. For TDX, a
	 * load from unaccepted memory will not lead to a recoverable exception
	 * within the guest. The guest will exit to the VMM where the only
	 * recourse is to terminate the guest.
	 *
	 * There are two parts to fix this issue and comprehensively avoid
	 * access to unaccepted memory. Together these ensure that an extra
	 * "guard" page is accepted in addition to the memory that needs to be
	 * used:
	 *
	 * 1. Implicitly extend the range_contains_unaccepted_memory(start, end)
	 *    checks up to end+unit_size if 'end' is aligned on a unit_size
	 *    boundary.
	 *
	 * 2. Implicitly extend accept_memory(start, end) to end+unit_size if
	 *    'end' is aligned on a unit_size boundary. (immediately following
	 *    this comment)
	 */
	if (!(end % unit_size))
		end += unit_size;

	/* Make sure not to overrun the bitmap */
	if (end > unaccepted->size * unit_size * BITS_PER_BYTE)
		end = unaccepted->size * unit_size * BITS_PER_BYTE;

	/* First bit index the walk below starts from */
	range_start = start / unit_size;

	/*
	 * Walk each contiguous run of set (= still unaccepted) bits, accept
	 * the corresponding physical range, then clear the bits so the same
	 * memory is never accepted twice. The spinlock serializes all bitmap
	 * readers/writers.
	 *
	 * NOTE(review): arch_accept_memory() runs with the lock held and IRQs
	 * disabled, so acceptance latency directly extends this critical
	 * section.
	 */
	spin_lock_irqsave(&unaccepted_memory_lock, flags);
	for_each_set_bitrange_from(range_start, range_end, unaccepted->bitmap,
				   DIV_ROUND_UP(end, unit_size)) {
		unsigned long phys_start, phys_end;
		unsigned long len = range_end - range_start;

		/* Convert bit indices back to physical addresses */
		phys_start = range_start * unit_size + unaccepted->phys_base;
		phys_end = range_end * unit_size + unaccepted->phys_base;

		arch_accept_memory(phys_start, phys_end);
		bitmap_clear(unaccepted->bitmap, range_start, len);
	}
	spin_unlock_irqrestore(&unaccepted_memory_lock, flags);
}
972053bc57SKirill A. Shutemov 
982053bc57SKirill A. Shutemov bool range_contains_unaccepted_memory(phys_addr_t start, phys_addr_t end)
992053bc57SKirill A. Shutemov {
1002053bc57SKirill A. Shutemov 	struct efi_unaccepted_memory *unaccepted;
1012053bc57SKirill A. Shutemov 	unsigned long flags;
1022053bc57SKirill A. Shutemov 	bool ret = false;
1032053bc57SKirill A. Shutemov 	u64 unit_size;
1042053bc57SKirill A. Shutemov 
1052053bc57SKirill A. Shutemov 	unaccepted = efi_get_unaccepted_table();
1062053bc57SKirill A. Shutemov 	if (!unaccepted)
1072053bc57SKirill A. Shutemov 		return false;
1082053bc57SKirill A. Shutemov 
1092053bc57SKirill A. Shutemov 	unit_size = unaccepted->unit_size;
1102053bc57SKirill A. Shutemov 
1112053bc57SKirill A. Shutemov 	/*
1122053bc57SKirill A. Shutemov 	 * Only care for the part of the range that is represented
1132053bc57SKirill A. Shutemov 	 * in the bitmap.
1142053bc57SKirill A. Shutemov 	 */
1152053bc57SKirill A. Shutemov 	if (start < unaccepted->phys_base)
1162053bc57SKirill A. Shutemov 		start = unaccepted->phys_base;
1172053bc57SKirill A. Shutemov 	if (end < unaccepted->phys_base)
1182053bc57SKirill A. Shutemov 		return false;
1192053bc57SKirill A. Shutemov 
1202053bc57SKirill A. Shutemov 	/* Translate to offsets from the beginning of the bitmap */
1212053bc57SKirill A. Shutemov 	start -= unaccepted->phys_base;
1222053bc57SKirill A. Shutemov 	end -= unaccepted->phys_base;
1232053bc57SKirill A. Shutemov 
124*c211c19eSKirill A. Shutemov 	/*
125*c211c19eSKirill A. Shutemov 	 * Also consider the unaccepted state of the *next* page. See fix #1 in
126*c211c19eSKirill A. Shutemov 	 * the comment on load_unaligned_zeropad() in accept_memory().
127*c211c19eSKirill A. Shutemov 	 */
128*c211c19eSKirill A. Shutemov 	if (!(end % unit_size))
129*c211c19eSKirill A. Shutemov 		end += unit_size;
130*c211c19eSKirill A. Shutemov 
1312053bc57SKirill A. Shutemov 	/* Make sure not to overrun the bitmap */
1322053bc57SKirill A. Shutemov 	if (end > unaccepted->size * unit_size * BITS_PER_BYTE)
1332053bc57SKirill A. Shutemov 		end = unaccepted->size * unit_size * BITS_PER_BYTE;
1342053bc57SKirill A. Shutemov 
1352053bc57SKirill A. Shutemov 	spin_lock_irqsave(&unaccepted_memory_lock, flags);
1362053bc57SKirill A. Shutemov 	while (start < end) {
1372053bc57SKirill A. Shutemov 		if (test_bit(start / unit_size, unaccepted->bitmap)) {
1382053bc57SKirill A. Shutemov 			ret = true;
1392053bc57SKirill A. Shutemov 			break;
1402053bc57SKirill A. Shutemov 		}
1412053bc57SKirill A. Shutemov 
1422053bc57SKirill A. Shutemov 		start += unit_size;
1432053bc57SKirill A. Shutemov 	}
1442053bc57SKirill A. Shutemov 	spin_unlock_irqrestore(&unaccepted_memory_lock, flags);
1452053bc57SKirill A. Shutemov 
1462053bc57SKirill A. Shutemov 	return ret;
1472053bc57SKirill A. Shutemov }
148