Lines matching refs: p

134 u32 *p; in kvm_alloc() local
143 p = (void*)&kvm_tmp[kvm_tmp_index]; in kvm_alloc()
146 return p; in kvm_alloc()
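
The kvm_alloc() hits above are the scratch allocator every patch helper below relies on: it hands out space from the kvm_tmp buffer with a simple bump pointer, and a NULL return tells the caller to leave the site unpatched. A minimal userspace sketch of that pattern; the buffer size and the sketch_ names are mine, not the kernel's:

    #include <stddef.h>
    #include <stdint.h>

    #define TMP_SIZE (64 * 1024)                 /* assumed scratch size */

    static uint8_t tmp_buf[TMP_SIZE] __attribute__((aligned(4)));
    static size_t tmp_index;                     /* plays the role of kvm_tmp_index */

    /* Bump-allocate len bytes of patch space; NULL means "leave the site unpatched". */
    static uint32_t *sketch_alloc(size_t len)
    {
        uint32_t *p;

        if (tmp_index + len > sizeof(tmp_buf))
            return NULL;

        p = (uint32_t *)&tmp_buf[tmp_index];
        tmp_index += len;

        return p;
    }
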
157 u32 *p; in kvm_patch_ins_mtmsrd() local
162 p = kvm_alloc(kvm_emulate_mtmsrd_len * 4); in kvm_patch_ins_mtmsrd()
163 if (!p) in kvm_patch_ins_mtmsrd()
167 distance_start = (ulong)p - (ulong)inst; in kvm_patch_ins_mtmsrd()
169 distance_end = next_inst - (ulong)&p[kvm_emulate_mtmsrd_branch_offs]; in kvm_patch_ins_mtmsrd()
178 memcpy(p, kvm_emulate_mtmsrd, kvm_emulate_mtmsrd_len * 4); in kvm_patch_ins_mtmsrd()
179 p[kvm_emulate_mtmsrd_branch_offs] |= distance_end & KVM_INST_B_MASK; in kvm_patch_ins_mtmsrd()
182 kvm_patch_ins_ll(&p[kvm_emulate_mtmsrd_reg_offs], in kvm_patch_ins_mtmsrd()
186 kvm_patch_ins_ll(&p[kvm_emulate_mtmsrd_reg_offs], in kvm_patch_ins_mtmsrd()
190 p[kvm_emulate_mtmsrd_reg_offs] |= rt; in kvm_patch_ins_mtmsrd()
194 p[kvm_emulate_mtmsrd_orig_ins_offs] = *inst; in kvm_patch_ins_mtmsrd()
195 flush_icache_range((ulong)p, (ulong)p + kvm_emulate_mtmsrd_len * 4); in kvm_patch_ins_mtmsrd()
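Read top to bottom, the kvm_patch_ins_mtmsrd() lines show the shape shared by every patch helper in this listing: allocate a slot, check that the branch distances are encodable, memcpy the emulation template into the slot, OR the return-branch displacement and the register field into the placeholder words, record the original instruction, and flush the icache. A hedged, self-contained sketch of that flow; the template contents, the *_OFFS indices, the mask values, and the symmetric range check are illustrative stand-ins, not the real kvm_emulate_mtmsrd values:

    #include <stdint.h>
    #include <string.h>

    #define B_MASK  0x03fffffcu   /* assumed: LI field of a PPC "b" instruction */
    #define B_MAX   0x01ffffff    /* assumed: farthest encodable displacement   */

    #define TMPL_LEN     4        /* illustrative template length, in words     */
    #define BRANCH_OFFS  3        /* word that becomes "b next_inst"            */
    #define REG_OFFS     1        /* word whose register field needs filling    */
    #define ORIG_OFFS    2        /* word that re-executes the original insn    */

    static const uint32_t tmpl[TMPL_LEN];   /* stands in for kvm_emulate_mtmsrd */
    static uint32_t slot[TMPL_LEN];         /* stands in for the kvm_alloc() result */

    static void sketch_flush_icache(void *start, void *end) { (void)start; (void)end; }

    static void sketch_patch(uint32_t *inst, uint32_t rt)
    {
        uint32_t *p = slot;
        uintptr_t next_inst = (uintptr_t)inst + 4;
        long distance_start = (long)((uintptr_t)p - (uintptr_t)inst);
        long distance_end   = (long)(next_inst - (uintptr_t)&p[BRANCH_OFFS]);

        /* Give up (leave the site unpatched) if either branch would not encode. */
        if (distance_start > B_MAX || distance_start < -B_MAX ||
            distance_end   > B_MAX || distance_end   < -B_MAX)
            return;

        memcpy(p, tmpl, TMPL_LEN * 4);
        p[BRANCH_OFFS] |= (uint32_t)distance_end & B_MASK; /* branch back after emulation */
        p[REG_OFFS]    |= rt;                              /* plug the register field in  */
        p[ORIG_OFFS]    = *inst;                           /* keep the original instruction */
        sketch_flush_icache(p, p + TMPL_LEN);
        /* The real code then rewrites *inst itself with a branch of distance_start
           into this slot; that step is not part of the lines matched above. */
    }

The mtmsr and mtsrin variants below repeat the same flow, differing only in how many placeholder words need a register field filled in.
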
210 u32 *p; in kvm_patch_ins_mtmsr() local
215 p = kvm_alloc(kvm_emulate_mtmsr_len * 4); in kvm_patch_ins_mtmsr()
216 if (!p) in kvm_patch_ins_mtmsr()
220 distance_start = (ulong)p - (ulong)inst; in kvm_patch_ins_mtmsr()
222 distance_end = next_inst - (ulong)&p[kvm_emulate_mtmsr_branch_offs]; in kvm_patch_ins_mtmsr()
231 memcpy(p, kvm_emulate_mtmsr, kvm_emulate_mtmsr_len * 4); in kvm_patch_ins_mtmsr()
232 p[kvm_emulate_mtmsr_branch_offs] |= distance_end & KVM_INST_B_MASK; in kvm_patch_ins_mtmsr()
237 kvm_patch_ins_ll(&p[kvm_emulate_mtmsr_reg1_offs], in kvm_patch_ins_mtmsr()
239 kvm_patch_ins_ll(&p[kvm_emulate_mtmsr_reg2_offs], in kvm_patch_ins_mtmsr()
243 kvm_patch_ins_ll(&p[kvm_emulate_mtmsr_reg1_offs], in kvm_patch_ins_mtmsr()
245 kvm_patch_ins_ll(&p[kvm_emulate_mtmsr_reg2_offs], in kvm_patch_ins_mtmsr()
249 p[kvm_emulate_mtmsr_reg1_offs] |= rt; in kvm_patch_ins_mtmsr()
250 p[kvm_emulate_mtmsr_reg2_offs] |= rt; in kvm_patch_ins_mtmsr()
254 p[kvm_emulate_mtmsr_orig_ins_offs] = *inst; in kvm_patch_ins_mtmsr()
255 flush_icache_range((ulong)p, (ulong)p + kvm_emulate_mtmsr_len * 4); in kvm_patch_ins_mtmsr()
271 u32 *p; in kvm_patch_ins_wrtee() local
276 p = kvm_alloc(kvm_emulate_wrtee_len * 4); in kvm_patch_ins_wrtee()
277 if (!p) in kvm_patch_ins_wrtee()
281 distance_start = (ulong)p - (ulong)inst; in kvm_patch_ins_wrtee()
283 distance_end = next_inst - (ulong)&p[kvm_emulate_wrtee_branch_offs]; in kvm_patch_ins_wrtee()
292 memcpy(p, kvm_emulate_wrtee, kvm_emulate_wrtee_len * 4); in kvm_patch_ins_wrtee()
293 p[kvm_emulate_wrtee_branch_offs] |= distance_end & KVM_INST_B_MASK; in kvm_patch_ins_wrtee()
296 p[kvm_emulate_wrtee_reg_offs] = in kvm_patch_ins_wrtee()
302 kvm_patch_ins_ll(&p[kvm_emulate_wrtee_reg_offs], in kvm_patch_ins_wrtee()
306 kvm_patch_ins_ll(&p[kvm_emulate_wrtee_reg_offs], in kvm_patch_ins_wrtee()
310 p[kvm_emulate_wrtee_reg_offs] |= rt; in kvm_patch_ins_wrtee()
315 p[kvm_emulate_wrtee_orig_ins_offs] = *inst; in kvm_patch_ins_wrtee()
316 flush_icache_range((ulong)p, (ulong)p + kvm_emulate_wrtee_len * 4); in kvm_patch_ins_wrtee()
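
What sets kvm_patch_ins_wrtee() apart is how the register word is filled: the entry at 296 overwrites the placeholder wholesale (the immediate wrteei case has no source register), the kvm_patch_ins_ll calls at 302 and 306 appear to redirect the load through a magic-page scratch slot when the guest register is one the template itself clobbers, and 310 is the common case of OR-ing the RT field straight in. That clobber reading is an assumption drawn from the surrounding kernel code, not something the listing itself states. A small sketch of the three-way dispatch, with hypothetical helper names:

    #include <stdint.h>

    #define MASK_RT 0x03e00000u   /* assumed: in-place RT field of a PPC instruction */

    /* Hypothetical stand-in for kvm_patch_ins_ll(): load from a scratch slot. */
    static void load_from_scratch(uint32_t *word, unsigned which) { (void)word; (void)which; }

    static void fill_reg_word(uint32_t *word, uint32_t rt, int imm_one)
    {
        if (imm_one) {
            /* wrteei 1: no source register, so the whole placeholder word is
               replaced (the 296 entry) rather than OR-patched. */
            *word = 0;   /* stands in for an "li rX, MSR_EE"-style constant */
            return;
        }

        switch ((rt & MASK_RT) >> 21) {
        case 30:
        case 31:
            /* Registers the template clobbers: reload the value instead. */
            load_from_scratch(word, (rt & MASK_RT) >> 21);
            break;
        default:
            *word |= rt;   /* the common case: OR the RT field straight in */
            break;
        }
    }
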
328 u32 *p; in kvm_patch_ins_wrteei_0() local
333 p = kvm_alloc(kvm_emulate_wrteei_0_len * 4); in kvm_patch_ins_wrteei_0()
334 if (!p) in kvm_patch_ins_wrteei_0()
338 distance_start = (ulong)p - (ulong)inst; in kvm_patch_ins_wrteei_0()
340 distance_end = next_inst - (ulong)&p[kvm_emulate_wrteei_0_branch_offs]; in kvm_patch_ins_wrteei_0()
348 memcpy(p, kvm_emulate_wrteei_0, kvm_emulate_wrteei_0_len * 4); in kvm_patch_ins_wrteei_0()
349 p[kvm_emulate_wrteei_0_branch_offs] |= distance_end & KVM_INST_B_MASK; in kvm_patch_ins_wrteei_0()
350 flush_icache_range((ulong)p, (ulong)p + kvm_emulate_wrteei_0_len * 4); in kvm_patch_ins_wrteei_0()
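
kvm_patch_ins_wrteei_0() is the minimal form of the pattern: nothing but the return branch gets fixed up. The "|= distance_end & KVM_INST_B_MASK" step at 349 is ordinary PowerPC I-form branch encoding; a tiny worked example of what that OR produces, with the opcode and mask taken from the ISA rather than from this file:

    #include <stdint.h>
    #include <stdio.h>

    #define PPC_INST_B 0x48000000u   /* opcode 18, AA=0, LK=0 */
    #define PPC_B_MASK 0x03fffffcu   /* 24-bit signed displacement, low two bits zero */

    /* Encode "b target" for a branch instruction located at 'pc'. */
    static uint32_t encode_b(uintptr_t pc, uintptr_t target)
    {
        int32_t disp = (int32_t)(target - pc);
        return PPC_INST_B | ((uint32_t)disp & PPC_B_MASK);
    }

    int main(void)
    {
        /* A backwards branch of 0x100 bytes encodes as 0x4bffff00. */
        printf("0x%08x\n", encode_b(0x1100, 0x1000));
        return 0;
    }
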
369 u32 *p; in kvm_patch_ins_mtsrin() local
374 p = kvm_alloc(kvm_emulate_mtsrin_len * 4); in kvm_patch_ins_mtsrin()
375 if (!p) in kvm_patch_ins_mtsrin()
379 distance_start = (ulong)p - (ulong)inst; in kvm_patch_ins_mtsrin()
381 distance_end = next_inst - (ulong)&p[kvm_emulate_mtsrin_branch_offs]; in kvm_patch_ins_mtsrin()
390 memcpy(p, kvm_emulate_mtsrin, kvm_emulate_mtsrin_len * 4); in kvm_patch_ins_mtsrin()
391 p[kvm_emulate_mtsrin_branch_offs] |= distance_end & KVM_INST_B_MASK; in kvm_patch_ins_mtsrin()
392 p[kvm_emulate_mtsrin_reg1_offs] |= (rb << 10); in kvm_patch_ins_mtsrin()
393 p[kvm_emulate_mtsrin_reg2_offs] |= rt; in kvm_patch_ins_mtsrin()
394 p[kvm_emulate_mtsrin_orig_ins_offs] = *inst; in kvm_patch_ins_mtsrin()
395 flush_icache_range((ulong)p, (ulong)p + kvm_emulate_mtsrin_len * 4); in kvm_patch_ins_mtsrin()
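
The mtsrin fixups at 392 and 393 only make sense if rt and rb are the already-positioned RT and RB bit fields of the trapping instruction rather than raw register numbers, which matches how this file usually extracts them with in-place masks; that is an assumption here, not something the matched lines show. Under that reading, OR-ing rt in with no shift drops it straight into an RT/RS slot, while rb << 10 relocates the RB field from bit shift 11 up to the RS position at shift 21. A short demo of the shift relationship, with the masks stated from the ISA field layout:

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed in-place field masks (big-endian PPC bit numbering):
       RT/RS occupy bits 6-10 (shift 21), RB occupies bits 16-20 (shift 11). */
    #define MASK_RT 0x03e00000u
    #define MASK_RB 0x0000f800u

    int main(void)
    {
        /* Build a fake X-form word using r9 as RT and r12 as RB. */
        uint32_t inst = (9u << 21) | (12u << 11);
        uint32_t rt = inst & MASK_RT;   /* field kept in place, not a raw number */
        uint32_t rb = inst & MASK_RB;

        /* rb << 10 moves the RB field from shift 11 to the RS slot at shift 21;
           rt needs no shift because it already sits at the RT/RS position. */
        printf("rb as RS: r%u, rt as RS: r%u\n", (rb << 10) >> 21, rt >> 21);
        return 0;
    }
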
664 u32 *p; in kvm_use_magic_page() local
689 for (p = start; p < end; p++) { in kvm_use_magic_page()
691 if (p >= kvm_template_start && p < kvm_template_end) { in kvm_use_magic_page()
692 p = kvm_template_end - 1; in kvm_use_magic_page()
695 kvm_check_ins(p, features); in kvm_use_magic_page()
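
Finally, the kvm_use_magic_page() hits show how the whole thing is driven: every instruction word between start and end is handed to kvm_check_ins(), except the words of the emulation templates themselves, which the loop jumps over so the patcher never rewrites its own templates. A self-contained sketch of that skip-the-template walk; the array bounds are placeholders:

    #include <stdint.h>

    static uint32_t text[256];                    /* stands in for the scanned kernel text */
    static uint32_t *kvm_template_start = &text[64];
    static uint32_t *kvm_template_end   = &text[96];

    static void check_ins(uint32_t *p, uint32_t features)
    {
        (void)p; (void)features;                  /* would decode and maybe patch *p */
    }

    static void scan(uint32_t *start, uint32_t *end, uint32_t features)
    {
        uint32_t *p;

        for (p = start; p < end; p++) {
            /* Skip the template region; "- 1" because the loop's p++ follows. */
            if (p >= kvm_template_start && p < kvm_template_end) {
                p = kvm_template_end - 1;
                continue;
            }
            check_ins(p, features);
        }
    }

    int main(void)
    {
        scan(text, text + 256, 0);
        return 0;
    }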