1 /* SPDX-License-Identifier: GPL-2.0-or-later */
2 #ifndef _ASM_POWERPC_ASM_PROTOTYPES_H
3 #define _ASM_POWERPC_ASM_PROTOTYPES_H
4 /*
5  * This file is for C prototypes of asm symbols that are EXPORTed.
6  * It allows the modversions logic to see their prototype and
7  * generate proper CRCs for them.
8  *
9  * Copyright 2016, Daniel Axtens, IBM Corporation.
10  */
11 
12 #include <linux/threads.h>
13 #include <asm/cacheflush.h>
14 #include <asm/checksum.h>
15 #include <linux/uaccess.h>
16 #include <asm/epapr_hcalls.h>
17 #include <asm/dcr.h>
18 #include <asm/mmu_context.h>
19 #include <asm/ultravisor-api.h>
20 
21 #include <uapi/asm/ucontext.h>
22 
23 /* Ultravisor */
24 #if defined(CONFIG_PPC_POWERNV) || defined(CONFIG_PPC_SVM)
25 long ucall_norets(unsigned long opcode, ...);
26 #else
ucall_norets(unsigned long opcode,...)27 static inline long ucall_norets(unsigned long opcode, ...)
28 {
29 	return U_NOT_AVAILABLE;
30 }
31 #endif
32 
/* OPAL */
/*
 * Low-level OPAL call entry (asm symbol): a0-a7 are the call arguments,
 * followed by the OPAL token and the MSR value to make the call under;
 * returns the OPAL status/return value.
 */
int64_t __opal_call(int64_t a0, int64_t a1, int64_t a2, int64_t a3,
		    int64_t a4, int64_t a5, int64_t a6, int64_t a7,
		    int64_t opcode, uint64_t msr);

/* misc runtime */
void enable_machine_check(void);
/* libgcc-style 64-bit integer helpers exported by the kernel (32-bit builds) */
extern u64 __bswapdi2(u64);
extern s64 __lshrdi3(s64, int);
extern s64 __ashldi3(s64, int);
extern s64 __ashrdi3(s64, int);
extern int __cmpdi2(s64, s64);
extern int __ucmpdi2(u64, u64);
46 
/* tracing */
/* ftrace entry hook inserted by compiler profiling instrumentation */
void _mcount(void);

/* Transaction memory related */
void tm_enable(void);
void tm_disable(void);
void tm_abort(uint8_t cause);

struct kvm_vcpu;	/* forward declaration; only a pointer is needed here */
/* PR-KVM transactional-state save/restore (asm); guest_msr is the guest MSR */
void _kvmppc_restore_tm_pr(struct kvm_vcpu *vcpu, u64 guest_msr);
void _kvmppc_save_tm_pr(struct kvm_vcpu *vcpu, u64 guest_msr);
58 
#ifdef CONFIG_PPC_TRANSACTIONAL_MEM
void kvmppc_save_tm_hv(struct kvm_vcpu *vcpu, u64 msr, bool preserve_nv);
void kvmppc_restore_tm_hv(struct kvm_vcpu *vcpu, u64 msr, bool preserve_nv);
#else
/* TM not configured: HV TM state save/restore become no-ops. */
static inline void kvmppc_save_tm_hv(struct kvm_vcpu *vcpu, u64 msr,
				     bool preserve_nv) { }
static inline void kvmppc_restore_tm_hv(struct kvm_vcpu *vcpu, u64 msr,
					bool preserve_nv) { }
#endif /* CONFIG_PPC_TRANSACTIONAL_MEM */
68 
/* P9 KVM guest entry path (asm symbol) */
void kvmppc_p9_enter_guest(struct kvm_vcpu *vcpu);

/* H_SET_DABR / H_SET_XDABR hypercall handlers (asm symbols) */
long kvmppc_h_set_dabr(struct kvm_vcpu *vcpu, unsigned long dabr);
long kvmppc_h_set_xdabr(struct kvm_vcpu *vcpu, unsigned long dabr,
			unsigned long dabrx);
74 
75 #endif /* _ASM_POWERPC_ASM_PROTOTYPES_H */
76