/*
 * Copyright 2019 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */
#ifndef __AMDGPU_NBIO_H__
#define __AMDGPU_NBIO_H__

/*
 * amdgpu nbio functions: per-ASIC callbacks for the NBIO block covering
 * HDP flush handshaking, doorbell apertures, PCIe register access, clock
 * gating and RAS handling, provided by the nbio_v* backends.
 */

/*
 * Reference/mask bits for the NBIO HDP flush request/done handshake.  Each
 * CP ring and SDMA instance owns one bit; the matching register offsets
 * come from the get_hdp_flush_req_offset() and get_hdp_flush_done_offset()
 * callbacks below.
 */
struct nbio_hdp_flush_reg {
	u32 ref_and_mask_cp0;
	u32 ref_and_mask_cp1;
	u32 ref_and_mask_cp2;
	u32 ref_and_mask_cp3;
	u32 ref_and_mask_cp4;
	u32 ref_and_mask_cp5;
	u32 ref_and_mask_cp6;
	u32 ref_and_mask_cp7;
	u32 ref_and_mask_cp8;
	u32 ref_and_mask_cp9;
	u32 ref_and_mask_sdma0;
	u32 ref_and_mask_sdma1;
	u32 ref_and_mask_sdma2;
	u32 ref_and_mask_sdma3;
	u32 ref_and_mask_sdma4;
	u32 ref_and_mask_sdma5;
	u32 ref_and_mask_sdma6;
	u32 ref_and_mask_sdma7;
};
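
/*
 * Illustrative sketch only (roughly how a ring backend such as
 * gfx_v9_0_ring_emit_hdp_flush() consumes these fields): pick the ring's
 * ref/mask bit, then emit a wait-reg-mem that writes the NBIO flush
 * request register and polls the flush done register for that bit:
 *
 *	const struct nbio_hdp_flush_reg *hf = adev->nbio.hdp_flush_reg;
 *	u32 ref_and_mask = hf->ref_and_mask_cp0;
 *
 *	gfx_v9_0_wait_reg_mem(ring, reg_mem_engine, 0, 1,
 *			      adev->nbio.funcs->get_hdp_flush_req_offset(adev),
 *			      adev->nbio.funcs->get_hdp_flush_done_offset(adev),
 *			      ref_and_mask, ref_and_mask, 0x20);
 */
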
/*
 * RAS hooks for the NBIO block.  Backends that support NBIO RAS (e.g.
 * nbio_v7_4) provide the interrupt init routines and the handlers invoked
 * for the RAS controller and err_event_athub interrupts.
 */
struct amdgpu_nbio_ras {
	struct amdgpu_ras_block_object ras_block;
	void (*handle_ras_controller_intr_no_bifring)(struct amdgpu_device *adev);
	void (*handle_ras_err_event_athub_intr_no_bifring)(struct amdgpu_device *adev);
	int (*init_ras_controller_interrupt)(struct amdgpu_device *adev);
	int (*init_ras_err_event_athub_interrupt)(struct amdgpu_device *adev);
};
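
/*
 * Illustrative wiring (sketch only): ASIC setup points adev->nbio.ras at
 * its backend instance, after which the common code registers the block
 * with the RAS core, e.g.:
 *
 *	adev->nbio.ras = &nbio_v7_4_ras;
 *	r = amdgpu_nbio_ras_sw_init(adev);
 */
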
/*
 * Per-ASIC NBIO callback table, filled in by the nbio_v* backends and
 * called through adev->nbio.funcs.  Optional hooks (e.g. the workaround
 * callbacks) may be left NULL.
 */
struct amdgpu_nbio_funcs {
	const struct nbio_hdp_flush_reg *hdp_flush_reg;
	u32 (*get_hdp_flush_req_offset)(struct amdgpu_device *adev);
	u32 (*get_hdp_flush_done_offset)(struct amdgpu_device *adev);
	u32 (*get_pcie_index_offset)(struct amdgpu_device *adev);
	u32 (*get_pcie_data_offset)(struct amdgpu_device *adev);
	u32 (*get_pcie_index_hi_offset)(struct amdgpu_device *adev);
	u32 (*get_pcie_port_index_offset)(struct amdgpu_device *adev);
	u32 (*get_pcie_port_data_offset)(struct amdgpu_device *adev);
	u32 (*get_rev_id)(struct amdgpu_device *adev);
	void (*mc_access_enable)(struct amdgpu_device *adev, bool enable);
	u32 (*get_memsize)(struct amdgpu_device *adev);
	void (*sdma_doorbell_range)(struct amdgpu_device *adev, int instance,
			bool use_doorbell, int doorbell_index, int doorbell_size);
	void (*vcn_doorbell_range)(struct amdgpu_device *adev, bool use_doorbell,
				   int doorbell_index, int instance);
	void (*gc_doorbell_init)(struct amdgpu_device *adev);
	void (*enable_doorbell_aperture)(struct amdgpu_device *adev,
					 bool enable);
	void (*enable_doorbell_selfring_aperture)(struct amdgpu_device *adev,
						  bool enable);
	void (*ih_doorbell_range)(struct amdgpu_device *adev,
				  bool use_doorbell, int doorbell_index);
	void (*enable_doorbell_interrupt)(struct amdgpu_device *adev,
					  bool enable);
	void (*update_medium_grain_clock_gating)(struct amdgpu_device *adev,
						 bool enable);
	void (*update_medium_grain_light_sleep)(struct amdgpu_device *adev,
						bool enable);
	void (*get_clockgating_state)(struct amdgpu_device *adev,
				      u64 *flags);
	void (*ih_control)(struct amdgpu_device *adev);
	void (*init_registers)(struct amdgpu_device *adev);
	void (*remap_hdp_registers)(struct amdgpu_device *adev);
	void (*enable_aspm)(struct amdgpu_device *adev,
			    bool enable);
	void (*program_aspm)(struct amdgpu_device *adev);
	void (*apply_lc_spc_mode_wa)(struct amdgpu_device *adev);
	void (*apply_l1_link_width_reconfig_wa)(struct amdgpu_device *adev);
	void (*clear_doorbell_interrupt)(struct amdgpu_device *adev);
	u32 (*get_rom_offset)(struct amdgpu_device *adev);
	int (*get_compute_partition_mode)(struct amdgpu_device *adev);
	u32 (*get_memory_partition_mode)(struct amdgpu_device *adev,
					 u32 *supp_modes);
	u64 (*get_pcie_replay_count)(struct amdgpu_device *adev);
	void (*get_pcie_usage)(struct amdgpu_device *adev, uint64_t *count0,
					uint64_t *count1);
};
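
/*
 * Illustrative wiring (sketch only): early ASIC init selects the backend,
 * e.g.:
 *
 *	adev->nbio.funcs = &nbio_v7_4_funcs;
 *	adev->nbio.hdp_flush_reg = &nbio_v7_4_hdp_flush_reg;
 *
 * after which IP blocks call through the table, for example:
 *
 *	u32 memsize = adev->nbio.funcs->get_memsize(adev);
 *
 *	adev->nbio.funcs->ih_doorbell_range(adev, adev->irq.ih.use_doorbell,
 *					    adev->irq.ih.doorbell_index);
 */
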
/*
 * Per-device NBIO state, embedded in struct amdgpu_device as adev->nbio.
 */
struct amdgpu_nbio {
	const struct nbio_hdp_flush_reg *hdp_flush_reg;
	struct amdgpu_irq_src ras_controller_irq;
	struct amdgpu_irq_src ras_err_event_athub_irq;
	struct ras_common_if *ras_if;
	const struct amdgpu_nbio_funcs *funcs;
	struct amdgpu_nbio_ras  *ras;
};
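
/*
 * Common helpers implemented in amdgpu_nbio.c: thin wrappers around the
 * per-ASIC callbacks above, plus the glue that registers the NBIO RAS
 * block and its interrupts with the RAS core.
 */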
int amdgpu_nbio_ras_sw_init(struct amdgpu_device *adev);
void amdgpu_nbio_get_pcie_usage(struct amdgpu_device *adev, uint64_t *count0, uint64_t *count1);
int amdgpu_nbio_ras_late_init(struct amdgpu_device *adev, struct ras_common_if *ras_block);
u64 amdgpu_nbio_get_pcie_replay_count(struct amdgpu_device *adev);

#endif