/*
 * Copyright 2019 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */
#ifndef __AMDGPU_NBIO_H__
#define __AMDGPU_NBIO_H__

/*
 * amdgpu nbio functions
 */
struct nbio_hdp_flush_reg {
	u32 ref_and_mask_cp0;
	u32 ref_and_mask_cp1;
	u32 ref_and_mask_cp2;
	u32 ref_and_mask_cp3;
	u32 ref_and_mask_cp4;
	u32 ref_and_mask_cp5;
	u32 ref_and_mask_cp6;
	u32 ref_and_mask_cp7;
	u32 ref_and_mask_cp8;
	u32 ref_and_mask_cp9;
	u32 ref_and_mask_sdma0;
	u32 ref_and_mask_sdma1;
	u32 ref_and_mask_sdma2;
	u32 ref_and_mask_sdma3;
	u32 ref_and_mask_sdma4;
	u32 ref_and_mask_sdma5;
	u32 ref_and_mask_sdma6;
	u32 ref_and_mask_sdma7;
};
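
/*
 * Each ref_and_mask value above selects one client's bit in the NBIO HDP
 * flush request/done register pair: the flush path writes the mask to the
 * "req" register and then polls the "done" register for the same bit.
 * A minimal sketch of how a ring backend might consume these values,
 * assuming the usual reg_write_reg_wait packet support; the helper name
 * example_emit_hdp_flush() is hypothetical, not part of this header:
 *
 *	static void example_emit_hdp_flush(struct amdgpu_ring *ring, u32 ref_and_mask)
 *	{
 *		struct amdgpu_device *adev = ring->adev;
 *		u32 req = adev->nbio.funcs->get_hdp_flush_req_offset(adev);
 *		u32 done = adev->nbio.funcs->get_hdp_flush_done_offset(adev);
 *
 *		// write ref_and_mask to "req", then wait for the bit in "done"
 *		amdgpu_ring_emit_reg_write_reg_wait(ring, req, done,
 *						    ref_and_mask, ref_and_mask);
 *	}
 */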

struct amdgpu_nbio_funcs {
	const struct nbio_hdp_flush_reg *hdp_flush_reg;
	u32 (*get_hdp_flush_req_offset)(struct amdgpu_device *adev);
	u32 (*get_hdp_flush_done_offset)(struct amdgpu_device *adev);
	u32 (*get_pcie_index_offset)(struct amdgpu_device *adev);
	u32 (*get_pcie_data_offset)(struct amdgpu_device *adev);
	u32 (*get_rev_id)(struct amdgpu_device *adev);
	void (*mc_access_enable)(struct amdgpu_device *adev, bool enable);
	void (*hdp_flush)(struct amdgpu_device *adev, struct amdgpu_ring *ring);
	u32 (*get_memsize)(struct amdgpu_device *adev);
	void (*sdma_doorbell_range)(struct amdgpu_device *adev, int instance,
			bool use_doorbell, int doorbell_index, int doorbell_size);
	void (*vcn_doorbell_range)(struct amdgpu_device *adev, bool use_doorbell,
				   int doorbell_index, int instance);
	void (*enable_doorbell_aperture)(struct amdgpu_device *adev,
					 bool enable);
	void (*enable_doorbell_selfring_aperture)(struct amdgpu_device *adev,
						  bool enable);
	void (*ih_doorbell_range)(struct amdgpu_device *adev,
				  bool use_doorbell, int doorbell_index);
	void (*enable_doorbell_interrupt)(struct amdgpu_device *adev,
					  bool enable);
	void (*update_medium_grain_clock_gating)(struct amdgpu_device *adev,
						 bool enable);
	void (*update_medium_grain_light_sleep)(struct amdgpu_device *adev,
						bool enable);
	void (*get_clockgating_state)(struct amdgpu_device *adev,
				      u32 *flags);
	void (*ih_control)(struct amdgpu_device *adev);
	void (*init_registers)(struct amdgpu_device *adev);
	void (*remap_hdp_registers)(struct amdgpu_device *adev);
	void (*handle_ras_controller_intr_no_bifring)(struct amdgpu_device *adev);
	void (*handle_ras_err_event_athub_intr_no_bifring)(struct amdgpu_device *adev);
	int (*init_ras_controller_interrupt)(struct amdgpu_device *adev);
	int (*init_ras_err_event_athub_interrupt)(struct amdgpu_device *adev);
	void (*query_ras_error_count)(struct amdgpu_device *adev,
					void *ras_error_status);
	int (*ras_late_init)(struct amdgpu_device *adev);
};
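
/*
 * This is a per-ASIC callback table: each NBIO IP version supplies its own
 * instance and assigns it during early init, after which common code only
 * calls through adev->nbio.funcs. A hedged sketch of that wiring; the
 * nbio_vX_Y_* names are placeholders for an ASIC-specific implementation,
 * not symbols declared here:
 *
 *	static const struct amdgpu_nbio_funcs nbio_vX_Y_funcs = {
 *		.get_rev_id  = nbio_vX_Y_get_rev_id,
 *		.hdp_flush   = nbio_vX_Y_hdp_flush,
 *		.get_memsize = nbio_vX_Y_get_memsize,
 *		// ... remaining callbacks filled in by the ASIC-specific file
 *	};
 *
 *	// early init (sketch):
 *	adev->nbio.funcs = &nbio_vX_Y_funcs;
 *	adev->nbio.hdp_flush_reg = &nbio_vX_Y_hdp_flush_reg;
 *
 *	// callers then go through the table, e.g.:
 *	adev->nbio.funcs->hdp_flush(adev, ring);
 */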

struct amdgpu_nbio {
	const struct nbio_hdp_flush_reg *hdp_flush_reg;
	struct amdgpu_irq_src ras_controller_irq;
	struct amdgpu_irq_src ras_err_event_athub_irq;
	struct ras_common_if *ras_if;
	const struct amdgpu_nbio_funcs *funcs;
};

int amdgpu_nbio_ras_late_init(struct amdgpu_device *adev);
void amdgpu_nbio_ras_fini(struct amdgpu_device *adev);
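
/*
 * A minimal sketch of how the RAS helpers above are typically reached,
 * assuming the common pattern of routing through the per-ASIC callback
 * table (the exact call site varies by IP block):
 *
 *	if (adev->nbio.funcs->ras_late_init) {
 *		// often wired to amdgpu_nbio_ras_late_init()
 *		int r = adev->nbio.funcs->ras_late_init(adev);
 *		if (r)
 *			return r;
 *	}
 */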
#endif