/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#include "drmP.h"
#include "drm_sarea.h"
#include "radeon.h"
#include "radeon_drm.h"


/*
 * Driver load/unload
 */
int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags)
{
        struct radeon_device *rdev;
        int r;

        rdev = kzalloc(sizeof(struct radeon_device), GFP_KERNEL);
        if (rdev == NULL) {
                return -ENOMEM;
        }
        dev->dev_private = (void *)rdev;

        /* update BUS flag */
        if (drm_device_is_agp(dev)) {
                flags |= RADEON_IS_AGP;
        } else if (drm_device_is_pcie(dev)) {
                flags |= RADEON_IS_PCIE;
        } else {
                flags |= RADEON_IS_PCI;
        }

        /* radeon_device_init should report only fatal errors
         * (memory allocation failure, iomapping failure or
         * memory manager initialization failure); it must
         * properly initialize the GPU MC controller and permit
         * VRAM allocation.
         */
        r = radeon_device_init(rdev, dev, dev->pdev, flags);
        if (r) {
                DRM_ERROR("Fatal error while trying to initialize radeon.\n");
                return r;
        }
        /* Again, modeset_init should fail only on fatal errors;
         * otherwise it should provide enough functionality
         * for shadowfb to run.
         */
        r = radeon_modeset_init(rdev);
        if (r) {
                return r;
        }
        return 0;
}

int radeon_driver_unload_kms(struct drm_device *dev)
{
        struct radeon_device *rdev = dev->dev_private;

        if (rdev == NULL)
                return 0;
        radeon_modeset_fini(rdev);
        radeon_device_fini(rdev);
        kfree(rdev);
        dev->dev_private = NULL;
        return 0;
}


/*
 * Userspace get information ioctl
 */
int radeon_info_ioctl(struct drm_device *dev, void *data, struct drm_file *filp)
{
        struct radeon_device *rdev = dev->dev_private;
        struct drm_radeon_info *info;
        uint32_t *value_ptr;
        uint32_t value;

        info = data;
        /* info->value carries the user-space address the result is copied to */
        value_ptr = (uint32_t *)((unsigned long)info->value);
        switch (info->request) {
        case RADEON_INFO_DEVICE_ID:
                value = dev->pci_device;
                break;
        case RADEON_INFO_NUM_GB_PIPES:
                value = rdev->num_gb_pipes;
                break;
        case RADEON_INFO_NUM_Z_PIPES:
                value = rdev->num_z_pipes;
                break;
        case RADEON_INFO_ACCEL_WORKING:
                value = rdev->accel_working;
                break;
        default:
                DRM_DEBUG("Invalid request %d\n", info->request);
                return -EINVAL;
        }
        if (DRM_COPY_TO_USER(value_ptr, &value, sizeof(uint32_t))) {
                DRM_ERROR("copy_to_user\n");
                return -EFAULT;
        }
        return 0;
}


/*
 * Outdated mess for old drm with Xorg being in charge (void functions now).
 */
int radeon_driver_firstopen_kms(struct drm_device *dev)
{
        return 0;
}


void radeon_driver_lastclose_kms(struct drm_device *dev)
{
}

int radeon_driver_open_kms(struct drm_device *dev, struct drm_file *file_priv)
{
        return 0;
}

void radeon_driver_postclose_kms(struct drm_device *dev,
                                 struct drm_file *file_priv)
{
}

void radeon_driver_preclose_kms(struct drm_device *dev,
                                struct drm_file *file_priv)
{
}


/*
 * VBlank related functions.
 */
u32 radeon_get_vblank_counter_kms(struct drm_device *dev, int crtc)
{
        struct radeon_device *rdev = dev->dev_private;

        if (crtc < 0 || crtc > 1) {
                DRM_ERROR("Invalid crtc %d\n", crtc);
                return -EINVAL;
        }

        return radeon_get_vblank_counter(rdev, crtc);
}

int radeon_enable_vblank_kms(struct drm_device *dev, int crtc)
{
        struct radeon_device *rdev = dev->dev_private;

        if (crtc < 0 || crtc > 1) {
                DRM_ERROR("Invalid crtc %d\n", crtc);
                return -EINVAL;
        }

        rdev->irq.crtc_vblank_int[crtc] = true;

        return radeon_irq_set(rdev);
}

void radeon_disable_vblank_kms(struct drm_device *dev, int crtc)
{
        struct radeon_device *rdev = dev->dev_private;

        if (crtc < 0 || crtc > 1) {
                DRM_ERROR("Invalid crtc %d\n", crtc);
                return;
        }

        rdev->irq.crtc_vblank_int[crtc] = false;

        radeon_irq_set(rdev);
}


/*
 * IOCTL.
 */
int radeon_dma_ioctl_kms(struct drm_device *dev, void *data,
                         struct drm_file *file_priv)
{
        /* Not valid in KMS. */
        return -EINVAL;
}

#define KMS_INVALID_IOCTL(name) \
int name(struct drm_device *dev, void *data, struct drm_file *file_priv) \
{ \
        DRM_ERROR("invalid ioctl with kms %s\n", __func__); \
        return -EINVAL; \
}

/*
 * All these ioctls are invalid in the KMS world.
 */
KMS_INVALID_IOCTL(radeon_cp_init_kms)
KMS_INVALID_IOCTL(radeon_cp_start_kms)
KMS_INVALID_IOCTL(radeon_cp_stop_kms)
KMS_INVALID_IOCTL(radeon_cp_reset_kms)
KMS_INVALID_IOCTL(radeon_cp_idle_kms)
KMS_INVALID_IOCTL(radeon_cp_resume_kms)
KMS_INVALID_IOCTL(radeon_engine_reset_kms)
KMS_INVALID_IOCTL(radeon_fullscreen_kms)
KMS_INVALID_IOCTL(radeon_cp_swap_kms)
KMS_INVALID_IOCTL(radeon_cp_clear_kms)
KMS_INVALID_IOCTL(radeon_cp_vertex_kms)
KMS_INVALID_IOCTL(radeon_cp_indices_kms)
KMS_INVALID_IOCTL(radeon_cp_texture_kms)
KMS_INVALID_IOCTL(radeon_cp_stipple_kms)
KMS_INVALID_IOCTL(radeon_cp_indirect_kms)
KMS_INVALID_IOCTL(radeon_cp_vertex2_kms)
KMS_INVALID_IOCTL(radeon_cp_cmdbuf_kms)
KMS_INVALID_IOCTL(radeon_cp_getparam_kms)
KMS_INVALID_IOCTL(radeon_cp_flip_kms)
KMS_INVALID_IOCTL(radeon_mem_alloc_kms)
KMS_INVALID_IOCTL(radeon_mem_free_kms)
KMS_INVALID_IOCTL(radeon_mem_init_heap_kms)
KMS_INVALID_IOCTL(radeon_irq_emit_kms)
KMS_INVALID_IOCTL(radeon_irq_wait_kms)
KMS_INVALID_IOCTL(radeon_cp_setparam_kms)
KMS_INVALID_IOCTL(radeon_surface_alloc_kms)
KMS_INVALID_IOCTL(radeon_surface_free_kms)


struct drm_ioctl_desc radeon_ioctls_kms[] = {
        DRM_IOCTL_DEF(DRM_RADEON_CP_INIT, radeon_cp_init_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_CP_START, radeon_cp_start_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_CP_STOP, radeon_cp_stop_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_CP_RESET, radeon_cp_reset_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_CP_IDLE, radeon_cp_idle_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_CP_RESUME, radeon_cp_resume_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_RESET, radeon_engine_reset_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_FULLSCREEN, radeon_fullscreen_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_SWAP, radeon_cp_swap_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_CLEAR, radeon_cp_clear_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_VERTEX, radeon_cp_vertex_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_INDICES, radeon_cp_indices_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_TEXTURE, radeon_cp_texture_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_STIPPLE, radeon_cp_stipple_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_INDIRECT, radeon_cp_indirect_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_VERTEX2, radeon_cp_vertex2_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_CMDBUF, radeon_cp_cmdbuf_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GETPARAM, radeon_cp_getparam_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_FLIP, radeon_cp_flip_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_ALLOC, radeon_mem_alloc_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_FREE, radeon_mem_free_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_INIT_HEAP, radeon_mem_init_heap_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_IRQ_EMIT, radeon_irq_emit_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_IRQ_WAIT, radeon_irq_wait_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_SETPARAM, radeon_cp_setparam_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_SURF_ALLOC, radeon_surface_alloc_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_SURF_FREE, radeon_surface_free_kms, DRM_AUTH),
        /* KMS */
        DRM_IOCTL_DEF(DRM_RADEON_GEM_INFO, radeon_gem_info_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_CREATE, radeon_gem_create_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_MMAP, radeon_gem_mmap_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_SET_DOMAIN, radeon_gem_set_domain_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_PREAD, radeon_gem_pread_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_PWRITE, radeon_gem_pwrite_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_WAIT_IDLE, radeon_gem_wait_idle_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_CS, radeon_cs_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_INFO, radeon_info_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_SET_TILING, radeon_gem_set_tiling_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_GET_TILING, radeon_gem_get_tiling_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_BUSY, radeon_gem_busy_ioctl, DRM_AUTH),
};
int radeon_max_kms_ioctl = DRM_ARRAY_SIZE(radeon_ioctls_kms);