/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#include "drmP.h"
#include "drm_sarea.h"
#include "radeon.h"
#include "radeon_drm.h"

/* Tear down modesetting and the core device, then free the radeon_device. */
int radeon_driver_unload_kms(struct drm_device *dev)
{
        struct radeon_device *rdev = dev->dev_private;

        if (rdev == NULL)
                return 0;
        radeon_modeset_fini(rdev);
        radeon_device_fini(rdev);
        kfree(rdev);
        dev->dev_private = NULL;
        return 0;
}

/* Allocate the radeon_device, detect the bus type and bring up the core
 * device and modesetting code.
 */
int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags)
{
        struct radeon_device *rdev;
        int r;

        rdev = kzalloc(sizeof(struct radeon_device), GFP_KERNEL);
        if (rdev == NULL) {
                return -ENOMEM;
        }
        dev->dev_private = (void *)rdev;

        /* update BUS flag */
        if (drm_device_is_agp(dev)) {
                flags |= RADEON_IS_AGP;
        } else if (drm_device_is_pcie(dev)) {
                flags |= RADEON_IS_PCIE;
        } else {
                flags |= RADEON_IS_PCI;
        }

        /* radeon_device_init() should report only fatal errors (memory
         * allocation, iomapping or memory manager initialization failures).
         * On success it must have initialized the GPU memory controller so
         * that VRAM allocation works.
         */
        r = radeon_device_init(rdev, dev, dev->pdev, flags);
        if (r) {
                dev_err(&dev->pdev->dev, "Fatal error during GPU init\n");
                goto out;
        }
        /* Likewise, radeon_modeset_init() should fail only on a fatal error;
         * otherwise it must provide enough functionality for shadowfb to run.
         */
        r = radeon_modeset_init(rdev);
        if (r)
                dev_err(&dev->pdev->dev, "Fatal error during modeset init\n");
out:
        if (r)
                radeon_driver_unload_kms(dev);
        return r;
}


/*
 * Userspace information query ioctl.
 */
int radeon_info_ioctl(struct drm_device *dev, void *data, struct drm_file *filp)
{
        struct radeon_device *rdev = dev->dev_private;
        struct drm_radeon_info *info;
        uint32_t *value_ptr;
        uint32_t value;

        info = data;
        value_ptr = (uint32_t *)((unsigned long)info->value);
        switch (info->request) {
        case RADEON_INFO_DEVICE_ID:
                value = dev->pci_device;
                break;
        case RADEON_INFO_NUM_GB_PIPES:
                value = rdev->num_gb_pipes;
                break;
        case RADEON_INFO_NUM_Z_PIPES:
                value = rdev->num_z_pipes;
                break;
        case RADEON_INFO_ACCEL_WORKING:
                value = rdev->accel_working;
                break;
        default:
                DRM_DEBUG("Invalid request %d\n", info->request);
                return -EINVAL;
        }
        if (DRM_COPY_TO_USER(value_ptr, &value, sizeof(uint32_t))) {
                DRM_ERROR("copy_to_user\n");
                return -EFAULT;
        }
        return 0;
}


/*
 * Outdated mess for the old DRM path with Xorg being in charge;
 * these are all no-op stubs now.
 */
int radeon_driver_firstopen_kms(struct drm_device *dev)
{
        return 0;
}

void radeon_driver_lastclose_kms(struct drm_device *dev)
{
}

int radeon_driver_open_kms(struct drm_device *dev, struct drm_file *file_priv)
{
        return 0;
}

void radeon_driver_postclose_kms(struct drm_device *dev,
                                 struct drm_file *file_priv)
{
}

void radeon_driver_preclose_kms(struct drm_device *dev,
                                struct drm_file *file_priv)
{
}


/*
 * VBlank related functions.
 */
/* Return the frame counter for the given CRTC. */
u32 radeon_get_vblank_counter_kms(struct drm_device *dev, int crtc)
{
        struct radeon_device *rdev = dev->dev_private;

        if (crtc < 0 || crtc > 1) {
                DRM_ERROR("Invalid crtc %d\n", crtc);
                return -EINVAL;
        }

        return radeon_get_vblank_counter(rdev, crtc);
}

/* Enable the vblank interrupt for the given CRTC. */
int radeon_enable_vblank_kms(struct drm_device *dev, int crtc)
{
        struct radeon_device *rdev = dev->dev_private;

        if (crtc < 0 || crtc > 1) {
                DRM_ERROR("Invalid crtc %d\n", crtc);
                return -EINVAL;
        }

        rdev->irq.crtc_vblank_int[crtc] = true;

        return radeon_irq_set(rdev);
}

/* Disable the vblank interrupt for the given CRTC. */
void radeon_disable_vblank_kms(struct drm_device *dev, int crtc)
{
        struct radeon_device *rdev = dev->dev_private;

        if (crtc < 0 || crtc > 1) {
                DRM_ERROR("Invalid crtc %d\n", crtc);
                return;
        }

        rdev->irq.crtc_vblank_int[crtc] = false;

        radeon_irq_set(rdev);
}


/*
 * IOCTL.
 */
int radeon_dma_ioctl_kms(struct drm_device *dev, void *data,
                         struct drm_file *file_priv)
{
        /* Not valid in KMS. */
        return -EINVAL;
}

#define KMS_INVALID_IOCTL(name)                                         \
int name(struct drm_device *dev, void *data, struct drm_file *file_priv)\
{                                                                       \
        DRM_ERROR("invalid ioctl with kms %s\n", __func__);             \
        return -EINVAL;                                                 \
}

/*
 * All these ioctls are invalid in the KMS world.
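 * Each stub generated by KMS_INVALID_IOCTL() below just logs an error and
 * returns -EINVAL to the caller.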
 */
KMS_INVALID_IOCTL(radeon_cp_init_kms)
KMS_INVALID_IOCTL(radeon_cp_start_kms)
KMS_INVALID_IOCTL(radeon_cp_stop_kms)
KMS_INVALID_IOCTL(radeon_cp_reset_kms)
KMS_INVALID_IOCTL(radeon_cp_idle_kms)
KMS_INVALID_IOCTL(radeon_cp_resume_kms)
KMS_INVALID_IOCTL(radeon_engine_reset_kms)
KMS_INVALID_IOCTL(radeon_fullscreen_kms)
KMS_INVALID_IOCTL(radeon_cp_swap_kms)
KMS_INVALID_IOCTL(radeon_cp_clear_kms)
KMS_INVALID_IOCTL(radeon_cp_vertex_kms)
KMS_INVALID_IOCTL(radeon_cp_indices_kms)
KMS_INVALID_IOCTL(radeon_cp_texture_kms)
KMS_INVALID_IOCTL(radeon_cp_stipple_kms)
KMS_INVALID_IOCTL(radeon_cp_indirect_kms)
KMS_INVALID_IOCTL(radeon_cp_vertex2_kms)
KMS_INVALID_IOCTL(radeon_cp_cmdbuf_kms)
KMS_INVALID_IOCTL(radeon_cp_getparam_kms)
KMS_INVALID_IOCTL(radeon_cp_flip_kms)
KMS_INVALID_IOCTL(radeon_mem_alloc_kms)
KMS_INVALID_IOCTL(radeon_mem_free_kms)
KMS_INVALID_IOCTL(radeon_mem_init_heap_kms)
KMS_INVALID_IOCTL(radeon_irq_emit_kms)
KMS_INVALID_IOCTL(radeon_irq_wait_kms)
KMS_INVALID_IOCTL(radeon_cp_setparam_kms)
KMS_INVALID_IOCTL(radeon_surface_alloc_kms)
KMS_INVALID_IOCTL(radeon_surface_free_kms)


struct drm_ioctl_desc radeon_ioctls_kms[] = {
        DRM_IOCTL_DEF(DRM_RADEON_CP_INIT, radeon_cp_init_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_CP_START, radeon_cp_start_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_CP_STOP, radeon_cp_stop_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_CP_RESET, radeon_cp_reset_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_CP_IDLE, radeon_cp_idle_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_CP_RESUME, radeon_cp_resume_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_RESET, radeon_engine_reset_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_FULLSCREEN, radeon_fullscreen_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_SWAP, radeon_cp_swap_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_CLEAR, radeon_cp_clear_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_VERTEX, radeon_cp_vertex_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_INDICES, radeon_cp_indices_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_TEXTURE, radeon_cp_texture_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_STIPPLE, radeon_cp_stipple_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_INDIRECT, radeon_cp_indirect_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_VERTEX2, radeon_cp_vertex2_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_CMDBUF, radeon_cp_cmdbuf_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GETPARAM, radeon_cp_getparam_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_FLIP, radeon_cp_flip_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_ALLOC, radeon_mem_alloc_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_FREE, radeon_mem_free_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_INIT_HEAP, radeon_mem_init_heap_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF(DRM_RADEON_IRQ_EMIT, radeon_irq_emit_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_IRQ_WAIT, radeon_irq_wait_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_SETPARAM, radeon_cp_setparam_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_SURF_ALLOC, radeon_surface_alloc_kms, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_SURF_FREE, radeon_surface_free_kms, DRM_AUTH),
        /* KMS */
        DRM_IOCTL_DEF(DRM_RADEON_GEM_INFO, radeon_gem_info_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_CREATE, radeon_gem_create_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_MMAP, radeon_gem_mmap_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_SET_DOMAIN, radeon_gem_set_domain_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_PREAD, radeon_gem_pread_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_PWRITE, radeon_gem_pwrite_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_WAIT_IDLE, radeon_gem_wait_idle_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_CS, radeon_cs_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_INFO, radeon_info_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_SET_TILING, radeon_gem_set_tiling_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_GET_TILING, radeon_gem_get_tiling_ioctl, DRM_AUTH),
        DRM_IOCTL_DEF(DRM_RADEON_GEM_BUSY, radeon_gem_busy_ioctl, DRM_AUTH),
};
int radeon_max_kms_ioctl = DRM_ARRAY_SIZE(radeon_ioctls_kms);