/*
 * drivers/usb/musb/ux500_dma.c
 *
 * U8500 and U5500 DMA support code
 *
 * Copyright (C) 2009 STMicroelectronics
 * Copyright (C) 2011 ST-Ericsson SA
 * Authors:
 *	Mian Yousaf Kaukab <mian.yousaf.kaukab@stericsson.com>
 *	Praveena Nadahally <praveen.nadahally@stericsson.com>
 *	Rajaram Regupathy <ragupathy.rajaram@stericsson.com>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/pfn.h>
#include <mach/usb.h>
#include "musb_core.h"

struct ux500_dma_channel {
	struct dma_channel channel;
	struct ux500_dma_controller *controller;
	struct musb_hw_ep *hw_ep;
	struct work_struct channel_work;
	struct dma_chan *dma_chan;
	unsigned int cur_len;
	dma_cookie_t cookie;
	u8 ch_num;
	u8 is_tx;
	u8 is_allocated;
};

struct ux500_dma_controller {
	struct dma_controller controller;
	struct ux500_dma_channel rx_channel[UX500_MUSB_DMA_NUM_RX_CHANNELS];
	struct ux500_dma_channel tx_channel[UX500_MUSB_DMA_NUM_TX_CHANNELS];
	u32 num_rx_channels;
	u32 num_tx_channels;
	void *private_data;
	dma_addr_t phy_base;
};

/* Work function invoked from DMA callback to handle tx transfers. */
static void ux500_tx_work(struct work_struct *data)
{
	struct ux500_dma_channel *ux500_channel = container_of(data,
		struct ux500_dma_channel, channel_work);
	struct musb_hw_ep *hw_ep = ux500_channel->hw_ep;
	struct musb *musb = hw_ep->musb;
	unsigned long flags;

	DBG(4, "DMA tx transfer done on hw_ep=%d\n", hw_ep->epnum);

	spin_lock_irqsave(&musb->lock, flags);
	ux500_channel->channel.actual_len = ux500_channel->cur_len;
	ux500_channel->channel.status = MUSB_DMA_STATUS_FREE;
	musb_dma_completion(musb, hw_ep->epnum, ux500_channel->is_tx);
	spin_unlock_irqrestore(&musb->lock, flags);
}
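/*
 * Note: completion handling is deferred from the dmaengine callback
 * (ux500_dma_callback() below) to ux500_tx_work() above and
 * ux500_rx_work() below, presumably because the callback is invoked
 * from the DMA driver's tasklet and the completion path is better run
 * from process context.
 */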
/* Work function invoked from DMA callback to handle rx transfers. */
static void ux500_rx_work(struct work_struct *data)
{
	struct ux500_dma_channel *ux500_channel = container_of(data,
		struct ux500_dma_channel, channel_work);
	struct musb_hw_ep *hw_ep = ux500_channel->hw_ep;
	struct musb *musb = hw_ep->musb;
	unsigned long flags;

	DBG(4, "DMA rx transfer done on hw_ep=%d\n", hw_ep->epnum);

	spin_lock_irqsave(&musb->lock, flags);
	ux500_channel->channel.actual_len = ux500_channel->cur_len;
	ux500_channel->channel.status = MUSB_DMA_STATUS_FREE;
	musb_dma_completion(musb, hw_ep->epnum, ux500_channel->is_tx);
	spin_unlock_irqrestore(&musb->lock, flags);
}

void ux500_dma_callback(void *private_data)
{
	struct dma_channel *channel = private_data;
	struct ux500_dma_channel *ux500_channel = channel->private_data;

	schedule_work(&ux500_channel->channel_work);
}

static bool ux500_configure_channel(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	struct ux500_dma_channel *ux500_channel = channel->private_data;
	struct musb_hw_ep *hw_ep = ux500_channel->hw_ep;
	struct dma_chan *dma_chan = ux500_channel->dma_chan;
	struct dma_async_tx_descriptor *dma_desc;
	enum dma_data_direction direction;
	struct scatterlist sg;
	struct dma_slave_config slave_conf;
	enum dma_slave_buswidth addr_width;
	dma_addr_t usb_fifo_addr = (MUSB_FIFO_OFFSET(hw_ep->epnum) +
					ux500_channel->controller->phy_base);

	DBG(4, "packet_sz=%d, mode=%d, dma_addr=0x%x, len=%d is_tx=%d\n",
			packet_sz, mode, dma_addr, len, ux500_channel->is_tx);

	ux500_channel->cur_len = len;

	sg_init_table(&sg, 1);
	sg_set_page(&sg, pfn_to_page(PFN_DOWN(dma_addr)), len,
					offset_in_page(dma_addr));
	sg_dma_address(&sg) = dma_addr;
	sg_dma_len(&sg) = len;

	direction = ux500_channel->is_tx ? DMA_TO_DEVICE : DMA_FROM_DEVICE;
	addr_width = (len & 0x3) ? DMA_SLAVE_BUSWIDTH_1_BYTE :
					DMA_SLAVE_BUSWIDTH_4_BYTES;

	slave_conf.direction = direction;
	if (direction == DMA_FROM_DEVICE) {
		slave_conf.src_addr = usb_fifo_addr;
		slave_conf.src_addr_width = addr_width;
		slave_conf.src_maxburst = 16;
	} else {
		slave_conf.dst_addr = usb_fifo_addr;
		slave_conf.dst_addr_width = addr_width;
		slave_conf.dst_maxburst = 16;
	}
	dma_chan->device->device_control(dma_chan, DMA_SLAVE_CONFIG,
					 (unsigned long) &slave_conf);

	dma_desc = dma_chan->device->
			device_prep_slave_sg(dma_chan, &sg, 1, direction,
					     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!dma_desc)
		return false;

	dma_desc->callback = ux500_dma_callback;
	dma_desc->callback_param = channel;
	ux500_channel->cookie = dma_desc->tx_submit(dma_desc);

	dma_async_issue_pending(dma_chan);

	return true;
}
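/*
 * Note that ux500_configure_channel() above only logs packet_sz and
 * mode: the transfer is programmed purely from dma_addr and len, and
 * the slave bus width is derived from the length alignment (32-bit
 * accesses for word-multiple lengths, byte accesses otherwise).
 */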
static struct dma_channel *ux500_dma_channel_allocate(struct dma_controller *c,
				struct musb_hw_ep *hw_ep, u8 is_tx)
{
	struct ux500_dma_controller *controller = container_of(c,
			struct ux500_dma_controller, controller);
	struct ux500_dma_channel *ux500_channel = NULL;
	u8 ch_num = hw_ep->epnum - 1;
	u32 max_ch;

	/* Max 8 DMA channels (0 - 7). Each DMA channel can only be allocated
	 * to specified hw_ep. For example DMA channel 0 can only be allocated
	 * to hw_ep 1 and 9.
	 */
	if (ch_num > 7)
		ch_num -= 8;

	max_ch = is_tx ? controller->num_tx_channels :
			controller->num_rx_channels;

	if (ch_num >= max_ch)
		return NULL;

	ux500_channel = is_tx ? &(controller->tx_channel[ch_num]) :
				&(controller->rx_channel[ch_num]);

	/* Check if channel is already used. */
	if (ux500_channel->is_allocated)
		return NULL;

	ux500_channel->hw_ep = hw_ep;
	ux500_channel->is_allocated = 1;

	DBG(7, "hw_ep=%d, is_tx=0x%x, channel=%d\n",
		hw_ep->epnum, is_tx, ch_num);

	return &(ux500_channel->channel);
}

static void ux500_dma_channel_release(struct dma_channel *channel)
{
	struct ux500_dma_channel *ux500_channel = channel->private_data;

	DBG(7, "channel=%d\n", ux500_channel->ch_num);

	if (ux500_channel->is_allocated) {
		ux500_channel->is_allocated = 0;
		channel->status = MUSB_DMA_STATUS_FREE;
		channel->actual_len = 0;
	}
}

static int ux500_dma_is_compatible(struct dma_channel *channel,
		u16 maxpacket, void *buf, u32 length)
{
	if ((maxpacket & 0x3)	||
		((int)buf & 0x3) ||
		(length < 512)	||
		(length & 0x3))
		return false;
	else
		return true;
}

static int ux500_dma_channel_program(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	int ret;

	BUG_ON(channel->status == MUSB_DMA_STATUS_UNKNOWN ||
		channel->status == MUSB_DMA_STATUS_BUSY);

	if (!ux500_dma_is_compatible(channel, packet_sz, (void *)dma_addr, len))
		return false;

	channel->status = MUSB_DMA_STATUS_BUSY;
	channel->actual_len = 0;
	ret = ux500_configure_channel(channel, packet_sz, mode, dma_addr, len);
	if (!ret)
		channel->status = MUSB_DMA_STATUS_FREE;

	return ret;
}

static int ux500_dma_channel_abort(struct dma_channel *channel)
{
	struct ux500_dma_channel *ux500_channel = channel->private_data;
	struct ux500_dma_controller *controller = ux500_channel->controller;
	struct musb *musb = controller->private_data;
	void __iomem *epio = musb->endpoints[ux500_channel->hw_ep->epnum].regs;
	u16 csr;

	DBG(4, "channel=%d, is_tx=%d\n", ux500_channel->ch_num,
		ux500_channel->is_tx);

	if (channel->status == MUSB_DMA_STATUS_BUSY) {
		if (ux500_channel->is_tx) {
			csr = musb_readw(epio, MUSB_TXCSR);
			csr &= ~(MUSB_TXCSR_AUTOSET |
				 MUSB_TXCSR_DMAENAB |
				 MUSB_TXCSR_DMAMODE);
			musb_writew(epio, MUSB_TXCSR, csr);
		} else {
			csr = musb_readw(epio, MUSB_RXCSR);
			csr &= ~(MUSB_RXCSR_AUTOCLEAR |
				 MUSB_RXCSR_DMAENAB |
				 MUSB_RXCSR_DMAMODE);
			musb_writew(epio, MUSB_RXCSR, csr);
		}

		ux500_channel->dma_chan->device->
				device_control(ux500_channel->dma_chan,
					DMA_TERMINATE_ALL, 0);
		channel->status = MUSB_DMA_STATUS_FREE;
	}
	return 0;
}
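/*
 * Note on the abort path above: the DMA enable bits are cleared in the
 * endpoint's TXCSR/RXCSR before DMA_TERMINATE_ALL is issued, so the
 * MUSB core stops driving the FIFO before the DMA engine transfer is
 * torn down.
 */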
static int ux500_dma_controller_stop(struct dma_controller *c)
{
	struct ux500_dma_controller *controller = container_of(c,
			struct ux500_dma_controller, controller);
	struct ux500_dma_channel *ux500_channel;
	struct dma_channel *channel;
	u8 ch_num;

	for (ch_num = 0; ch_num < controller->num_rx_channels; ch_num++) {
		channel = &controller->rx_channel[ch_num].channel;
		ux500_channel = channel->private_data;

		ux500_dma_channel_release(channel);

		if (ux500_channel->dma_chan)
			dma_release_channel(ux500_channel->dma_chan);
	}

	for (ch_num = 0; ch_num < controller->num_tx_channels; ch_num++) {
		channel = &controller->tx_channel[ch_num].channel;
		ux500_channel = channel->private_data;

		ux500_dma_channel_release(channel);

		if (ux500_channel->dma_chan)
			dma_release_channel(ux500_channel->dma_chan);
	}

	return 0;
}

static int ux500_dma_controller_start(struct dma_controller *c)
{
	struct ux500_dma_controller *controller = container_of(c,
			struct ux500_dma_controller, controller);
	struct ux500_dma_channel *ux500_channel = NULL;
	struct musb *musb = controller->private_data;
	struct device *dev = musb->controller;
	struct musb_hdrc_platform_data *plat = dev->platform_data;
	struct ux500_musb_board_data *data = plat->board_data;
	struct dma_channel *dma_channel = NULL;
	u32 ch_num;
	u8 dir;
	u8 is_tx = 0;

	void **param_array;
	struct ux500_dma_channel *channel_array;
	u32 ch_count;
	void (*musb_channel_work)(struct work_struct *);
	dma_cap_mask_t mask;

	if ((data->num_rx_channels > UX500_MUSB_DMA_NUM_RX_CHANNELS) ||
		(data->num_tx_channels > UX500_MUSB_DMA_NUM_TX_CHANNELS))
		return -EINVAL;

	controller->num_rx_channels = data->num_rx_channels;
	controller->num_tx_channels = data->num_tx_channels;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* Prepare the loop for RX channels */
	channel_array = controller->rx_channel;
	ch_count = data->num_rx_channels;
	param_array = data->dma_rx_param_array;
	musb_channel_work = ux500_rx_work;

	for (dir = 0; dir < 2; dir++) {
		for (ch_num = 0; ch_num < ch_count; ch_num++) {
			ux500_channel = &channel_array[ch_num];
			ux500_channel->controller = controller;
			ux500_channel->ch_num = ch_num;
			ux500_channel->is_tx = is_tx;

			dma_channel = &(ux500_channel->channel);
			dma_channel->private_data = ux500_channel;
			dma_channel->status = MUSB_DMA_STATUS_FREE;
			dma_channel->max_len = SZ_16M;

			ux500_channel->dma_chan = dma_request_channel(mask,
							data->dma_filter,
							param_array[ch_num]);
			if (!ux500_channel->dma_chan) {
				ERR("Dma pipe allocation error dir=%d ch=%d\n",
					dir, ch_num);

				/* Release already allocated channels */
				ux500_dma_controller_stop(c);

				return -EBUSY;
			}

			INIT_WORK(&ux500_channel->channel_work,
				musb_channel_work);
		}

		/* Prepare the loop for TX channels */
		channel_array = controller->tx_channel;
		ch_count = data->num_tx_channels;
		param_array = data->dma_tx_param_array;
		musb_channel_work = ux500_tx_work;
		is_tx = 1;
	}

	return 0;
}

void dma_controller_destroy(struct dma_controller *c)
{
	struct ux500_dma_controller *controller = container_of(c,
			struct ux500_dma_controller, controller);

	kfree(controller);
}
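/*
 * dma_controller_create() below only allocates the controller and
 * fills in the operation table; the dmaengine channels themselves are
 * requested later, when the MUSB core invokes the ->start() callback
 * (ux500_dma_controller_start() above).
 */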
struct dma_controller *__init
dma_controller_create(struct musb *musb, void __iomem *base)
{
	struct ux500_dma_controller *controller;
	struct platform_device *pdev = to_platform_device(musb->controller);
	struct resource *iomem;

	controller = kzalloc(sizeof(*controller), GFP_KERNEL);
	if (!controller)
		return NULL;

	controller->private_data = musb;

	/* Save physical address for DMA controller. */
	iomem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	controller->phy_base = (dma_addr_t) iomem->start;

	controller->controller.start = ux500_dma_controller_start;
	controller->controller.stop = ux500_dma_controller_stop;
	controller->controller.channel_alloc = ux500_dma_channel_allocate;
	controller->controller.channel_release = ux500_dma_channel_release;
	controller->controller.channel_program = ux500_dma_channel_program;
	controller->controller.channel_abort = ux500_dma_channel_abort;
	controller->controller.is_compatible = ux500_dma_is_compatible;

	return &controller->controller;
}
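/*
 * Board wiring sketch (illustrative only, not part of this driver):
 * platform code supplies a struct ux500_musb_board_data (declared in
 * <mach/usb.h>) whose dma_filter and dma_{rx,tx}_param_array pick the
 * DMA engine channels.  The array names below are made up for the
 * example; only the struct fields are taken from this file:
 *
 *	static void *usb_dma_rx_param_array[UX500_MUSB_DMA_NUM_RX_CHANNELS];
 *	static void *usb_dma_tx_param_array[UX500_MUSB_DMA_NUM_TX_CHANNELS];
 *
 *	static struct ux500_musb_board_data musb_board_data = {
 *		.dma_rx_param_array	= usb_dma_rx_param_array,
 *		.dma_tx_param_array	= usb_dma_tx_param_array,
 *		.num_rx_channels	= UX500_MUSB_DMA_NUM_RX_CHANNELS,
 *		.num_tx_channels	= UX500_MUSB_DMA_NUM_TX_CHANNELS,
 *		.dma_filter		= stedma40_filter,
 *	};
 */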