musbhsdma.c (12359B)
// SPDX-License-Identifier: GPL-2.0
/*
 * MUSB OTG driver - support for Mentor's DMA controller
 *
 * Copyright 2005 Mentor Graphics Corporation
 * Copyright (C) 2005-2007 by Texas Instruments
 */
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include "musb_core.h"
#include "musb_dma.h"

#define MUSB_HSDMA_CHANNEL_OFFSET(_bchannel, _offset)		\
		(MUSB_HSDMA_BASE + (_bchannel << 4) + _offset)

#define musb_read_hsdma_addr(mbase, bchannel)	\
	musb_readl(mbase,	\
		MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_ADDRESS))

#define musb_write_hsdma_addr(mbase, bchannel, addr) \
	musb_writel(mbase, \
		MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_ADDRESS), \
		addr)

#define musb_read_hsdma_count(mbase, bchannel)	\
	musb_readl(mbase,	\
		MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_COUNT))

#define musb_write_hsdma_count(mbase, bchannel, len) \
	musb_writel(mbase, \
		MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_COUNT), \
		len)
/* control register (16-bit): */
#define MUSB_HSDMA_ENABLE_SHIFT		0
#define MUSB_HSDMA_TRANSMIT_SHIFT	1
#define MUSB_HSDMA_MODE1_SHIFT		2
#define MUSB_HSDMA_IRQENABLE_SHIFT	3
#define MUSB_HSDMA_ENDPOINT_SHIFT	4
#define MUSB_HSDMA_BUSERROR_SHIFT	8
#define MUSB_HSDMA_BURSTMODE_SHIFT	9
#define MUSB_HSDMA_BURSTMODE		(3 << MUSB_HSDMA_BURSTMODE_SHIFT)
#define MUSB_HSDMA_BURSTMODE_UNSPEC	0
#define MUSB_HSDMA_BURSTMODE_INCR4	1
#define MUSB_HSDMA_BURSTMODE_INCR8	2
#define MUSB_HSDMA_BURSTMODE_INCR16	3

#define MUSB_HSDMA_CHANNELS		8

struct musb_dma_controller;

struct musb_dma_channel {
	struct dma_channel		channel;
	struct musb_dma_controller	*controller;
	u32				start_addr;
	u32				len;
	u16				max_packet_sz;
	u8				idx;
	u8				epnum;
	u8				transmit;
};

struct musb_dma_controller {
	struct dma_controller		controller;
	struct musb_dma_channel		channel[MUSB_HSDMA_CHANNELS];
	void				*private_data;
	void __iomem			*base;
	u8				channel_count;
	u8				used_channels;
	int				irq;
};

static void dma_channel_release(struct dma_channel *channel);

static void dma_controller_stop(struct musb_dma_controller *controller)
{
	struct musb *musb = controller->private_data;
	struct dma_channel *channel;
	u8 bit;

	if (controller->used_channels != 0) {
		dev_err(musb->controller,
			"Stopping DMA controller while channel active\n");

		for (bit = 0; bit < MUSB_HSDMA_CHANNELS; bit++) {
			if (controller->used_channels & (1 << bit)) {
				channel = &controller->channel[bit].channel;
				dma_channel_release(channel);

				if (!controller->used_channels)
					break;
			}
		}
	}
}

static struct dma_channel *dma_channel_allocate(struct dma_controller *c,
				struct musb_hw_ep *hw_ep, u8 transmit)
{
	struct musb_dma_controller *controller = container_of(c,
			struct musb_dma_controller, controller);
	struct musb_dma_channel *musb_channel = NULL;
	struct dma_channel *channel = NULL;
	u8 bit;

	for (bit = 0; bit < MUSB_HSDMA_CHANNELS; bit++) {
		if (!(controller->used_channels & (1 << bit))) {
			controller->used_channels |= (1 << bit);
			musb_channel = &(controller->channel[bit]);
			musb_channel->controller = controller;
			musb_channel->idx = bit;
			musb_channel->epnum = hw_ep->epnum;
			musb_channel->transmit = transmit;
			channel = &(musb_channel->channel);
			channel->private_data = musb_channel;
			channel->status = MUSB_DMA_STATUS_FREE;
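			/* one programmed transfer covers at most 0x100000 bytes (1 MiB) */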
			channel->max_len = 0x100000;
			/* Tx => mode 1; Rx => mode 0 */
			channel->desired_mode = transmit;
			channel->actual_len = 0;
			break;
		}
	}

	return channel;
}

static void dma_channel_release(struct dma_channel *channel)
{
	struct musb_dma_channel *musb_channel = channel->private_data;

	channel->actual_len = 0;
	musb_channel->start_addr = 0;
	musb_channel->len = 0;

	musb_channel->controller->used_channels &=
		~(1 << musb_channel->idx);

	channel->status = MUSB_DMA_STATUS_UNKNOWN;
}

static void configure_channel(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	struct musb_dma_channel *musb_channel = channel->private_data;
	struct musb_dma_controller *controller = musb_channel->controller;
	struct musb *musb = controller->private_data;
	void __iomem *mbase = controller->base;
	u8 bchannel = musb_channel->idx;
	u16 csr = 0;

	musb_dbg(musb, "%p, pkt_sz %d, addr %pad, len %d, mode %d",
			channel, packet_sz, &dma_addr, len, mode);

	if (mode) {
		csr |= 1 << MUSB_HSDMA_MODE1_SHIFT;
		BUG_ON(len < packet_sz);
	}
	csr |= MUSB_HSDMA_BURSTMODE_INCR16
				<< MUSB_HSDMA_BURSTMODE_SHIFT;

	csr |= (musb_channel->epnum << MUSB_HSDMA_ENDPOINT_SHIFT)
		| (1 << MUSB_HSDMA_ENABLE_SHIFT)
		| (1 << MUSB_HSDMA_IRQENABLE_SHIFT)
		| (musb_channel->transmit
			? (1 << MUSB_HSDMA_TRANSMIT_SHIFT)
			: 0);

	/* address/count */
	musb_write_hsdma_addr(mbase, bchannel, dma_addr);
	musb_write_hsdma_count(mbase, bchannel, len);

	/* control (this should start things) */
	musb_writew(mbase,
		MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_CONTROL),
		csr);
}

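/*
 * Worked example (illustrative only, derived from the code above): for
 * endpoint 1, Tx, DMA mode 1, configure_channel() ends up programming
 *
 *	csr = (MUSB_HSDMA_BURSTMODE_INCR16 << MUSB_HSDMA_BURSTMODE_SHIFT)
 *		| (1 << MUSB_HSDMA_ENDPOINT_SHIFT)
 *		| (1 << MUSB_HSDMA_IRQENABLE_SHIFT)
 *		| (1 << MUSB_HSDMA_MODE1_SHIFT)
 *		| (1 << MUSB_HSDMA_TRANSMIT_SHIFT)
 *		| (1 << MUSB_HSDMA_ENABLE_SHIFT)
 *	    = 0x061f
 *
 * i.e. INCR16 bursts, endpoint 1, IRQ enable, mode 1, transmit, enable.
 */
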
static int dma_channel_program(struct dma_channel *channel,
				u16 packet_sz, u8 mode,
				dma_addr_t dma_addr, u32 len)
{
	struct musb_dma_channel *musb_channel = channel->private_data;
	struct musb_dma_controller *controller = musb_channel->controller;
	struct musb *musb = controller->private_data;

	musb_dbg(musb, "ep%d-%s pkt_sz %d, dma_addr %pad length %d, mode %d",
		musb_channel->epnum,
		musb_channel->transmit ? "Tx" : "Rx",
		packet_sz, &dma_addr, len, mode);

	BUG_ON(channel->status == MUSB_DMA_STATUS_UNKNOWN ||
		channel->status == MUSB_DMA_STATUS_BUSY);

	/*
	 * The DMA engine in RTL1.8 and above cannot handle
	 * DMA addresses that are not aligned to a 4 byte boundary.
	 * It ends up masking the last two bits of the address
	 * programmed in DMA_ADDR.
	 *
	 * Fail such DMA transfers, so that the backup PIO mode
	 * can carry out the transfer
	 */
	if ((musb->hwvers >= MUSB_HWVERS_1800) && (dma_addr % 4))
		return false;

	channel->actual_len = 0;
	musb_channel->start_addr = dma_addr;
	musb_channel->len = len;
	musb_channel->max_packet_sz = packet_sz;
	channel->status = MUSB_DMA_STATUS_BUSY;

	configure_channel(channel, packet_sz, mode, dma_addr, len);

	return true;
}

static int dma_channel_abort(struct dma_channel *channel)
{
	struct musb_dma_channel *musb_channel = channel->private_data;
	void __iomem *mbase = musb_channel->controller->base;
	struct musb *musb = musb_channel->controller->private_data;

	u8 bchannel = musb_channel->idx;
	int offset;
	u16 csr;

	if (channel->status == MUSB_DMA_STATUS_BUSY) {
		if (musb_channel->transmit) {
			offset = musb->io.ep_offset(musb_channel->epnum,
						MUSB_TXCSR);

			/*
			 * The programming guide says that we must clear
			 * the DMAENAB bit before the DMAMODE bit...
			 */
			csr = musb_readw(mbase, offset);
			csr &= ~(MUSB_TXCSR_AUTOSET | MUSB_TXCSR_DMAENAB);
			musb_writew(mbase, offset, csr);
			csr &= ~MUSB_TXCSR_DMAMODE;
			musb_writew(mbase, offset, csr);
		} else {
			offset = musb->io.ep_offset(musb_channel->epnum,
						MUSB_RXCSR);

			csr = musb_readw(mbase, offset);
			csr &= ~(MUSB_RXCSR_AUTOCLEAR |
				 MUSB_RXCSR_DMAENAB |
				 MUSB_RXCSR_DMAMODE);
			musb_writew(mbase, offset, csr);
		}

		musb_writew(mbase,
			MUSB_HSDMA_CHANNEL_OFFSET(bchannel, MUSB_HSDMA_CONTROL),
			0);
		musb_write_hsdma_addr(mbase, bchannel, 0);
		musb_write_hsdma_count(mbase, bchannel, 0);
		channel->status = MUSB_DMA_STATUS_FREE;
	}

	return 0;
}

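/*
 * DMA interrupt handler.  It is exported (together with
 * musbhs_dma_controller_create_noirq() below) so that, presumably, glue
 * layers without a dedicated "dma" interrupt line can dispatch it from
 * their own combined interrupt handler.
 */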
irqreturn_t dma_controller_irq(int irq, void *private_data)
{
	struct musb_dma_controller *controller = private_data;
	struct musb *musb = controller->private_data;
	struct musb_dma_channel *musb_channel;
	struct dma_channel *channel;

	void __iomem *mbase = controller->base;

	irqreturn_t retval = IRQ_NONE;

	unsigned long flags;

	u8 bchannel;
	u8 int_hsdma;

	u32 addr, count;
	u16 csr;

	spin_lock_irqsave(&musb->lock, flags);

	int_hsdma = musb_clearb(mbase, MUSB_HSDMA_INTR);

	if (!int_hsdma) {
		musb_dbg(musb, "spurious DMA irq");

		for (bchannel = 0; bchannel < MUSB_HSDMA_CHANNELS; bchannel++) {
			musb_channel = (struct musb_dma_channel *)
					&(controller->channel[bchannel]);
			channel = &musb_channel->channel;
			if (channel->status == MUSB_DMA_STATUS_BUSY) {
				count = musb_read_hsdma_count(mbase, bchannel);

				if (count == 0)
					int_hsdma |= (1 << bchannel);
			}
		}

		musb_dbg(musb, "int_hsdma = 0x%x", int_hsdma);

		if (!int_hsdma)
			goto done;
	}

	for (bchannel = 0; bchannel < MUSB_HSDMA_CHANNELS; bchannel++) {
		if (int_hsdma & (1 << bchannel)) {
			musb_channel = (struct musb_dma_channel *)
					&(controller->channel[bchannel]);
			channel = &musb_channel->channel;

			csr = musb_readw(mbase,
					MUSB_HSDMA_CHANNEL_OFFSET(bchannel,
							MUSB_HSDMA_CONTROL));

			if (csr & (1 << MUSB_HSDMA_BUSERROR_SHIFT)) {
				musb_channel->channel.status =
					MUSB_DMA_STATUS_BUS_ABORT;
			} else {
				addr = musb_read_hsdma_addr(mbase,
						bchannel);
				channel->actual_len = addr
					- musb_channel->start_addr;

				musb_dbg(musb, "ch %p, 0x%x -> 0x%x (%zu / %d) %s",
					channel, musb_channel->start_addr,
					addr, channel->actual_len,
					musb_channel->len,
					(channel->actual_len
						< musb_channel->len) ?
					"=> reconfig 0" : "=> complete");

				channel->status = MUSB_DMA_STATUS_FREE;

				/* completed */
				if (musb_channel->transmit &&
					(!channel->desired_mode ||
					(channel->actual_len %
					    musb_channel->max_packet_sz))) {
					u8 epnum = musb_channel->epnum;
					int offset = musb->io.ep_offset(epnum,
								MUSB_TXCSR);
					u16 txcsr;

					/*
					 * The programming guide says that we
					 * must clear DMAENAB before DMAMODE.
					 */
					musb_ep_select(mbase, epnum);
					txcsr = musb_readw(mbase, offset);
					if (channel->desired_mode == 1) {
						txcsr &= ~(MUSB_TXCSR_DMAENAB
							| MUSB_TXCSR_AUTOSET);
						musb_writew(mbase, offset, txcsr);
						/* Send out the packet */
						txcsr &= ~MUSB_TXCSR_DMAMODE;
						txcsr |= MUSB_TXCSR_DMAENAB;
					}
					txcsr |= MUSB_TXCSR_TXPKTRDY;
					musb_writew(mbase, offset, txcsr);
				}
				musb_dma_completion(musb, musb_channel->epnum,
						musb_channel->transmit);
			}
		}
	}

	retval = IRQ_HANDLED;
done:
	spin_unlock_irqrestore(&musb->lock, flags);
	return retval;
}
EXPORT_SYMBOL_GPL(dma_controller_irq);

void musbhs_dma_controller_destroy(struct dma_controller *c)
{
	struct musb_dma_controller *controller = container_of(c,
			struct musb_dma_controller, controller);

	dma_controller_stop(controller);

	if (controller->irq)
		free_irq(controller->irq, c);

	kfree(controller);
}
EXPORT_SYMBOL_GPL(musbhs_dma_controller_destroy);

static struct musb_dma_controller *
dma_controller_alloc(struct musb *musb, void __iomem *base)
{
	struct musb_dma_controller *controller;

	controller = kzalloc(sizeof(*controller), GFP_KERNEL);
	if (!controller)
		return NULL;

	controller->channel_count = MUSB_HSDMA_CHANNELS;
	controller->private_data = musb;
	controller->base = base;

	controller->controller.channel_alloc = dma_channel_allocate;
	controller->controller.channel_release = dma_channel_release;
	controller->controller.channel_program = dma_channel_program;
	controller->controller.channel_abort = dma_channel_abort;
	return controller;
}

struct dma_controller *
musbhs_dma_controller_create(struct musb *musb, void __iomem *base)
{
	struct musb_dma_controller *controller;
	struct device *dev = musb->controller;
	struct platform_device *pdev = to_platform_device(dev);
	int irq = platform_get_irq_byname(pdev, "dma");

	if (irq <= 0) {
		dev_err(dev, "No DMA interrupt line!\n");
		return NULL;
	}

	controller = dma_controller_alloc(musb, base);
	if (!controller)
		return NULL;

	if (request_irq(irq, dma_controller_irq, 0,
			dev_name(musb->controller), controller)) {
		dev_err(dev, "request_irq %d failed!\n", irq);
		musb_dma_controller_destroy(&controller->controller);

		return NULL;
	}

	controller->irq = irq;

	return &controller->controller;
}
EXPORT_SYMBOL_GPL(musbhs_dma_controller_create);

struct dma_controller *
musbhs_dma_controller_create_noirq(struct musb *musb, void __iomem *base)
{
	struct musb_dma_controller *controller;

	controller = dma_controller_alloc(musb, base);
	if (!controller)
		return NULL;

	return &controller->controller;
}
EXPORT_SYMBOL_GPL(musbhs_dma_controller_create_noirq);
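
/*
 * Hookup sketch (illustrative only, not part of this driver): platform glue
 * is expected to select this DMA engine through its struct musb_platform_ops.
 * The ops instance and init callback below are hypothetical; the dma_init and
 * dma_exit hooks and the MUSB_DMA_INVENTRA quirk flag are assumed from the
 * MUSB core headers.
 *
 *	static const struct musb_platform_ops example_glue_ops = {
 *		.quirks		= MUSB_DMA_INVENTRA,
 *		.init		= example_glue_init,
 *		.dma_init	= musbhs_dma_controller_create,
 *		.dma_exit	= musbhs_dma_controller_destroy,
 *	};
 *
 * A glue layer that multiplexes the DMA interrupt with the core interrupt
 * would instead set .dma_init = musbhs_dma_controller_create_noirq and call
 * dma_controller_irq() from its own interrupt handler.
 */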