serial: imx: disable TDMAEN in imx_flush_buffer()
When terminating the DMA, make sure the interrupt is disabled, too. This fixes random kernel Oopses due to dma_tx_callback() being called for invalid transmissions. Since we disable TDMAEN here, make sure it is enabled again whenever a TX DMA is started. Signed-off-by: Jiada Wang <jiada_wang@mentor.com> Signed-off-by: Dirk Behme <dirk.behme@de.bosch.com> Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
This commit is contained in:
parent
0bbc9b81c1
commit
a2c718ce6a
|
@@ -502,11 +502,16 @@ static void dma_tx_callback(void *data)
 	struct scatterlist *sgl = &sport->tx_sgl[0];
 	struct circ_buf *xmit = &sport->port.state->xmit;
 	unsigned long flags;
+	unsigned long temp;
 
 	spin_lock_irqsave(&sport->port.lock, flags);
 
 	dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE);
 
+	temp = readl(sport->port.membase + UCR1);
+	temp &= ~UCR1_TDMAEN;
+	writel(temp, sport->port.membase + UCR1);
+
 	/* update the stat */
 	xmit->tail = (xmit->tail + sport->tx_bytes) & (UART_XMIT_SIZE - 1);
 	sport->port.icount.tx += sport->tx_bytes;
@@ -539,6 +544,7 @@ static void imx_dma_tx(struct imx_port *sport)
 	struct dma_async_tx_descriptor *desc;
 	struct dma_chan *chan = sport->dma_chan_tx;
 	struct device *dev = sport->port.dev;
+	unsigned long temp;
 	int ret;
 
 	if (sport->dma_is_txing)
@@ -575,6 +581,11 @@ static void imx_dma_tx(struct imx_port *sport)
 	dev_dbg(dev, "TX: prepare to send %lu bytes by DMA.\n",
 			uart_circ_chars_pending(xmit));
 
+	temp = readl(sport->port.membase + UCR1);
+	temp |= UCR1_TDMAEN;
+	writel(temp, sport->port.membase + UCR1);
+
 	/* fire it */
 	sport->dma_is_txing = 1;
 	dmaengine_submit(desc);
@@ -1258,6 +1269,7 @@ static void imx_flush_buffer(struct uart_port *port)
 {
 	struct imx_port *sport = (struct imx_port *)port;
 	struct scatterlist *sgl = &sport->tx_sgl[0];
+	unsigned long temp;
 
 	if (!sport->dma_chan_tx)
 		return;
@@ -1267,6 +1279,9 @@ static void imx_flush_buffer(struct uart_port *port)
 	if (sport->dma_is_txing) {
 		dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents,
 			     DMA_TO_DEVICE);
+		temp = readl(sport->port.membase + UCR1);
+		temp &= ~UCR1_TDMAEN;
+		writel(temp, sport->port.membase + UCR1);
 		sport->dma_is_txing = false;
 	}
 }
Loading…
Reference in New Issue