@@ -502,11 +502,16 @@ static void dma_tx_callback(void *data)
 	struct scatterlist *sgl = &sport->tx_sgl[0];
 	struct circ_buf *xmit = &sport->port.state->xmit;
 	unsigned long flags;
+	unsigned long temp;
 
 	spin_lock_irqsave(&sport->port.lock, flags);
 
 	dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE);
 
+	temp = readl(sport->port.membase + UCR1);
+	temp &= ~UCR1_TDMAEN;
+	writel(temp, sport->port.membase + UCR1);
+
 	/* update the stat */
 	xmit->tail = (xmit->tail + sport->tx_bytes) & (UART_XMIT_SIZE - 1);
 	sport->port.icount.tx += sport->tx_bytes;
@@ -539,6 +544,7 @@ static void imx_dma_tx(struct imx_port *sport)
 	struct dma_async_tx_descriptor *desc;
 	struct dma_chan *chan = sport->dma_chan_tx;
 	struct device *dev = sport->port.dev;
+	unsigned long temp;
 	int ret;
 
 	if (sport->dma_is_txing)
@@ -575,6 +581,11 @@ static void imx_dma_tx(struct imx_port *sport)
 
 	dev_dbg(dev, "TX: prepare to send %lu bytes by DMA.\n",
 		uart_circ_chars_pending(xmit));
+
+	temp = readl(sport->port.membase + UCR1);
+	temp |= UCR1_TDMAEN;
+	writel(temp, sport->port.membase + UCR1);
+
 	/* fire it */
 	sport->dma_is_txing = 1;
 	dmaengine_submit(desc);
@@ -1258,6 +1269,7 @@ static void imx_flush_buffer(struct uart_port *port)
 {
 	struct imx_port *sport = (struct imx_port *)port;
 	struct scatterlist *sgl = &sport->tx_sgl[0];
+	unsigned long temp;
 
 	if (!sport->dma_chan_tx)
 		return;
@@ -1267,6 +1279,9 @@ static void imx_flush_buffer(struct uart_port *port)
 	if (sport->dma_is_txing) {
 		dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents,
 			     DMA_TO_DEVICE);
+		temp = readl(sport->port.membase + UCR1);
+		temp &= ~UCR1_TDMAEN;
+		writel(temp, sport->port.membase + UCR1);
 		sport->dma_is_txing = false;
 	}
 }
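
For reference, the UCR1 read-modify-write sequence that this patch repeats in
three places (set UCR1_TDMAEN before firing a DMA TX, clear it in the completion
callback and in flush_buffer) follows a single pattern. A minimal sketch of how
it could be factored into a helper; the helper name imx_port_rmw_ucr1 is
hypothetical and not part of this patch:

	/*
	 * Read-modify-write UCR1: clear the bits in 'clr', then set the
	 * bits in 'set'. Illustrative sketch only.
	 */
	static void imx_port_rmw_ucr1(struct imx_port *sport,
				      unsigned long clr, unsigned long set)
	{
		unsigned long temp;

		temp = readl(sport->port.membase + UCR1);
		temp &= ~clr;
		temp |= set;
		writel(temp, sport->port.membase + UCR1);
	}

With such a helper, enabling TX DMA would read
imx_port_rmw_ucr1(sport, 0, UCR1_TDMAEN) and disabling it
imx_port_rmw_ucr1(sport, UCR1_TDMAEN, 0); callers would still need to hold
the port lock where the surrounding code requires it.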