@@ -3195,14 +3195,16 @@ static netdev_tx_t stmmac_xmit(struct sk_buff *skb, struct net_device *dev)
 		stmmac_prepare_tx_desc(priv, first, 1, nopaged_len,
 				csum_insertion, priv->mode, 1, last_segment,
 				skb->len);
-
-		/* The own bit must be the latest setting done when prepare the
-		 * descriptor and then barrier is needed to make sure that
-		 * all is coherent before granting the DMA engine.
-		 */
-		wmb();
+	} else {
+		stmmac_set_tx_owner(priv, first);
 	}
 
+	/* The own bit must be the latest setting done when prepare the
+	 * descriptor and then barrier is needed to make sure that
+	 * all is coherent before granting the DMA engine.
+	 */
+	wmb();
+
 	netdev_tx_sent_queue(netdev_get_tx_queue(dev, queue), skb->len);
 
 	stmmac_enable_dma_transmission(priv, priv->ioaddr);
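
Note: as a minimal, driver-independent sketch of the ordering rule the moved comment describes (fill every descriptor field first, set the OWN bit last, then issue a write barrier before kicking the DMA engine), the snippet below may help; struct my_tx_desc, DESC_OWN, ring_doorbell() and the simplified wmb() stand-in are invented for illustration and are not stmmac code.

#include <stdint.h>

#define DESC_OWN	(1u << 31)

/* Hypothetical TX descriptor layout, for illustration only. */
struct my_tx_desc {
	uint32_t buf_addr;
	uint32_t len;
	uint32_t status;	/* the OWN bit lives here */
};

/* Stand-in for the kernel's wmb(); reduced to a compiler barrier here. */
#define wmb()	__asm__ __volatile__("" ::: "memory")

static void ring_doorbell(void)
{
	/* A real driver would write the DMA tail-pointer register here. */
}

static void hand_desc_to_dma(struct my_tx_desc *d, uint32_t addr, uint32_t len)
{
	d->buf_addr = addr;	/* 1. fill every descriptor field ...          */
	d->len = len;
	d->status = DESC_OWN;	/* 2. ... and set the OWN bit last             */
	wmb();			/* 3. order those stores before ...            */
	ring_doorbell();	/* 4. ... letting the DMA engine fetch the desc */
}

int main(void)
{
	struct my_tx_desc d = { 0 };

	hand_desc_to_dma(&d, 0x1000, 64);
	return 0;
}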