@@ -2997,7 +2997,7 @@ static netdev_tx_t stmmac_tso_xmit(struct sk_buff *skb, struct net_device *dev)
	 * descriptor and then barrier is needed to make sure that
	 * all is coherent before granting the DMA engine.
	 */
-	dma_wmb();
+	wmb();

	if (netif_msg_pktdata(priv)) {
		pr_info("%s: curr=%d dirty=%d f=%d, e=%d, f_p=%p, nfrags %d\n",
@@ -3221,7 +3221,7 @@ static netdev_tx_t stmmac_xmit(struct sk_buff *skb, struct net_device *dev)
	 * descriptor and then barrier is needed to make sure that
	 * all is coherent before granting the DMA engine.
	 */
-	dma_wmb();
+	wmb();
	}

	netdev_tx_sent_queue(netdev_get_tx_queue(dev, queue), skb->len);