@@ -2068,10 +2068,9 @@ static inline void i40evf_tx_map(struct i40e_ring *tx_ring, struct sk_buff *skb,
 					 I40E_TXD_QW1_CMD_SHIFT);
 
 	/* notify HW of packet */
-	if (!tail_bump)
+	if (!tail_bump) {
 		prefetchw(tx_desc + 1);
-
-	if (tail_bump) {
+	} else {
 		/* Force memory writes to complete before letting h/w
 		 * know there are new descriptors to fetch. (Only
 		 * applicable for weak-ordered memory model archs,
@@ -2080,7 +2079,6 @@ static inline void i40evf_tx_map(struct i40e_ring *tx_ring, struct sk_buff *skb,
 		wmb();
 		writel(i, tx_ring->tail);
 	}
-
 	return;
 
 dma_error:
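
The net effect of these hunks is that tail_bump is tested only once: the descriptor prefetch stays on the !tail_bump path, and the memory barrier plus doorbell write move into the else branch, removing the second, redundant if (tail_bump) check and the stray blank lines. For reference, a rough sketch of how the block reads with the patch applied (reconstructed from the context lines above rather than copied from the tree; the full barrier comment is elided):

	/* notify HW of packet */
	if (!tail_bump) {
		prefetchw(tx_desc + 1);
	} else {
		/* Force memory writes to complete before letting h/w
		 * know there are new descriptors to fetch.
		 */
		wmb();
		writel(i, tx_ring->tail);
	}
	return;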