@@ -578,17 +578,13 @@ int ath10k_ce_revoke_recv_next(struct ath10k_ce_pipe *ce_state,
  * The caller takes responsibility for any necessary locking.
  */
 int ath10k_ce_completed_send_next_nolock(struct ath10k_ce_pipe *ce_state,
-					 void **per_transfer_contextp,
-					 u32 *bufferp,
-					 unsigned int *nbytesp,
-					 unsigned int *transfer_idp)
+					 void **per_transfer_contextp)
 {
 	struct ath10k_ce_ring *src_ring = ce_state->src_ring;
 	u32 ctrl_addr = ce_state->ctrl_addr;
 	struct ath10k *ar = ce_state->ar;
 	unsigned int nentries_mask = src_ring->nentries_mask;
 	unsigned int sw_index = src_ring->sw_index;
-	struct ce_desc *sdesc, *sbase;
 	unsigned int read_index;
 
 	if (src_ring->hw_index == sw_index) {
@@ -613,15 +609,6 @@ int ath10k_ce_completed_send_next_nolock(struct ath10k_ce_pipe *ce_state,
 	if (read_index == sw_index)
 		return -EIO;
 
-	sbase = src_ring->base_addr_owner_space;
-	sdesc = CE_SRC_RING_TO_DESC(sbase, sw_index);
-
-	/* Return data from completed source descriptor */
-	*bufferp = __le32_to_cpu(sdesc->addr);
-	*nbytesp = __le16_to_cpu(sdesc->nbytes);
-	*transfer_idp = MS(__le16_to_cpu(sdesc->flags),
-			   CE_DESC_FLAGS_META_DATA);
-
 	if (per_transfer_contextp)
 		*per_transfer_contextp =
 			src_ring->per_transfer_context[sw_index];
@@ -696,10 +683,7 @@ int ath10k_ce_cancel_send_next(struct ath10k_ce_pipe *ce_state,
 }
 
 int ath10k_ce_completed_send_next(struct ath10k_ce_pipe *ce_state,
-				  void **per_transfer_contextp,
-				  u32 *bufferp,
-				  unsigned int *nbytesp,
-				  unsigned int *transfer_idp)
+				  void **per_transfer_contextp)
 {
 	struct ath10k *ar = ce_state->ar;
 	struct ath10k_pci *ar_pci = ath10k_pci_priv(ar);
@@ -707,9 +691,7 @@ int ath10k_ce_completed_send_next(struct ath10k_ce_pipe *ce_state,
 
 	spin_lock_bh(&ar_pci->ce_lock);
 	ret = ath10k_ce_completed_send_next_nolock(ce_state,
-						   per_transfer_contextp,
-						   bufferp, nbytesp,
-						   transfer_idp);
+						   per_transfer_contextp);
 	spin_unlock_bh(&ar_pci->ce_lock);
 
 	return ret;
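
Illustration (not part of the patch): with the extra output arguments removed, the send-completion path reports only the per-transfer context, so any buffer address, byte count or transfer id a caller still needs has to travel in the context it queued. A minimal caller sketch, assuming a hypothetical driver helper my_tx_complete():

/*
 * Hypothetical caller sketch: drain finished sends using only the
 * per-transfer context.  my_tx_complete() is an assumed helper and
 * not part of this patch.
 */
static void my_drain_tx_completions(struct ath10k_ce_pipe *ce_state)
{
	void *transfer_context;

	while (ath10k_ce_completed_send_next(ce_state,
					     &transfer_context) == 0) {
		/* Buffer address, length and transfer id must now come
		 * from the queued context itself, if needed. */
		my_tx_complete(transfer_context);
	}
}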