@@ -170,7 +170,7 @@ void wcn36xx_dxe_free_ctl_blks(struct wcn36xx *wcn)
 	wcn36xx_dxe_free_ctl_block(&wcn->dxe_rx_h_ch);
 }
 
-static int wcn36xx_dxe_init_descs(struct wcn36xx_dxe_ch *wcn_ch)
+static int wcn36xx_dxe_init_descs(struct device *dev, struct wcn36xx_dxe_ch *wcn_ch)
 {
 	struct wcn36xx_dxe_desc *cur_dxe = NULL;
 	struct wcn36xx_dxe_desc *prev_dxe = NULL;
@@ -179,7 +179,7 @@ static int wcn36xx_dxe_init_descs(struct wcn36xx_dxe_ch *wcn_ch)
 	int i;
 
 	size = wcn_ch->desc_num * sizeof(struct wcn36xx_dxe_desc);
-	wcn_ch->cpu_addr = dma_alloc_coherent(NULL, size, &wcn_ch->dma_addr,
+	wcn_ch->cpu_addr = dma_alloc_coherent(dev, size, &wcn_ch->dma_addr,
 					      GFP_KERNEL);
 	if (!wcn_ch->cpu_addr)
 		return -ENOMEM;
@@ -271,7 +271,7 @@ static int wcn36xx_dxe_enable_ch_int(struct wcn36xx *wcn, u16 wcn_ch)
 	return 0;
 }
 
-static int wcn36xx_dxe_fill_skb(struct wcn36xx_dxe_ctl *ctl)
+static int wcn36xx_dxe_fill_skb(struct device *dev, struct wcn36xx_dxe_ctl *ctl)
 {
 	struct wcn36xx_dxe_desc *dxe = ctl->desc;
 	struct sk_buff *skb;
@@ -280,7 +280,7 @@ static int wcn36xx_dxe_fill_skb(struct wcn36xx_dxe_ctl *ctl)
 	if (skb == NULL)
 		return -ENOMEM;
 
-	dxe->dst_addr_l = dma_map_single(NULL,
+	dxe->dst_addr_l = dma_map_single(dev,
 					 skb_tail_pointer(skb),
 					 WCN36XX_PKT_SIZE,
 					 DMA_FROM_DEVICE);
@@ -298,7 +298,7 @@ static int wcn36xx_dxe_ch_alloc_skb(struct wcn36xx *wcn,
 	cur_ctl = wcn_ch->head_blk_ctl;
 
 	for (i = 0; i < wcn_ch->desc_num; i++) {
-		wcn36xx_dxe_fill_skb(cur_ctl);
+		wcn36xx_dxe_fill_skb(wcn->dev, cur_ctl);
 		cur_ctl = cur_ctl->next;
 	}
 
@@ -361,7 +361,7 @@ static void reap_tx_dxes(struct wcn36xx *wcn, struct wcn36xx_dxe_ch *ch)
 		if (ctl->desc->ctrl & WCN36XX_DXE_CTRL_VALID_MASK)
 			break;
 		if (ctl->skb) {
-			dma_unmap_single(NULL, ctl->desc->src_addr_l,
+			dma_unmap_single(wcn->dev, ctl->desc->src_addr_l,
 					 ctl->skb->len, DMA_TO_DEVICE);
 			info = IEEE80211_SKB_CB(ctl->skb);
 			if (!(info->flags & IEEE80211_TX_CTL_REQ_TX_STATUS)) {
@@ -478,7 +478,7 @@ static int wcn36xx_rx_handle_packets(struct wcn36xx *wcn,
 	while (!(dxe->ctrl & WCN36XX_DXE_CTRL_VALID_MASK)) {
 		skb = ctl->skb;
 		dma_addr = dxe->dst_addr_l;
-		wcn36xx_dxe_fill_skb(ctl);
+		wcn36xx_dxe_fill_skb(wcn->dev, ctl);
 
 		switch (ch->ch_type) {
 		case WCN36XX_DXE_CH_RX_L:
@@ -495,7 +495,7 @@ static int wcn36xx_rx_handle_packets(struct wcn36xx *wcn,
 			wcn36xx_warn("Unknown channel\n");
 		}
 
-		dma_unmap_single(NULL, dma_addr, WCN36XX_PKT_SIZE,
+		dma_unmap_single(wcn->dev, dma_addr, WCN36XX_PKT_SIZE,
 				 DMA_FROM_DEVICE);
 		wcn36xx_rx_skb(wcn, skb);
 		ctl = ctl->next;
@@ -544,7 +544,7 @@ int wcn36xx_dxe_allocate_mem_pools(struct wcn36xx *wcn)
 		16 - (WCN36XX_BD_CHUNK_SIZE % 8);
 
 	s = wcn->mgmt_mem_pool.chunk_size * WCN36XX_DXE_CH_DESC_NUMB_TX_H;
-	cpu_addr = dma_alloc_coherent(NULL, s, &wcn->mgmt_mem_pool.phy_addr,
+	cpu_addr = dma_alloc_coherent(wcn->dev, s, &wcn->mgmt_mem_pool.phy_addr,
 				      GFP_KERNEL);
 	if (!cpu_addr)
 		goto out_err;
@@ -559,7 +559,7 @@ int wcn36xx_dxe_allocate_mem_pools(struct wcn36xx *wcn)
 		16 - (WCN36XX_BD_CHUNK_SIZE % 8);
 
 	s = wcn->data_mem_pool.chunk_size * WCN36XX_DXE_CH_DESC_NUMB_TX_L;
-	cpu_addr = dma_alloc_coherent(NULL, s, &wcn->data_mem_pool.phy_addr,
+	cpu_addr = dma_alloc_coherent(wcn->dev, s, &wcn->data_mem_pool.phy_addr,
 				      GFP_KERNEL);
 	if (!cpu_addr)
 		goto out_err;
@@ -578,13 +578,13 @@ out_err:
 void wcn36xx_dxe_free_mem_pools(struct wcn36xx *wcn)
 {
 	if (wcn->mgmt_mem_pool.virt_addr)
-		dma_free_coherent(NULL, wcn->mgmt_mem_pool.chunk_size *
+		dma_free_coherent(wcn->dev, wcn->mgmt_mem_pool.chunk_size *
 				  WCN36XX_DXE_CH_DESC_NUMB_TX_H,
 				  wcn->mgmt_mem_pool.virt_addr,
 				  wcn->mgmt_mem_pool.phy_addr);
 
 	if (wcn->data_mem_pool.virt_addr) {
-		dma_free_coherent(NULL, wcn->data_mem_pool.chunk_size *
+		dma_free_coherent(wcn->dev, wcn->data_mem_pool.chunk_size *
 				  WCN36XX_DXE_CH_DESC_NUMB_TX_L,
 				  wcn->data_mem_pool.virt_addr,
 				  wcn->data_mem_pool.phy_addr);
@@ -651,7 +651,7 @@ int wcn36xx_dxe_tx_frame(struct wcn36xx *wcn,
 		goto unlock;
 	}
 
-	desc->src_addr_l = dma_map_single(NULL,
+	desc->src_addr_l = dma_map_single(wcn->dev,
 					  ctl->skb->data,
 					  ctl->skb->len,
 					  DMA_TO_DEVICE);
@@ -707,7 +707,7 @@ int wcn36xx_dxe_init(struct wcn36xx *wcn)
 	/***************************************/
 	/* Init descriptors for TX LOW channel */
 	/***************************************/
-	wcn36xx_dxe_init_descs(&wcn->dxe_tx_l_ch);
+	wcn36xx_dxe_init_descs(wcn->dev, &wcn->dxe_tx_l_ch);
 	wcn36xx_dxe_init_tx_bd(&wcn->dxe_tx_l_ch, &wcn->data_mem_pool);
 
 	/* Write channel head to a NEXT register */
@@ -725,7 +725,7 @@ int wcn36xx_dxe_init(struct wcn36xx *wcn)
 	/***************************************/
 	/* Init descriptors for TX HIGH channel */
 	/***************************************/
-	wcn36xx_dxe_init_descs(&wcn->dxe_tx_h_ch);
+	wcn36xx_dxe_init_descs(wcn->dev, &wcn->dxe_tx_h_ch);
 	wcn36xx_dxe_init_tx_bd(&wcn->dxe_tx_h_ch, &wcn->mgmt_mem_pool);
 
 	/* Write channel head to a NEXT register */
@@ -745,7 +745,7 @@ int wcn36xx_dxe_init(struct wcn36xx *wcn)
 	/***************************************/
 	/* Init descriptors for RX LOW channel */
 	/***************************************/
-	wcn36xx_dxe_init_descs(&wcn->dxe_rx_l_ch);
+	wcn36xx_dxe_init_descs(wcn->dev, &wcn->dxe_rx_l_ch);
 
 	/* For RX we need to preallocated buffers */
 	wcn36xx_dxe_ch_alloc_skb(wcn, &wcn->dxe_rx_l_ch);
@@ -775,7 +775,7 @@ int wcn36xx_dxe_init(struct wcn36xx *wcn)
 	/***************************************/
 	/* Init descriptors for RX HIGH channel */
 	/***************************************/
-	wcn36xx_dxe_init_descs(&wcn->dxe_rx_h_ch);
+	wcn36xx_dxe_init_descs(wcn->dev, &wcn->dxe_rx_h_ch);
 
 	/* For RX we need to prealocat buffers */
 	wcn36xx_dxe_ch_alloc_skb(wcn, &wcn->dxe_rx_h_ch);