@@ -3946,8 +3946,11 @@ static int nv_request_irq(struct net_device *dev, int intr_test)
 	if (np->msi_flags & NV_MSI_X_CAPABLE) {
 		for (i = 0; i < (np->msi_flags & NV_MSI_X_VECTORS_MASK); i++)
 			np->msi_x_entry[i].entry = i;
-		ret = pci_enable_msix(np->pci_dev, np->msi_x_entry, (np->msi_flags & NV_MSI_X_VECTORS_MASK));
-		if (ret == 0) {
+		ret = pci_enable_msix_range(np->pci_dev,
+					    np->msi_x_entry,
+					    np->msi_flags & NV_MSI_X_VECTORS_MASK,
+					    np->msi_flags & NV_MSI_X_VECTORS_MASK);
+		if (ret > 0) {
 			np->msi_flags |= NV_MSI_X_ENABLED;
 			if (optimization_mode == NV_OPTIMIZATION_MODE_THROUGHPUT && !intr_test) {
 				/* Request irq for rx handling */