@@ -1265,9 +1265,17 @@ static int ioat_xor_val_self_test(struct ioatdma_device *device)
         op = IOAT_OP_XOR;
 
         dest_dma = dma_map_page(dev, dest, 0, PAGE_SIZE, DMA_FROM_DEVICE);
+        if (dma_mapping_error(dev, dest_dma))
+                goto dma_unmap;
+
         for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
+                dma_srcs[i] = DMA_ERROR_CODE;
+        for (i = 0; i < IOAT_NUM_SRC_TEST; i++) {
                 dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE,
                                            DMA_TO_DEVICE);
+                if (dma_mapping_error(dev, dma_srcs[i]))
+                        goto dma_unmap;
+        }
         tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs,
                                       IOAT_NUM_SRC_TEST, PAGE_SIZE,
                                       DMA_PREP_INTERRUPT);
@@ -1298,7 +1306,6 @@ static int ioat_xor_val_self_test(struct ioatdma_device *device)
                 goto dma_unmap;
         }
 
-        dma_unmap_page(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
         for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
                 dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE, DMA_TO_DEVICE);
 
@@ -1313,6 +1320,8 @@ static int ioat_xor_val_self_test(struct ioatdma_device *device)
         }
         dma_sync_single_for_device(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
 
+        dma_unmap_page(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
+
         /* skip validate if the capability is not present */
         if (!dma_has_cap(DMA_XOR_VAL, dma_chan->device->cap_mask))
                 goto free_resources;
@@ -1327,8 +1336,13 @@ static int ioat_xor_val_self_test(struct ioatdma_device *device)
         xor_val_result = 1;
 
         for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
+                dma_srcs[i] = DMA_ERROR_CODE;
+        for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++) {
                 dma_srcs[i] = dma_map_page(dev, xor_val_srcs[i], 0, PAGE_SIZE,
                                            DMA_TO_DEVICE);
+                if (dma_mapping_error(dev, dma_srcs[i]))
+                        goto dma_unmap;
+        }
         tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
                                           IOAT_NUM_SRC_TEST + 1, PAGE_SIZE,
                                           &xor_val_result, DMA_PREP_INTERRUPT);
@@ -1374,8 +1388,13 @@ static int ioat_xor_val_self_test(struct ioatdma_device *device)
 
         xor_val_result = 0;
         for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
+                dma_srcs[i] = DMA_ERROR_CODE;
+        for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++) {
                 dma_srcs[i] = dma_map_page(dev, xor_val_srcs[i], 0, PAGE_SIZE,
                                            DMA_TO_DEVICE);
+                if (dma_mapping_error(dev, dma_srcs[i]))
+                        goto dma_unmap;
+        }
         tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
                                           IOAT_NUM_SRC_TEST + 1, PAGE_SIZE,
                                           &xor_val_result, DMA_PREP_INTERRUPT);
@@ -1417,14 +1436,18 @@ static int ioat_xor_val_self_test(struct ioatdma_device *device)
         goto free_resources;
 dma_unmap:
         if (op == IOAT_OP_XOR) {
-                dma_unmap_page(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
+                if (dest_dma != DMA_ERROR_CODE)
+                        dma_unmap_page(dev, dest_dma, PAGE_SIZE,
+                                       DMA_FROM_DEVICE);
                 for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
-                        dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE,
-                                       DMA_TO_DEVICE);
+                        if (dma_srcs[i] != DMA_ERROR_CODE)
+                                dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE,
+                                               DMA_TO_DEVICE);
         } else if (op == IOAT_OP_XOR_VAL) {
                 for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
-                        dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE,
-                                       DMA_TO_DEVICE);
+                        if (dma_srcs[i] != DMA_ERROR_CODE)
+                                dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE,
+                                               DMA_TO_DEVICE);
         }
 free_resources:
         dma->device_free_chan_resources(dma_chan);
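For context, the map-and-unwind pattern the patch applies can be sketched on its own. The snippet below is only an illustration of the same DMA-API calls (dma_map_page(), dma_mapping_error(), dma_unmap_page()) against the era of the API that still exposes DMA_ERROR_CODE; the helper name map_xor_srcs(), the NUM_SRC count, and the pages array are made up for the example and are not part of the ioat driver.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/mm.h>

#define NUM_SRC 8	/* illustrative source count, not the driver's value */

/*
 * Map NUM_SRC source pages for a DMA operation, mirroring the style used
 * in ioat_xor_val_self_test(): every slot is pre-seeded with DMA_ERROR_CODE
 * so the error path can tell mapped slots from never-mapped ones.
 */
static int map_xor_srcs(struct device *dev, struct page **pages,
			dma_addr_t *dma_srcs)
{
	int i;

	for (i = 0; i < NUM_SRC; i++)
		dma_srcs[i] = DMA_ERROR_CODE;

	for (i = 0; i < NUM_SRC; i++) {
		dma_srcs[i] = dma_map_page(dev, pages[i], 0, PAGE_SIZE,
					   DMA_TO_DEVICE);
		if (dma_mapping_error(dev, dma_srcs[i]))
			goto unmap;
	}
	return 0;

unmap:
	/* release only the entries that were actually mapped */
	for (i = 0; i < NUM_SRC; i++)
		if (dma_srcs[i] != DMA_ERROR_CODE)
			dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE,
				       DMA_TO_DEVICE);
	return -ENOMEM;
}

Pre-seeding every slot with DMA_ERROR_CODE is what lets the error path walk the whole array unconditionally and skip only the entries that never received a mapping, which is the same distinction the reworked dma_unmap: label in the patch relies on.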