@@ -57,7 +57,8 @@ static u32 xdr_padsize(u32 len)
 
 int svc_rdma_map_xdr(struct svcxprt_rdma *xprt,
 		     struct xdr_buf *xdr,
-		     struct svc_rdma_req_map *vec)
+		     struct svc_rdma_req_map *vec,
+		     bool write_chunk_present)
 {
 	int sge_no;
 	u32 sge_bytes;
@@ -97,9 +98,20 @@ int svc_rdma_map_xdr(struct svcxprt_rdma *xprt,
 
 	/* Tail SGE */
 	if (xdr->tail[0].iov_len) {
-		vec->sge[sge_no].iov_base = xdr->tail[0].iov_base;
-		vec->sge[sge_no].iov_len = xdr->tail[0].iov_len;
-		sge_no++;
+		unsigned char *base = xdr->tail[0].iov_base;
+		size_t len = xdr->tail[0].iov_len;
+		u32 xdr_pad = xdr_padsize(xdr->page_len);
+
+		if (write_chunk_present && xdr_pad) {
+			base += xdr_pad;
+			len -= xdr_pad;
+		}
+
+		if (len) {
+			vec->sge[sge_no].iov_base = base;
+			vec->sge[sge_no].iov_len = len;
+			sge_no++;
+		}
 	}
 
 	dprintk("svcrdma: %s: sge_no %d page_no %d "
@@ -594,7 +606,7 @@ int svc_rdma_sendto(struct svc_rqst *rqstp)
 	ctxt = svc_rdma_get_context(rdma);
 	ctxt->direction = DMA_TO_DEVICE;
 	vec = svc_rdma_get_req_map(rdma);
-	ret = svc_rdma_map_xdr(rdma, &rqstp->rq_res, vec);
+	ret = svc_rdma_map_xdr(rdma, &rqstp->rq_res, vec, wr_ary != NULL);
 	if (ret)
 		goto err0;
 	inline_bytes = rqstp->rq_res.len;
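
For reference, a minimal user-space sketch (not part of the patch) of the roundup rule the new tail handling relies on: XDR encodes data in four-byte units, so a payload whose length is not a multiple of four is followed by one to three zero pad bytes, and those pad bytes sit at the start of xdr->tail[0] when the payload is carried in the page list. The payload length used below is a made-up example value.

/* Illustrative sketch only -- mirrors the XDR roundup rule used above. */
#include <stdio.h>

/* XDR roundup: lengths are padded to the next 4-byte boundary. */
static unsigned int xdr_padsize(unsigned int len)
{
	return (len & 3) ? (4 - (len & 3)) : 0;
}

int main(void)
{
	/* Hypothetical reply payload length carried in the page list. */
	unsigned int page_len = 4093;
	unsigned int pad = xdr_padsize(page_len);

	/*
	 * With a write chunk present, the patched svc_rdma_map_xdr() skips
	 * these pad bytes at the start of the tail instead of mapping them
	 * for an inline send.
	 */
	printf("page_len=%u, roundup pad=%u byte(s) skipped\n", page_len, pad);
	return 0;
}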