crypto: nx - use the new scatterwalk functions
- In nx_walk_and_build(), use scatterwalk_start_at_pos() instead of a more
  complex way to achieve the same result.

- Also in nx_walk_and_build(), use the new function scatterwalk_next(), which
  consolidates scatterwalk_clamp() and scatterwalk_map(), and use
  scatterwalk_done_src(), which consolidates scatterwalk_unmap(),
  scatterwalk_advance(), and scatterwalk_done(). Remove unnecessary code that
  seemed to be intended to advance to the next sg entry, which is already
  handled by the scatterwalk functions. Note that nx_walk_and_build() does not
  actually read or write the mapped virtual address, and thus it is misusing
  the scatter_walk API. It really should just access the scatterlist directly.
  This patch does not try to address this existing issue.

- In nx_gca(), use memcpy_from_sglist() instead of a more complex way to
  achieve the same result.

- In various functions, replace calls to scatterwalk_map_and_copy() with
  memcpy_from_sglist() or memcpy_to_sglist() as appropriate. Note that this
  eliminates the confusing "out" argument (which this driver had tried to
  work around by defining the missing constants for it...)

Cc: Christophe Leroy <christophe.leroy@csgroup.eu>
Cc: Madhavan Srinivasan <maddy@linux.ibm.com>
Cc: Michael Ellerman <mpe@ellerman.id.au>
Cc: Naveen N Rao <naveen@kernel.org>
Cc: Nicholas Piggin <npiggin@gmail.com>
Cc: linuxppc-dev@lists.ozlabs.org
Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
This commit is contained in:
parent
8fd0eecd55
commit
422bf8fc99
|
|
@ -217,13 +217,11 @@ static int generate_pat(u8 *iv,
|
|||
memset(b1, 0, 16);
|
||||
if (assoclen <= 65280) {
|
||||
*(u16 *)b1 = assoclen;
|
||||
scatterwalk_map_and_copy(b1 + 2, req->src, 0,
|
||||
iauth_len, SCATTERWALK_FROM_SG);
|
||||
memcpy_from_sglist(b1 + 2, req->src, 0, iauth_len);
|
||||
} else {
|
||||
*(u16 *)b1 = (u16)(0xfffe);
|
||||
*(u32 *)&b1[2] = assoclen;
|
||||
scatterwalk_map_and_copy(b1 + 6, req->src, 0,
|
||||
iauth_len, SCATTERWALK_FROM_SG);
|
||||
memcpy_from_sglist(b1 + 6, req->src, 0, iauth_len);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -341,9 +339,8 @@ static int ccm_nx_decrypt(struct aead_request *req,
|
|||
nbytes -= authsize;
|
||||
|
||||
/* copy out the auth tag to compare with later */
|
||||
scatterwalk_map_and_copy(priv->oauth_tag,
|
||||
req->src, nbytes + req->assoclen, authsize,
|
||||
SCATTERWALK_FROM_SG);
|
||||
memcpy_from_sglist(priv->oauth_tag, req->src, nbytes + req->assoclen,
|
||||
authsize);
|
||||
|
||||
rc = generate_pat(iv, req, nx_ctx, authsize, nbytes, assoclen,
|
||||
csbcpb->cpb.aes_ccm.in_pat_or_b0);
|
||||
|
|
@ -465,9 +462,8 @@ static int ccm_nx_encrypt(struct aead_request *req,
|
|||
} while (processed < nbytes);
|
||||
|
||||
/* copy out the auth tag */
|
||||
scatterwalk_map_and_copy(csbcpb->cpb.aes_ccm.out_pat_or_mac,
|
||||
req->dst, nbytes + req->assoclen, authsize,
|
||||
SCATTERWALK_TO_SG);
|
||||
memcpy_to_sglist(req->dst, nbytes + req->assoclen,
|
||||
csbcpb->cpb.aes_ccm.out_pat_or_mac, authsize);
|
||||
|
||||
out:
|
||||
spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
|
||||
|
|
|
|||
|
|
@ -103,16 +103,13 @@ static int nx_gca(struct nx_crypto_ctx *nx_ctx,
|
|||
{
|
||||
int rc;
|
||||
struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead;
|
||||
struct scatter_walk walk;
|
||||
struct nx_sg *nx_sg = nx_ctx->in_sg;
|
||||
unsigned int nbytes = assoclen;
|
||||
unsigned int processed = 0, to_process;
|
||||
unsigned int max_sg_len;
|
||||
|
||||
if (nbytes <= AES_BLOCK_SIZE) {
|
||||
scatterwalk_start(&walk, req->src);
|
||||
scatterwalk_copychunks(out, &walk, nbytes, SCATTERWALK_FROM_SG);
|
||||
scatterwalk_done(&walk, SCATTERWALK_FROM_SG, 0);
|
||||
memcpy_from_sglist(out, req->src, 0, nbytes);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
|
@ -391,19 +388,17 @@ static int gcm_aes_nx_crypt(struct aead_request *req, int enc,
|
|||
mac:
|
||||
if (enc) {
|
||||
/* copy out the auth tag */
|
||||
scatterwalk_map_and_copy(
|
||||
csbcpb->cpb.aes_gcm.out_pat_or_mac,
|
||||
memcpy_to_sglist(
|
||||
req->dst, req->assoclen + nbytes,
|
||||
crypto_aead_authsize(crypto_aead_reqtfm(req)),
|
||||
SCATTERWALK_TO_SG);
|
||||
csbcpb->cpb.aes_gcm.out_pat_or_mac,
|
||||
crypto_aead_authsize(crypto_aead_reqtfm(req)));
|
||||
} else {
|
||||
u8 *itag = nx_ctx->priv.gcm.iauth_tag;
|
||||
u8 *otag = csbcpb->cpb.aes_gcm.out_pat_or_mac;
|
||||
|
||||
scatterwalk_map_and_copy(
|
||||
memcpy_from_sglist(
|
||||
itag, req->src, req->assoclen + nbytes,
|
||||
crypto_aead_authsize(crypto_aead_reqtfm(req)),
|
||||
SCATTERWALK_FROM_SG);
|
||||
crypto_aead_authsize(crypto_aead_reqtfm(req)));
|
||||
rc = crypto_memneq(itag, otag,
|
||||
crypto_aead_authsize(crypto_aead_reqtfm(req))) ?
|
||||
-EBADMSG : 0;
|
||||
|
|
|
|||
|
|
@ -153,40 +153,19 @@ struct nx_sg *nx_walk_and_build(struct nx_sg *nx_dst,
|
|||
{
|
||||
struct scatter_walk walk;
|
||||
struct nx_sg *nx_sg = nx_dst;
|
||||
unsigned int n, offset = 0, len = *src_len;
|
||||
unsigned int n, len = *src_len;
|
||||
char *dst;
|
||||
|
||||
/* we need to fast forward through @start bytes first */
|
||||
for (;;) {
|
||||
scatterwalk_start(&walk, sg_src);
|
||||
|
||||
if (start < offset + sg_src->length)
|
||||
break;
|
||||
|
||||
offset += sg_src->length;
|
||||
sg_src = sg_next(sg_src);
|
||||
}
|
||||
|
||||
/* start - offset is the number of bytes to advance in the scatterlist
|
||||
* element we're currently looking at */
|
||||
scatterwalk_advance(&walk, start - offset);
|
||||
scatterwalk_start_at_pos(&walk, sg_src, start);
|
||||
|
||||
while (len && (nx_sg - nx_dst) < sglen) {
|
||||
n = scatterwalk_clamp(&walk, len);
|
||||
if (!n) {
|
||||
/* In cases where we have scatterlist chain sg_next
|
||||
* handles with it properly */
|
||||
scatterwalk_start(&walk, sg_next(walk.sg));
|
||||
n = scatterwalk_clamp(&walk, len);
|
||||
}
|
||||
dst = scatterwalk_map(&walk);
|
||||
dst = scatterwalk_next(&walk, len, &n);
|
||||
|
||||
nx_sg = nx_build_sg_list(nx_sg, dst, &n, sglen - (nx_sg - nx_dst));
|
||||
len -= n;
|
||||
|
||||
scatterwalk_unmap(dst);
|
||||
scatterwalk_advance(&walk, n);
|
||||
scatterwalk_done(&walk, SCATTERWALK_FROM_SG, len);
|
||||
scatterwalk_done_src(&walk, dst, n);
|
||||
len -= n;
|
||||
}
|
||||
/* update to_process */
|
||||
*src_len -= len;
|
||||
|
|
|
|||
|
|
@ -189,7 +189,4 @@ extern struct shash_alg nx_shash_sha256_alg;
|
|||
|
||||
extern struct nx_crypto_driver nx_driver;
|
||||
|
||||
#define SCATTERWALK_TO_SG 1
|
||||
#define SCATTERWALK_FROM_SG 0
|
||||
|
||||
#endif
|
||||
|
|
|
|||
Loading…
Reference in New Issue