Diffstat (limited to 'drivers/crypto/caam/caamalg.c')
-rw-r--r--   drivers/crypto/caam/caamalg.c   88
1 file changed, 46 insertions(+), 42 deletions(-)
diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index d3d8bb0a6990..4a9b998a8d26 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -59,6 +59,8 @@
 #include <crypto/engine.h>
 #include <crypto/xts.h>
 #include <asm/unaligned.h>
+#include <linux/dma-mapping.h>
+#include <linux/kernel.h>
 
 /*
  * crypto alg
@@ -131,7 +133,7 @@ struct caam_aead_req_ctx {
 
 static int aead_null_set_sh_desc(struct crypto_aead *aead)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 	u32 *desc;
@@ -184,7 +186,7 @@ static int aead_set_sh_desc(struct crypto_aead *aead)
 	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
 						 struct caam_aead_alg, aead);
 	unsigned int ivsize = crypto_aead_ivsize(aead);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 	u32 ctx1_iv_off = 0;
@@ -312,7 +314,7 @@ skip_givenc:
 static int aead_setauthsize(struct crypto_aead *authenc,
 				    unsigned int authsize)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 
 	ctx->authsize = authsize;
 	aead_set_sh_desc(authenc);
@@ -322,7 +324,7 @@ static int aead_setauthsize(struct crypto_aead *authenc,
 
 static int gcm_set_sh_desc(struct crypto_aead *aead)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	u32 *desc;
@@ -372,7 +374,7 @@ static int gcm_set_sh_desc(struct crypto_aead *aead)
 
 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 	int err;
 
 	err = crypto_gcm_check_authsize(authsize);
@@ -387,7 +389,7 @@ static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
 
 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	u32 *desc;
@@ -440,7 +442,7 @@ static int rfc4106_set_sh_desc(struct crypto_aead *aead)
 static int rfc4106_setauthsize(struct crypto_aead *authenc,
 			       unsigned int authsize)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 	int err;
 
 	err = crypto_rfc4106_check_authsize(authsize);
@@ -455,7 +457,7 @@ static int rfc4106_setauthsize(struct crypto_aead *authenc,
 
 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	u32 *desc;
@@ -508,7 +510,7 @@ static int rfc4543_set_sh_desc(struct crypto_aead *aead)
 static int rfc4543_setauthsize(struct crypto_aead *authenc,
 			       unsigned int authsize)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 
 	if (authsize != 16)
 		return -EINVAL;
@@ -521,7 +523,7 @@ static int rfc4543_setauthsize(struct crypto_aead *authenc,
 
 static int chachapoly_set_sh_desc(struct crypto_aead *aead)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	u32 *desc;
@@ -547,7 +549,7 @@ static int chachapoly_set_sh_desc(struct crypto_aead *aead)
 static int chachapoly_setauthsize(struct crypto_aead *aead,
 				  unsigned int authsize)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 
 	if (authsize != POLY1305_DIGEST_SIZE)
 		return -EINVAL;
@@ -559,7 +561,7 @@ static int chachapoly_setauthsize(struct crypto_aead *aead,
 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
 			     unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
 
@@ -575,7 +577,7 @@ static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
 static int aead_setkey(struct crypto_aead *aead,
 			       const u8 *key, unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 	struct crypto_authenc_keys keys;
@@ -656,7 +658,7 @@ static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
 static int gcm_setkey(struct crypto_aead *aead,
 		      const u8 *key, unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	int err;
 
@@ -677,7 +679,7 @@ static int gcm_setkey(struct crypto_aead *aead,
 static int rfc4106_setkey(struct crypto_aead *aead,
 			  const u8 *key, unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	int err;
 
@@ -703,7 +705,7 @@ static int rfc4106_setkey(struct crypto_aead *aead,
 static int rfc4543_setkey(struct crypto_aead *aead,
 			  const u8 *key, unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	int err;
 
@@ -729,7 +731,7 @@ static int rfc4543_setkey(struct crypto_aead *aead,
 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 			   unsigned int keylen, const u32 ctx1_iv_off)
 {
-	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
 	struct caam_skcipher_alg *alg =
 		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
 			     skcipher);
@@ -832,7 +834,7 @@ static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 			       unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 	u32 *desc;
@@ -1057,7 +1059,7 @@ static void init_aead_job(struct aead_request *req,
 			  bool all_contig, bool encrypt)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	int authsize = ctx->authsize;
 	u32 *desc = edesc->hw_desc;
 	u32 out_options, in_options;
@@ -1118,7 +1120,7 @@ static void init_gcm_job(struct aead_request *req,
 			 bool all_contig, bool encrypt)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	u32 *desc = edesc->hw_desc;
 	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
@@ -1185,7 +1187,7 @@ static void init_authenc_job(struct aead_request *req,
 	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
 						 struct caam_aead_alg, aead);
 	unsigned int ivsize = crypto_aead_ivsize(aead);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
 	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
 			       OP_ALG_AAI_CTR_MOD128);
@@ -1234,7 +1236,7 @@ static void init_skcipher_job(struct skcipher_request *req,
 			      const bool encrypt)
 {
 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
-	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
 	struct device *jrdev = ctx->jrdev;
 	int ivsize = crypto_skcipher_ivsize(skcipher);
 	u32 *desc = edesc->hw_desc;
@@ -1290,7 +1292,7 @@ static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
 					   bool encrypt)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
@@ -1379,8 +1381,7 @@ static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
 	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
 
 	/* allocate space for base edesc and hw desc commands, link tables */
-	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
-			GFP_DMA | flags);
+	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags);
 	if (!edesc) {
 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
 			   0, 0, 0);
@@ -1457,7 +1458,7 @@ static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
 {
 	struct aead_edesc *edesc;
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	bool all_contig;
 	u32 *desc;
@@ -1491,7 +1492,7 @@ static inline int aead_crypt(struct aead_request *req, bool encrypt)
 {
 	struct aead_edesc *edesc;
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	bool all_contig;
 
@@ -1524,7 +1525,7 @@ static int aead_decrypt(struct aead_request *req)
 static int aead_do_one_req(struct crypto_engine *engine, void *areq)
 {
 	struct aead_request *req = aead_request_cast(areq);
-	struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(crypto_aead_reqtfm(req));
 	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
 	u32 *desc = rctx->edesc->hw_desc;
 	int ret;
@@ -1550,7 +1551,7 @@ static inline int gcm_crypt(struct aead_request *req, bool encrypt)
 {
 	struct aead_edesc *edesc;
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	bool all_contig;
 
@@ -1597,7 +1598,7 @@ static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
 						   int desc_bytes)
 {
 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
-	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
 	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
 	struct device *jrdev = ctx->jrdev;
 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
@@ -1608,6 +1609,7 @@ static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
 	u8 *iv;
 	int ivsize = crypto_skcipher_ivsize(skcipher);
 	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
+	unsigned int aligned_size;
 
 	src_nents = sg_nents_for_len(req->src, req->cryptlen);
 	if (unlikely(src_nents < 0)) {
@@ -1681,15 +1683,18 @@ static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
 	/*
 	 * allocate space for base edesc and hw desc commands, link tables, IV
 	 */
-	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
-			GFP_DMA | flags);
-	if (!edesc) {
+	aligned_size = ALIGN(ivsize, __alignof__(*edesc));
+	aligned_size += sizeof(*edesc) + desc_bytes + sec4_sg_bytes;
+	aligned_size = ALIGN(aligned_size, dma_get_cache_alignment());
+	iv = kzalloc(aligned_size, flags);
+	if (!iv) {
 		dev_err(jrdev, "could not allocate extended descriptor\n");
 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
 			   0, 0, 0);
 		return ERR_PTR(-ENOMEM);
 	}
 
+	edesc = (void *)(iv + ALIGN(ivsize, __alignof__(*edesc)));
 	edesc->src_nents = src_nents;
 	edesc->dst_nents = dst_nents;
 	edesc->mapped_src_nents = mapped_src_nents;
@@ -1701,7 +1706,6 @@ static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
 
 	/* Make sure IV is located in a DMAable area */
 	if (ivsize) {
-		iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
 		memcpy(iv, req->iv, ivsize);
 
 		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
@@ -1756,7 +1760,7 @@ static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
 static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
 {
 	struct skcipher_request *req = skcipher_request_cast(areq);
-	struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(crypto_skcipher_reqtfm(req));
 	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
 	u32 *desc = rctx->edesc->hw_desc;
 	int ret;
@@ -1790,7 +1794,7 @@ static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
 {
 	struct skcipher_edesc *edesc;
 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
-	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
@@ -3397,7 +3401,7 @@ static int caam_cra_init(struct crypto_skcipher *tfm)
 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
 	struct caam_skcipher_alg *caam_alg =
 		container_of(alg, typeof(*caam_alg), skcipher);
-	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
 	u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
 	int ret = 0;
 
@@ -3434,7 +3438,7 @@ static int caam_aead_init(struct crypto_aead *tfm)
 	struct aead_alg *alg = crypto_aead_alg(tfm);
 	struct caam_aead_alg *caam_alg =
 		 container_of(alg, struct caam_aead_alg, aead);
-	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(tfm);
 
 	crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));
 
@@ -3454,7 +3458,7 @@ static void caam_exit_common(struct caam_ctx *ctx)
 
 static void caam_cra_exit(struct crypto_skcipher *tfm)
 {
-	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
 
 	if (ctx->fallback)
 		crypto_free_skcipher(ctx->fallback);
@@ -3463,7 +3467,7 @@ static void caam_cra_exit(struct crypto_skcipher *tfm)
 
 static void caam_aead_exit(struct crypto_aead *tfm)
 {
-	caam_exit_common(crypto_aead_ctx(tfm));
+	caam_exit_common(crypto_aead_ctx_dma(tfm));
 }
 
 void caam_algapi_exit(void)
@@ -3491,7 +3495,7 @@ static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
 
 	alg->base.cra_module = THIS_MODULE;
 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
-	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
+	alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
 	alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
 			      CRYPTO_ALG_KERN_DRIVER_ONLY);
 
@@ -3505,7 +3509,7 @@ static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
 
 	alg->base.cra_module = THIS_MODULE;
 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
-	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
+	alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
 			      CRYPTO_ALG_KERN_DRIVER_ONLY;
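
The most intricate hunk above is the skcipher_edesc_alloc() change, where the IV and the extended descriptor now share a single allocation that must stay DMA-safe without GFP_DMA. The userspace sketch below reproduces only the layout arithmetic from that hunk: the IV sits at the start of the buffer, the descriptor follows at the next alignment boundary, and the total is rounded up to a cache line. The struct name, the fixed 64-byte stand-in for dma_get_cache_alignment(), and calloc() in place of kzalloc() are illustrative assumptions, not the driver's real definitions.

/* Userspace sketch of the iv + edesc layout computed in skcipher_edesc_alloc().
 * struct fake_edesc and CACHE_LINE stand in for struct skcipher_edesc and
 * dma_get_cache_alignment(); both are made up for illustration only.
 */
#include <stdio.h>
#include <stdlib.h>

#define ALIGN(x, a)  (((x) + (a) - 1) & ~((size_t)(a) - 1))  /* round up, as the kernel macro does */
#define CACHE_LINE   64                                      /* stand-in for dma_get_cache_alignment() */

struct fake_edesc {                 /* stand-in for struct skcipher_edesc */
	int src_nents;
	int dst_nents;
	unsigned long long hw_desc[8];
};

int main(void)
{
	size_t ivsize = 16, desc_bytes = 64, sec4_sg_bytes = 128;  /* example sizes */
	size_t aligned_size;
	unsigned char *iv;
	struct fake_edesc *edesc;

	/* IV first, padded so the edesc that follows keeps its natural alignment */
	aligned_size  = ALIGN(ivsize, __alignof__(struct fake_edesc));
	aligned_size += sizeof(struct fake_edesc) + desc_bytes + sec4_sg_bytes;
	/* round the whole allocation up to a cache line for non-coherent DMA */
	aligned_size  = ALIGN(aligned_size, CACHE_LINE);

	iv = calloc(1, aligned_size);       /* kzalloc(aligned_size, flags) in the driver */
	if (!iv)
		return 1;

	/* the descriptor lives right after the aligned IV region */
	edesc = (void *)(iv + ALIGN(ivsize, __alignof__(struct fake_edesc)));

	printf("total %zu bytes, iv at %p, edesc at %p\n",
	       aligned_size, (void *)iv, (void *)edesc);
	free(iv);
	return 0;
}

The remaining hunks follow the same theme: the transform-context accessors move to crypto_aead_ctx_dma()/crypto_skcipher_ctx_dma() and cra_ctxsize grows by crypto_dma_padding(), so each context carries its own DMA alignment padding now that the GFP_DMA hint is gone.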