Diffstat (limited to 'drivers/crypto/inside-secure/safexcel_cipher.c')
-rw-r--r--	drivers/crypto/inside-secure/safexcel_cipher.c | 492
1 file changed, 402 insertions(+), 90 deletions(-)
diff --git a/drivers/crypto/inside-secure/safexcel_cipher.c b/drivers/crypto/inside-secure/safexcel_cipher.c
index 6bb60fda2043..3aef1d43e435 100644
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
@@ -1,11 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0
 /*
  * Copyright (C) 2017 Marvell
  *
  * Antoine Tenart <[email protected]>
- *
- * This file is licensed under the terms of the GNU General Public
- * License version 2. This program is licensed "as is" without any
- * warranty of any kind, whether express or implied.
  */
 
 #include <linux/device.h>
@@ -15,6 +12,7 @@
 #include <crypto/aead.h>
 #include <crypto/aes.h>
 #include <crypto/authenc.h>
+#include <crypto/des.h>
 #include <crypto/sha.h>
 #include <crypto/skcipher.h>
 #include <crypto/internal/aead.h>
@@ -27,21 +25,28 @@ enum safexcel_cipher_direction {
 	SAFEXCEL_DECRYPT,
 };
 
+enum safexcel_cipher_alg {
+	SAFEXCEL_DES,
+	SAFEXCEL_3DES,
+	SAFEXCEL_AES,
+};
+
 struct safexcel_cipher_ctx {
 	struct safexcel_context base;
 	struct safexcel_crypto_priv *priv;
 
 	u32 mode;
+	enum safexcel_cipher_alg alg;
 	bool aead;
 
 	__le32 key[8];
 	unsigned int key_len;
 
 	/* All the below is AEAD specific */
-	u32 alg;
+	u32 hash_alg;
 	u32 state_sz;
-	u32 ipad[SHA256_DIGEST_SIZE / sizeof(u32)];
-	u32 opad[SHA256_DIGEST_SIZE / sizeof(u32)];
+	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
+	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
 };
 
 struct safexcel_cipher_req {
@@ -57,10 +62,24 @@ static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 	unsigned offset = 0;
 
 	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
-		offset = AES_BLOCK_SIZE / sizeof(u32);
-		memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);
+		switch (ctx->alg) {
+		case SAFEXCEL_DES:
+			offset = DES_BLOCK_SIZE / sizeof(u32);
+			memcpy(cdesc->control_data.token, iv, DES_BLOCK_SIZE);
+			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
+			break;
+		case SAFEXCEL_3DES:
+			offset = DES3_EDE_BLOCK_SIZE / sizeof(u32);
+			memcpy(cdesc->control_data.token, iv, DES3_EDE_BLOCK_SIZE);
+			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
+			break;
 
-		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
+		case SAFEXCEL_AES:
+			offset = AES_BLOCK_SIZE / sizeof(u32);
+			memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);
+			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
+			break;
+		}
 	}
 
 	token = (struct safexcel_token *)(cdesc->control_data.token + offset);
@@ -145,7 +164,7 @@ static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
 		return ret;
 	}
 
-	if (priv->version == EIP197 && ctx->base.ctxr_dma) {
+	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 		for (i = 0; i < len / sizeof(u32); i++) {
 			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
 				ctx->base.needs_inv = true;
@@ -179,12 +198,12 @@ static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
 		goto badkey;
 
 	/* Encryption key */
-	if (priv->version == EIP197 && ctx->base.ctxr_dma &&
+	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
 	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
 		ctx->base.needs_inv = true;
 
 	/* Auth key */
-	switch (ctx->alg) {
+	switch (ctx->hash_alg) {
 	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
 		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
 					 keys.authkeylen, &istate, &ostate))
@@ -200,6 +219,16 @@ static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
 					 keys.authkeylen, &istate, &ostate))
 			goto badkey;
 		break;
+	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
+		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
+					 keys.authkeylen, &istate, &ostate))
+			goto badkey;
+		break;
+	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
+		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
+					 keys.authkeylen, &istate, &ostate))
+			goto badkey;
+		break;
 	default:
 		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
 		goto badkey;
@@ -208,7 +237,7 @@ static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
 	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
 				    CRYPTO_TFM_RES_MASK);
 
-	if (priv->version == EIP197 && ctx->base.ctxr_dma &&
+	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
 	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
 	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
 		ctx->base.needs_inv = true;
@@ -258,22 +287,28 @@ static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
 
 	if (ctx->aead)
 		cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
-						ctx->alg;
-
-	switch (ctx->key_len) {
-	case AES_KEYSIZE_128:
-		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
-		break;
-	case AES_KEYSIZE_192:
-		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
-		break;
-	case AES_KEYSIZE_256:
-		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
-		break;
-	default:
-		dev_err(priv->dev, "aes keysize not supported: %u\n",
-			ctx->key_len);
-		return -EINVAL;
+						ctx->hash_alg;
+
+	if (ctx->alg == SAFEXCEL_DES) {
+		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_DES;
+	} else if (ctx->alg == SAFEXCEL_3DES) {
+		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
+	} else if (ctx->alg == SAFEXCEL_AES) {
+		switch (ctx->key_len) {
+		case AES_KEYSIZE_128:
+			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
+			break;
+		case AES_KEYSIZE_192:
+			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
+			break;
+		case AES_KEYSIZE_256:
+			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
+			break;
+		default:
+			dev_err(priv->dev, "aes keysize not supported: %u\n",
+				ctx->key_len);
+			return -EINVAL;
+		}
 	}
 
 	ctrl_size = ctx->key_len / sizeof(u32);
@@ -298,7 +333,6 @@ static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int rin
 	*ret = 0;
 
-	spin_lock_bh(&priv->ring[ring].egress_lock);
 	do {
 		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
 		if (IS_ERR(rdesc)) {
@@ -315,7 +349,6 @@ static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int rin
 	} while (!rdesc->last_seg);
 
 	safexcel_complete(priv, ring);
-	spin_unlock_bh(&priv->ring[ring].egress_lock);
 
 	if (src == dst) {
 		dma_unmap_sg(priv->dev, src,
@@ -335,8 +368,7 @@ static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int rin
 	return ndesc;
 }
 
-static int safexcel_aes_send(struct crypto_async_request *base, int ring,
-			     struct safexcel_request *request,
+static int safexcel_send_req(struct crypto_async_request *base, int ring,
 			     struct safexcel_cipher_req *sreq,
 			     struct scatterlist *src, struct scatterlist *dst,
 			     unsigned int cryptlen, unsigned int assoclen,
@@ -346,7 +378,7 @@ static int safexcel_aes_send(struct crypto_async_request *base, int ring,
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	struct safexcel_command_desc *cdesc;
-	struct safexcel_result_desc *rdesc;
+	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
 	struct scatterlist *sg;
 	unsigned int totlen = cryptlen + assoclen;
 	int nr_src, nr_dst, n_cdesc = 0, n_rdesc = 0, queued = totlen;
@@ -386,8 +418,6 @@ static int safexcel_aes_send(struct crypto_async_request *base, int ring,
 		       ctx->opad, ctx->state_sz);
 	}
 
-	spin_lock_bh(&priv->ring[ring].egress_lock);
-
 	/* command descriptors */
 	for_each_sg(src, sg, nr_src, i) {
 		int len = sg_dma_len(sg);
@@ -434,12 +464,12 @@ static int safexcel_aes_send(struct crypto_async_request *base, int ring,
 			ret = PTR_ERR(rdesc);
 			goto rdesc_rollback;
 		}
+		if (first)
+			first_rdesc = rdesc;
 		n_rdesc++;
 	}
 
-	spin_unlock_bh(&priv->ring[ring].egress_lock);
-
-	request->req = base;
+	safexcel_rdr_req_set(priv, ring, first_rdesc, base);
 
 	*commands = n_cdesc;
 	*results = n_rdesc;
@@ -452,8 +482,6 @@ cdesc_rollback:
 	for (i = 0; i < n_cdesc; i++)
 		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
 
-	spin_unlock_bh(&priv->ring[ring].egress_lock);
-
 	if (src == dst) {
 		dma_unmap_sg(priv->dev, src,
 			     sg_nents_for_len(src, totlen),
@@ -481,7 +509,6 @@ static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 	*ret = 0;
 
-	spin_lock_bh(&priv->ring[ring].egress_lock);
 	do {
 		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
 		if (IS_ERR(rdesc)) {
@@ -491,17 +518,13 @@ static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 			break;
 		}
 
-		if (rdesc->result_data.error_code) {
-			dev_err(priv->dev, "cipher: invalidate: result descriptor error (%d)\n",
-				rdesc->result_data.error_code);
-			*ret = -EIO;
-		}
+		if (likely(!*ret))
+			*ret = safexcel_rdesc_check_errors(priv, rdesc);
 
 		ndesc++;
 	} while (!rdesc->last_seg);
 
 	safexcel_complete(priv, ring);
-	spin_unlock_bh(&priv->ring[ring].egress_lock);
 
 	if (ctx->base.exit_inv) {
 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
@@ -577,15 +600,13 @@ static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
 }
 
 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
-				    int ring, struct safexcel_request *request,
-				    int *commands, int *results)
+				    int ring, int *commands, int *results)
 {
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	int ret;
 
-	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring,
-					request);
+	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
 	if (unlikely(ret))
 		return ret;
 
@@ -596,7 +617,6 @@ static int safexcel_cipher_send_inv(struct crypto_async_request *base,
 }
 
 static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
-				  struct safexcel_request *request,
 				  int *commands, int *results)
 {
 	struct skcipher_request *req = skcipher_request_cast(async);
@@ -605,21 +625,19 @@ static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	int ret;
 
-	BUG_ON(priv->version == EIP97 && sreq->needs_inv);
+	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
 
 	if (sreq->needs_inv)
-		ret = safexcel_cipher_send_inv(async, ring, request, commands,
-					       results);
+		ret = safexcel_cipher_send_inv(async, ring, commands, results);
 	else
-		ret = safexcel_aes_send(async, ring, request, sreq, req->src,
+		ret = safexcel_send_req(async, ring, sreq, req->src,
 					req->dst, req->cryptlen, 0, 0, req->iv,
 					commands, results);
 
 	return ret;
 }
 
 static int safexcel_aead_send(struct crypto_async_request *async, int ring,
-			      struct safexcel_request *request, int *commands,
-			      int *results)
+			      int *commands, int *results)
 {
 	struct aead_request *req = aead_request_cast(async);
 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
@@ -628,14 +646,13 @@ static int safexcel_aead_send(struct crypto_async_request *async, int ring,
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	int ret;
 
-	BUG_ON(priv->version == EIP97 && sreq->needs_inv);
+	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
 
 	if (sreq->needs_inv)
-		ret = safexcel_cipher_send_inv(async, ring, request, commands,
-					       results);
+		ret = safexcel_cipher_send_inv(async, ring, commands, results);
 	else
-		ret = safexcel_aes_send(async, ring, request, sreq, req->src,
-					req->dst, req->cryptlen, req->assoclen,
+		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
+					req->cryptlen, req->assoclen,
 					crypto_aead_authsize(tfm), req->iv,
 					commands, results);
 
 	return ret;
@@ -705,9 +722,10 @@ static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
 	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
 }
 
-static int safexcel_aes(struct crypto_async_request *base,
+static int safexcel_queue_req(struct crypto_async_request *base,
 			struct safexcel_cipher_req *sreq,
-			enum safexcel_cipher_direction dir, u32 mode)
+			enum safexcel_cipher_direction dir, u32 mode,
+			enum safexcel_cipher_alg alg)
 {
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
 	struct safexcel_crypto_priv *priv = ctx->priv;
@@ -715,10 +733,11 @@ static int safexcel_aes(struct crypto_async_request *base,
 
 	sreq->needs_inv = false;
 	sreq->direction = dir;
+	ctx->alg = alg;
 	ctx->mode = mode;
 
 	if (ctx->base.ctxr) {
-		if (priv->version == EIP197 && ctx->base.needs_inv) {
+		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
 			sreq->needs_inv = true;
 			ctx->base.needs_inv = false;
 		}
@@ -745,14 +764,16 @@ static int safexcel_aes(struct crypto_async_request *base,
 
 static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
 {
-	return safexcel_aes(&req->base, skcipher_request_ctx(req),
-			    SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB);
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
+			SAFEXCEL_AES);
 }
 
 static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
 {
-	return safexcel_aes(&req->base, skcipher_request_ctx(req),
-			    SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB);
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
+			SAFEXCEL_AES);
 }
 
 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
@@ -795,7 +816,7 @@ static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
 	if (safexcel_cipher_cra_exit(tfm))
 		return;
 
-	if (priv->version == EIP197) {
+	if (priv->flags & EIP197_TRC_CACHE) {
 		ret = safexcel_skcipher_exit_inv(tfm);
 		if (ret)
 			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
@@ -815,7 +836,7 @@ static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
 	if (safexcel_cipher_cra_exit(tfm))
 		return;
 
-	if (priv->version == EIP197) {
+	if (priv->flags & EIP197_TRC_CACHE) {
 		ret = safexcel_aead_exit_inv(tfm);
 		if (ret)
 			dev_warn(priv->dev, "aead: invalidation error %d\n",
@@ -828,6 +849,7 @@ static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
 
 struct safexcel_alg_template safexcel_alg_ecb_aes = {
 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+	.engines = EIP97IES | EIP197B | EIP197D,
 	.alg.skcipher = {
 		.setkey = safexcel_skcipher_aes_setkey,
 		.encrypt = safexcel_ecb_aes_encrypt,
@@ -838,7 +860,7 @@ struct safexcel_alg_template safexcel_alg_ecb_aes = {
 			.cra_name = "ecb(aes)",
 			.cra_driver_name = "safexcel-ecb-aes",
 			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
+			.cra_flags = CRYPTO_ALG_ASYNC |
 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
 			.cra_blocksize = AES_BLOCK_SIZE,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -852,18 +874,21 @@ struct safexcel_alg_template safexcel_alg_ecb_aes = {
 
 static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
 {
-	return safexcel_aes(&req->base, skcipher_request_ctx(req),
-			    SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC);
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
+			SAFEXCEL_AES);
 }
 
 static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
 {
-	return safexcel_aes(&req->base, skcipher_request_ctx(req),
-			    SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC);
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
+			SAFEXCEL_AES);
 }
 
 struct safexcel_alg_template safexcel_alg_cbc_aes = {
 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+	.engines = EIP97IES | EIP197B | EIP197D,
 	.alg.skcipher = {
 		.setkey = safexcel_skcipher_aes_setkey,
 		.encrypt = safexcel_cbc_aes_encrypt,
@@ -875,7 +900,7 @@ struct safexcel_alg_template safexcel_alg_cbc_aes = {
 			.cra_name = "cbc(aes)",
 			.cra_driver_name = "safexcel-cbc-aes",
 			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
+			.cra_flags = CRYPTO_ALG_ASYNC |
 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
 			.cra_blocksize = AES_BLOCK_SIZE,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -887,20 +912,234 @@ struct safexcel_alg_template safexcel_alg_cbc_aes = {
 	},
 };
 
+static int safexcel_cbc_des_encrypt(struct skcipher_request *req)
+{
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
+			SAFEXCEL_DES);
+}
+
+static int safexcel_cbc_des_decrypt(struct skcipher_request *req)
+{
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
+			SAFEXCEL_DES);
+}
+
+static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
+			       unsigned int len)
+{
+	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+	u32 tmp[DES_EXPKEY_WORDS];
+	int ret;
+
+	if (len != DES_KEY_SIZE) {
+		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		return -EINVAL;
+	}
+
+	ret = des_ekey(tmp, key);
+	if (!ret && (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
+		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
+		return -EINVAL;
+	}
+
+	/* if context exits and key changed, need to invalidate it */
+	if (ctx->base.ctxr_dma)
+		if (memcmp(ctx->key, key, len))
+			ctx->base.needs_inv = true;
+
+	memcpy(ctx->key, key, len);
+	ctx->key_len = len;
+
+	return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_cbc_des = {
+	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+	.engines = EIP97IES | EIP197B | EIP197D,
+	.alg.skcipher = {
+		.setkey = safexcel_des_setkey,
+		.encrypt = safexcel_cbc_des_encrypt,
+		.decrypt = safexcel_cbc_des_decrypt,
+		.min_keysize = DES_KEY_SIZE,
+		.max_keysize = DES_KEY_SIZE,
+		.ivsize = DES_BLOCK_SIZE,
+		.base = {
+			.cra_name = "cbc(des)",
+			.cra_driver_name = "safexcel-cbc-des",
+			.cra_priority = 300,
+			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
+				     CRYPTO_ALG_KERN_DRIVER_ONLY,
+			.cra_blocksize = DES_BLOCK_SIZE,
+			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+			.cra_alignmask = 0,
+			.cra_init = safexcel_skcipher_cra_init,
+			.cra_exit = safexcel_skcipher_cra_exit,
+			.cra_module = THIS_MODULE,
+		},
+	},
+};
+
+static int safexcel_ecb_des_encrypt(struct skcipher_request *req)
+{
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
+			SAFEXCEL_DES);
+}
+
+static int safexcel_ecb_des_decrypt(struct skcipher_request *req)
+{
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
+			SAFEXCEL_DES);
+}
+
+struct safexcel_alg_template safexcel_alg_ecb_des = {
+	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+	.engines = EIP97IES | EIP197B | EIP197D,
+	.alg.skcipher = {
+		.setkey = safexcel_des_setkey,
+		.encrypt = safexcel_ecb_des_encrypt,
+		.decrypt = safexcel_ecb_des_decrypt,
+		.min_keysize = DES_KEY_SIZE,
+		.max_keysize = DES_KEY_SIZE,
+		.ivsize = DES_BLOCK_SIZE,
+		.base = {
+			.cra_name = "ecb(des)",
+			.cra_driver_name = "safexcel-ecb-des",
+			.cra_priority = 300,
+			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
+				     CRYPTO_ALG_KERN_DRIVER_ONLY,
+			.cra_blocksize = DES_BLOCK_SIZE,
+			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+			.cra_alignmask = 0,
+			.cra_init = safexcel_skcipher_cra_init,
+			.cra_exit = safexcel_skcipher_cra_exit,
+			.cra_module = THIS_MODULE,
+		},
+	},
+};
+
+static int safexcel_cbc_des3_ede_encrypt(struct skcipher_request *req)
+{
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
+			SAFEXCEL_3DES);
+}
+
+static int safexcel_cbc_des3_ede_decrypt(struct skcipher_request *req)
+{
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
+			SAFEXCEL_3DES);
+}
+
+static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
+				   const u8 *key, unsigned int len)
+{
+	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	if (len != DES3_EDE_KEY_SIZE) {
+		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		return -EINVAL;
+	}
+
+	/* if context exits and key changed, need to invalidate it */
+	if (ctx->base.ctxr_dma) {
+		if (memcmp(ctx->key, key, len))
+			ctx->base.needs_inv = true;
+	}
+
+	memcpy(ctx->key, key, len);
+
+	ctx->key_len = len;
+
+	return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
+	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+	.engines = EIP97IES | EIP197B | EIP197D,
+	.alg.skcipher = {
+		.setkey = safexcel_des3_ede_setkey,
+		.encrypt = safexcel_cbc_des3_ede_encrypt,
+		.decrypt = safexcel_cbc_des3_ede_decrypt,
+		.min_keysize = DES3_EDE_KEY_SIZE,
+		.max_keysize = DES3_EDE_KEY_SIZE,
+		.ivsize = DES3_EDE_BLOCK_SIZE,
+		.base = {
+			.cra_name = "cbc(des3_ede)",
+			.cra_driver_name = "safexcel-cbc-des3_ede",
+			.cra_priority = 300,
+			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
+				     CRYPTO_ALG_KERN_DRIVER_ONLY,
+			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
+			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+			.cra_alignmask = 0,
+			.cra_init = safexcel_skcipher_cra_init,
+			.cra_exit = safexcel_skcipher_cra_exit,
+			.cra_module = THIS_MODULE,
+		},
+	},
+};
+
+static int safexcel_ecb_des3_ede_encrypt(struct skcipher_request *req)
+{
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
+			SAFEXCEL_3DES);
+}
+
+static int safexcel_ecb_des3_ede_decrypt(struct skcipher_request *req)
+{
+	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
+			SAFEXCEL_3DES);
+}
+
+struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
+	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+	.engines = EIP97IES | EIP197B | EIP197D,
+	.alg.skcipher = {
+		.setkey = safexcel_des3_ede_setkey,
+		.encrypt = safexcel_ecb_des3_ede_encrypt,
+		.decrypt = safexcel_ecb_des3_ede_decrypt,
+		.min_keysize = DES3_EDE_KEY_SIZE,
+		.max_keysize = DES3_EDE_KEY_SIZE,
+		.ivsize = DES3_EDE_BLOCK_SIZE,
+		.base = {
+			.cra_name = "ecb(des3_ede)",
+			.cra_driver_name = "safexcel-ecb-des3_ede",
+			.cra_priority = 300,
+			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
+				     CRYPTO_ALG_KERN_DRIVER_ONLY,
+			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
+			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+			.cra_alignmask = 0,
+			.cra_init = safexcel_skcipher_cra_init,
+			.cra_exit = safexcel_skcipher_cra_exit,
+			.cra_module = THIS_MODULE,
+		},
+	},
+};
+
 static int safexcel_aead_encrypt(struct aead_request *req)
 {
 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
 
-	return safexcel_aes(&req->base, creq, SAFEXCEL_ENCRYPT,
-			    CONTEXT_CONTROL_CRYPTO_MODE_CBC);
+	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
+			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
 }
 
 static int safexcel_aead_decrypt(struct aead_request *req)
 {
 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
 
-	return safexcel_aes(&req->base, creq, SAFEXCEL_DECRYPT,
-			    CONTEXT_CONTROL_CRYPTO_MODE_CBC);
+	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
 }
 
 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
@@ -926,13 +1165,14 @@ static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	safexcel_aead_cra_init(tfm);
-	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
+	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
 	ctx->state_sz = SHA1_DIGEST_SIZE;
 	return 0;
 }
 
 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
+	.engines = EIP97IES | EIP197B | EIP197D,
 	.alg.aead = {
 		.setkey = safexcel_aead_aes_setkey,
 		.encrypt = safexcel_aead_encrypt,
@@ -943,7 +1183,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
 			.cra_name = "authenc(hmac(sha1),cbc(aes))",
 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
 			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
+			.cra_flags = CRYPTO_ALG_ASYNC |
 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
 			.cra_blocksize = AES_BLOCK_SIZE,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -960,13 +1200,14 @@ static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	safexcel_aead_cra_init(tfm);
-	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
+	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
 	ctx->state_sz = SHA256_DIGEST_SIZE;
 	return 0;
 }
 
 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
+	.engines = EIP97IES | EIP197B | EIP197D,
 	.alg.aead = {
 		.setkey = safexcel_aead_aes_setkey,
 		.encrypt = safexcel_aead_encrypt,
@@ -977,7 +1218,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
 			.cra_name = "authenc(hmac(sha256),cbc(aes))",
 			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
 			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
+			.cra_flags = CRYPTO_ALG_ASYNC |
 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
 			.cra_blocksize = AES_BLOCK_SIZE,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -994,13 +1235,14 @@ static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	safexcel_aead_cra_init(tfm);
-	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
+	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
 	ctx->state_sz = SHA256_DIGEST_SIZE;
 	return 0;
 }
 
 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
+	.engines = EIP97IES | EIP197B | EIP197D,
 	.alg.aead = {
 		.setkey = safexcel_aead_aes_setkey,
 		.encrypt = safexcel_aead_encrypt,
@@ -1011,7 +1253,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
 			.cra_name = "authenc(hmac(sha224),cbc(aes))",
 			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
 			.cra_priority = 300,
-			.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
+			.cra_flags = CRYPTO_ALG_ASYNC |
 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
 			.cra_blocksize = AES_BLOCK_SIZE,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1022,3 +1264,73 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
 		},
 	},
 };
+
+static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
+{
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	safexcel_aead_cra_init(tfm);
+	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
+	ctx->state_sz = SHA512_DIGEST_SIZE;
+	return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
+	.type = SAFEXCEL_ALG_TYPE_AEAD,
+	.engines = EIP97IES | EIP197B | EIP197D,
+	.alg.aead = {
+		.setkey = safexcel_aead_aes_setkey,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
+		.ivsize = AES_BLOCK_SIZE,
+		.maxauthsize = SHA512_DIGEST_SIZE,
+		.base = {
+			.cra_name = "authenc(hmac(sha512),cbc(aes))",
+			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
+			.cra_priority = 300,
+			.cra_flags = CRYPTO_ALG_ASYNC |
+				     CRYPTO_ALG_KERN_DRIVER_ONLY,
+			.cra_blocksize = AES_BLOCK_SIZE,
+			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+			.cra_alignmask = 0,
+			.cra_init = safexcel_aead_sha512_cra_init,
+			.cra_exit = safexcel_aead_cra_exit,
+			.cra_module = THIS_MODULE,
+		},
+	},
+};
+
+static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
+{
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	safexcel_aead_cra_init(tfm);
+	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
+	ctx->state_sz = SHA512_DIGEST_SIZE;
+	return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
+	.type = SAFEXCEL_ALG_TYPE_AEAD,
+	.engines = EIP97IES | EIP197B | EIP197D,
+	.alg.aead = {
+		.setkey = safexcel_aead_aes_setkey,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
+		.ivsize = AES_BLOCK_SIZE,
+		.maxauthsize = SHA384_DIGEST_SIZE,
+		.base = {
+			.cra_name = "authenc(hmac(sha384),cbc(aes))",
+			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
+			.cra_priority = 300,
+			.cra_flags = CRYPTO_ALG_ASYNC |
+				     CRYPTO_ALG_KERN_DRIVER_ONLY,
+			.cra_blocksize = AES_BLOCK_SIZE,
+			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+			.cra_alignmask = 0,
+			.cra_init = safexcel_aead_sha384_cra_init,
+			.cra_exit = safexcel_aead_cra_exit,
+			.cra_module = THIS_MODULE,
+		},
+	},
+};
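
Not part of the patch above: a minimal sketch of how an in-kernel caller could exercise one of the skciphers this diff registers, e.g. "cbc(des3_ede)" (driver name "safexcel-cbc-des3_ede"), assuming a kernel where this driver is loaded. The helper name example_3des_cbc_encrypt and the in-place single-buffer handling are illustrative only; error handling is abbreviated.

#include <crypto/des.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int example_3des_cbc_encrypt(const u8 *key, u8 *iv, u8 *buf,
				    unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	/* Ask the crypto API for a cbc(des3_ede) implementation. */
	tfm = crypto_alloc_skcipher("cbc(des3_ede)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_skcipher_setkey(tfm, key, DES3_EDE_KEY_SIZE);
	if (ret)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	/* Encrypt the buffer in place; len must be a multiple of the block size. */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* The driver is CRYPTO_ALG_ASYNC, so wait for the completion. */
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return ret;
}

Because the templates above set CRYPTO_ALG_ASYNC, crypto_skcipher_encrypt() may return -EINPROGRESS; crypto_wait_req() handles that by sleeping until the ring completion fires.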