Diffstat (limited to 'drivers/crypto/ccree')
 drivers/crypto/ccree/cc_aead.c           |  35
 drivers/crypto/ccree/cc_cipher.c         | 104
 drivers/crypto/ccree/cc_crypto_ctx.h     |   4
 drivers/crypto/ccree/cc_driver.c         |  50
 drivers/crypto/ccree/cc_driver.h         |  15
 drivers/crypto/ccree/cc_hash.c           | 189
 drivers/crypto/ccree/cc_hw_queue_defs.h  |  30
 7 files changed, 355 insertions(+), 72 deletions(-)
diff --git a/drivers/crypto/ccree/cc_aead.c b/drivers/crypto/ccree/cc_aead.c index 01b82b82f8b8..f2643cda45db 100644 --- a/drivers/crypto/ccree/cc_aead.c +++ b/drivers/crypto/ccree/cc_aead.c @@ -58,6 +58,7 @@ struct cc_aead_ctx { unsigned int enc_keylen; unsigned int auth_keylen; unsigned int authsize; /* Actual (reduced?) size of the MAC/ICv */ + unsigned int hash_len; enum drv_cipher_mode cipher_mode; enum cc_flow_mode flow_mode; enum drv_hash_mode auth_mode; @@ -122,6 +123,13 @@ static void cc_aead_exit(struct crypto_aead *tfm) } } +static unsigned int cc_get_aead_hash_len(struct crypto_aead *tfm) +{ + struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm); + + return cc_get_default_hash_len(ctx->drvdata); +} + static int cc_aead_init(struct crypto_aead *tfm) { struct aead_alg *alg = crypto_aead_alg(tfm); @@ -196,6 +204,7 @@ static int cc_aead_init(struct crypto_aead *tfm) ctx->auth_state.hmac.ipad_opad = NULL; ctx->auth_state.hmac.padded_authkey = NULL; } + ctx->hash_len = cc_get_aead_hash_len(tfm); return 0; @@ -327,7 +336,7 @@ static int hmac_setkey(struct cc_hw_desc *desc, struct cc_aead_ctx *ctx) /* Load the hash current length*/ hw_desc_init(&desc[idx]); set_cipher_mode(&desc[idx], hash_mode); - set_din_const(&desc[idx], 0, ctx->drvdata->hash_len_sz); + set_din_const(&desc[idx], 0, ctx->hash_len); set_flow_mode(&desc[idx], S_DIN_to_HASH); set_setup_mode(&desc[idx], SETUP_LOAD_KEY0); idx++; @@ -465,7 +474,7 @@ static int cc_get_plain_hmac_key(struct crypto_aead *tfm, const u8 *key, /* Load the hash current length*/ hw_desc_init(&desc[idx]); set_cipher_mode(&desc[idx], hashmode); - set_din_const(&desc[idx], 0, ctx->drvdata->hash_len_sz); + set_din_const(&desc[idx], 0, ctx->hash_len); set_cipher_config1(&desc[idx], HASH_PADDING_ENABLED); set_flow_mode(&desc[idx], S_DIN_to_HASH); set_setup_mode(&desc[idx], SETUP_LOAD_KEY0); @@ -1001,7 +1010,7 @@ static void cc_set_hmac_desc(struct aead_request *req, struct cc_hw_desc desc[], hw_desc_init(&desc[idx]); set_cipher_mode(&desc[idx], hash_mode); set_din_sram(&desc[idx], cc_digest_len_addr(ctx->drvdata, hash_mode), - ctx->drvdata->hash_len_sz); + ctx->hash_len); set_flow_mode(&desc[idx], S_DIN_to_HASH); set_setup_mode(&desc[idx], SETUP_LOAD_KEY0); idx++; @@ -1098,7 +1107,7 @@ static void cc_proc_scheme_desc(struct aead_request *req, hw_desc_init(&desc[idx]); set_cipher_mode(&desc[idx], hash_mode); set_dout_sram(&desc[idx], aead_handle->sram_workspace_addr, - ctx->drvdata->hash_len_sz); + ctx->hash_len); set_flow_mode(&desc[idx], S_HASH_to_DOUT); set_setup_mode(&desc[idx], SETUP_WRITE_STATE1); set_cipher_do(&desc[idx], DO_PAD); @@ -1128,7 +1137,7 @@ static void cc_proc_scheme_desc(struct aead_request *req, hw_desc_init(&desc[idx]); set_cipher_mode(&desc[idx], hash_mode); set_din_sram(&desc[idx], cc_digest_len_addr(ctx->drvdata, hash_mode), - ctx->drvdata->hash_len_sz); + ctx->hash_len); set_cipher_config1(&desc[idx], HASH_PADDING_ENABLED); set_flow_mode(&desc[idx], S_DIN_to_HASH); set_setup_mode(&desc[idx], SETUP_LOAD_KEY0); @@ -2358,6 +2367,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_AES, .auth_mode = DRV_HASH_SHA1, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "authenc(hmac(sha1),cbc(des3_ede))", @@ -2377,6 +2387,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_DES, .auth_mode = DRV_HASH_SHA1, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "authenc(hmac(sha256),cbc(aes))", @@ -2396,6 +2407,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = 
S_DIN_to_AES, .auth_mode = DRV_HASH_SHA256, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "authenc(hmac(sha256),cbc(des3_ede))", @@ -2415,6 +2427,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_DES, .auth_mode = DRV_HASH_SHA256, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "authenc(xcbc(aes),cbc(aes))", @@ -2434,6 +2447,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_AES, .auth_mode = DRV_HASH_XCBC_MAC, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "authenc(hmac(sha1),rfc3686(ctr(aes)))", @@ -2453,6 +2467,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_AES, .auth_mode = DRV_HASH_SHA1, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "authenc(hmac(sha256),rfc3686(ctr(aes)))", @@ -2472,6 +2487,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_AES, .auth_mode = DRV_HASH_SHA256, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "authenc(xcbc(aes),rfc3686(ctr(aes)))", @@ -2491,6 +2507,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_AES, .auth_mode = DRV_HASH_XCBC_MAC, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "ccm(aes)", @@ -2510,6 +2527,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_AES, .auth_mode = DRV_HASH_NULL, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "rfc4309(ccm(aes))", @@ -2529,6 +2547,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_AES, .auth_mode = DRV_HASH_NULL, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "gcm(aes)", @@ -2548,6 +2567,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_AES, .auth_mode = DRV_HASH_NULL, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "rfc4106(gcm(aes))", @@ -2567,6 +2587,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_AES, .auth_mode = DRV_HASH_NULL, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "rfc4543(gcm(aes))", @@ -2586,6 +2607,7 @@ static struct cc_alg_template aead_algs[] = { .flow_mode = S_DIN_to_AES, .auth_mode = DRV_HASH_NULL, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, }; @@ -2670,7 +2692,8 @@ int cc_aead_alloc(struct cc_drvdata *drvdata) /* Linux crypto */ for (alg = 0; alg < ARRAY_SIZE(aead_algs); alg++) { - if (aead_algs[alg].min_hw_rev > drvdata->hw_rev) + if ((aead_algs[alg].min_hw_rev > drvdata->hw_rev) || + !(drvdata->std_bodies & aead_algs[alg].std_body)) continue; t_alg = cc_create_aead_alg(&aead_algs[alg], dev); diff --git a/drivers/crypto/ccree/cc_cipher.c b/drivers/crypto/ccree/cc_cipher.c index 7623b29911af..cc92b031fad1 100644 --- a/drivers/crypto/ccree/cc_cipher.c +++ b/drivers/crypto/ccree/cc_cipher.c @@ -7,6 +7,7 @@ #include <crypto/internal/skcipher.h> #include <crypto/des.h> #include <crypto/xts.h> +#include <crypto/sm4.h> #include <crypto/scatterwalk.h> #include "cc_driver.h" @@ -83,6 +84,9 @@ static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size) if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE) return 0; break; + case S_DIN_to_SM4: + if (size == SM4_KEY_SIZE) + return 0; default: break; } @@ -122,6 +126,17 @@ static int validate_data_size(struct cc_cipher_ctx *ctx_p, if (IS_ALIGNED(size, DES_BLOCK_SIZE)) return 0; break; + case S_DIN_to_SM4: + switch (ctx_p->cipher_mode) { + case DRV_CIPHER_CTR: + return 0; + case DRV_CIPHER_ECB: + case 
DRV_CIPHER_CBC: + if (IS_ALIGNED(size, SM4_BLOCK_SIZE)) + return 0; + default: + break; + } default: break; } @@ -522,6 +537,9 @@ static void cc_setup_cipher_data(struct crypto_tfm *tfm, case S_DIN_to_DES: flow_mode = DIN_DES_DOUT; break; + case S_DIN_to_SM4: + flow_mode = DIN_SM4_DOUT; + break; default: dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode); return; @@ -815,6 +833,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_XTS, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "xts512(paes)", @@ -832,6 +851,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 512, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "xts4096(paes)", @@ -849,6 +869,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 4096, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "essiv(paes)", @@ -865,6 +886,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_ESSIV, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "essiv512(paes)", @@ -882,6 +904,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 512, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "essiv4096(paes)", @@ -899,6 +922,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 4096, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "bitlocker(paes)", @@ -915,6 +939,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_BITLOCKER, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "bitlocker512(paes)", @@ -932,6 +957,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 512, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "bitlocker4096(paes)", @@ -949,6 +975,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 4096, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "ecb(paes)", @@ -965,6 +992,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_ECB, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "cbc(paes)", @@ -981,6 +1009,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_CBC, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "ofb(paes)", @@ -997,6 +1026,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_OFB, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "cts(cbc(paes))", @@ -1013,6 +1043,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_CBC_CTS, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "ctr(paes)", @@ -1029,6 +1060,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_CTR, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "xts(aes)", @@ -1045,6 +1077,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_XTS, .flow_mode = S_DIN_to_AES, .min_hw_rev = 
CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "xts512(aes)", @@ -1062,6 +1095,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 512, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "xts4096(aes)", @@ -1079,6 +1113,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 4096, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "essiv(aes)", @@ -1095,6 +1130,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_ESSIV, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "essiv512(aes)", @@ -1112,6 +1148,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 512, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "essiv4096(aes)", @@ -1129,6 +1166,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 4096, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "bitlocker(aes)", @@ -1145,6 +1183,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_BITLOCKER, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "bitlocker512(aes)", @@ -1162,6 +1201,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 512, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "bitlocker4096(aes)", @@ -1179,6 +1219,7 @@ static const struct cc_alg_template skcipher_algs[] = { .flow_mode = S_DIN_to_AES, .data_unit = 4096, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "ecb(aes)", @@ -1195,6 +1236,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_ECB, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "cbc(aes)", @@ -1211,6 +1253,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_CBC, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "ofb(aes)", @@ -1227,6 +1270,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_OFB, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "cts(cbc(aes))", @@ -1243,6 +1287,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_CBC_CTS, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "ctr(aes)", @@ -1259,6 +1304,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_CTR, .flow_mode = S_DIN_to_AES, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "cbc(des3_ede)", @@ -1275,6 +1321,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_CBC, .flow_mode = S_DIN_to_DES, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "ecb(des3_ede)", @@ -1291,6 +1338,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_ECB, .flow_mode = S_DIN_to_DES, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "cbc(des)", @@ -1307,6 +1355,7 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_CBC, .flow_mode = S_DIN_to_DES, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "ecb(des)", @@ -1323,6 
+1372,58 @@ static const struct cc_alg_template skcipher_algs[] = { .cipher_mode = DRV_CIPHER_ECB, .flow_mode = S_DIN_to_DES, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, + }, + { + .name = "cbc(sm4)", + .driver_name = "cbc-sm4-ccree", + .blocksize = SM4_BLOCK_SIZE, + .template_skcipher = { + .setkey = cc_cipher_setkey, + .encrypt = cc_cipher_encrypt, + .decrypt = cc_cipher_decrypt, + .min_keysize = SM4_KEY_SIZE, + .max_keysize = SM4_KEY_SIZE, + .ivsize = SM4_BLOCK_SIZE, + }, + .cipher_mode = DRV_CIPHER_CBC, + .flow_mode = S_DIN_to_SM4, + .min_hw_rev = CC_HW_REV_713, + .std_body = CC_STD_OSCCA, + }, + { + .name = "ecb(sm4)", + .driver_name = "ecb-sm4-ccree", + .blocksize = SM4_BLOCK_SIZE, + .template_skcipher = { + .setkey = cc_cipher_setkey, + .encrypt = cc_cipher_encrypt, + .decrypt = cc_cipher_decrypt, + .min_keysize = SM4_KEY_SIZE, + .max_keysize = SM4_KEY_SIZE, + .ivsize = 0, + }, + .cipher_mode = DRV_CIPHER_ECB, + .flow_mode = S_DIN_to_SM4, + .min_hw_rev = CC_HW_REV_713, + .std_body = CC_STD_OSCCA, + }, + { + .name = "ctr(sm4)", + .driver_name = "ctr-sm4-ccree", + .blocksize = SM4_BLOCK_SIZE, + .template_skcipher = { + .setkey = cc_cipher_setkey, + .encrypt = cc_cipher_encrypt, + .decrypt = cc_cipher_decrypt, + .min_keysize = SM4_KEY_SIZE, + .max_keysize = SM4_KEY_SIZE, + .ivsize = SM4_BLOCK_SIZE, + }, + .cipher_mode = DRV_CIPHER_CTR, + .flow_mode = S_DIN_to_SM4, + .min_hw_rev = CC_HW_REV_713, + .std_body = CC_STD_OSCCA, }, }; @@ -1398,7 +1499,8 @@ int cc_cipher_alloc(struct cc_drvdata *drvdata) dev_dbg(dev, "Number of algorithms = %zu\n", ARRAY_SIZE(skcipher_algs)); for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) { - if (skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) + if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) || + !(drvdata->std_bodies & skcipher_algs[alg].std_body)) continue; dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name); diff --git a/drivers/crypto/ccree/cc_crypto_ctx.h b/drivers/crypto/ccree/cc_crypto_ctx.h index e032544f4e31..c8dac273c563 100644 --- a/drivers/crypto/ccree/cc_crypto_ctx.h +++ b/drivers/crypto/ccree/cc_crypto_ctx.h @@ -115,7 +115,8 @@ enum drv_hash_mode { DRV_HASH_CBC_MAC = 6, DRV_HASH_XCBC_MAC = 7, DRV_HASH_CMAC = 8, - DRV_HASH_MODE_NUM = 9, + DRV_HASH_SM3 = 9, + DRV_HASH_MODE_NUM = 10, DRV_HASH_RESERVE32B = S32_MAX }; @@ -127,6 +128,7 @@ enum drv_hash_hw_mode { DRV_HASH_HW_SHA512 = 4, DRV_HASH_HW_SHA384 = 12, DRV_HASH_HW_GHASH = 6, + DRV_HASH_HW_SM3 = 14, DRV_HASH_HW_RESERVE32B = S32_MAX }; diff --git a/drivers/crypto/ccree/cc_driver.c b/drivers/crypto/ccree/cc_driver.c index 1ff229c2aeab..8ada308d72ee 100644 --- a/drivers/crypto/ccree/cc_driver.c +++ b/drivers/crypto/ccree/cc_driver.c @@ -39,23 +39,38 @@ struct cc_hw_data { char *name; enum cc_hw_rev rev; u32 sig; + int std_bodies; }; /* Hardware revisions defs. 
*/ +/* The 703 is a OSCCA only variant of the 713 */ +static const struct cc_hw_data cc703_hw = { + .name = "703", .rev = CC_HW_REV_713, .std_bodies = CC_STD_OSCCA +}; + +static const struct cc_hw_data cc713_hw = { + .name = "713", .rev = CC_HW_REV_713, .std_bodies = CC_STD_ALL +}; + static const struct cc_hw_data cc712_hw = { - .name = "712", .rev = CC_HW_REV_712, .sig = 0xDCC71200U + .name = "712", .rev = CC_HW_REV_712, .sig = 0xDCC71200U, + .std_bodies = CC_STD_ALL }; static const struct cc_hw_data cc710_hw = { - .name = "710", .rev = CC_HW_REV_710, .sig = 0xDCC63200U + .name = "710", .rev = CC_HW_REV_710, .sig = 0xDCC63200U, + .std_bodies = CC_STD_ALL }; static const struct cc_hw_data cc630p_hw = { - .name = "630P", .rev = CC_HW_REV_630, .sig = 0xDCC63000U + .name = "630P", .rev = CC_HW_REV_630, .sig = 0xDCC63000U, + .std_bodies = CC_STD_ALL }; static const struct of_device_id arm_ccree_dev_of_match[] = { + { .compatible = "arm,cryptocell-703-ree", .data = &cc703_hw }, + { .compatible = "arm,cryptocell-713-ree", .data = &cc713_hw }, { .compatible = "arm,cryptocell-712-ree", .data = &cc712_hw }, { .compatible = "arm,cryptocell-710-ree", .data = &cc710_hw }, { .compatible = "arm,cryptocell-630p-ree", .data = &cc630p_hw }, @@ -204,14 +219,13 @@ static int init_cc_resources(struct platform_device *plat_dev) hw_rev = (struct cc_hw_data *)dev_id->data; new_drvdata->hw_rev_name = hw_rev->name; new_drvdata->hw_rev = hw_rev->rev; + new_drvdata->std_bodies = hw_rev->std_bodies; if (hw_rev->rev >= CC_HW_REV_712) { - new_drvdata->hash_len_sz = HASH_LEN_SIZE_712; new_drvdata->axim_mon_offset = CC_REG(AXIM_MON_COMP); new_drvdata->sig_offset = CC_REG(HOST_SIGNATURE_712); new_drvdata->ver_offset = CC_REG(HOST_VERSION_712); } else { - new_drvdata->hash_len_sz = HASH_LEN_SIZE_630; new_drvdata->axim_mon_offset = CC_REG(AXIM_MON_COMP8); new_drvdata->sig_offset = CC_REG(HOST_SIGNATURE_630); new_drvdata->ver_offset = CC_REG(HOST_VERSION_630); @@ -297,15 +311,17 @@ static int init_cc_resources(struct platform_device *plat_dev) return rc; } - /* Verify correct mapping */ - signature_val = cc_ioread(new_drvdata, new_drvdata->sig_offset); - if (signature_val != hw_rev->sig) { - dev_err(dev, "Invalid CC signature: SIGNATURE=0x%08X != expected=0x%08X\n", - signature_val, hw_rev->sig); - rc = -EINVAL; - goto post_clk_err; + if (hw_rev->rev <= CC_HW_REV_712) { + /* Verify correct mapping */ + signature_val = cc_ioread(new_drvdata, new_drvdata->sig_offset); + if (signature_val != hw_rev->sig) { + dev_err(dev, "Invalid CC signature: SIGNATURE=0x%08X != expected=0x%08X\n", + signature_val, hw_rev->sig); + rc = -EINVAL; + goto post_clk_err; + } + dev_dbg(dev, "CC SIGNATURE=0x%08X\n", signature_val); } - dev_dbg(dev, "CC SIGNATURE=0x%08X\n", signature_val); /* Display HW versions */ dev_info(dev, "ARM CryptoCell %s Driver: HW version 0x%08X, Driver version %s\n", @@ -461,6 +477,14 @@ int cc_clk_on(struct cc_drvdata *drvdata) return 0; } +unsigned int cc_get_default_hash_len(struct cc_drvdata *drvdata) +{ + if (drvdata->hw_rev >= CC_HW_REV_712) + return HASH_LEN_SIZE_712; + else + return HASH_LEN_SIZE_630; +} + void cc_clk_off(struct cc_drvdata *drvdata) { struct clk *clk = drvdata->clk; diff --git a/drivers/crypto/ccree/cc_driver.h b/drivers/crypto/ccree/cc_driver.h index d608a4faf662..5be7fd431b05 100644 --- a/drivers/crypto/ccree/cc_driver.h +++ b/drivers/crypto/ccree/cc_driver.h @@ -36,12 +36,19 @@ extern bool cc_dump_desc; extern bool cc_dump_bytes; -#define DRV_MODULE_VERSION "4.0" +#define DRV_MODULE_VERSION "5.0" 
enum cc_hw_rev { CC_HW_REV_630 = 630, CC_HW_REV_710 = 710, - CC_HW_REV_712 = 712 + CC_HW_REV_712 = 712, + CC_HW_REV_713 = 713 +}; + +enum cc_std_body { + CC_STD_NIST = 0x1, + CC_STD_OSCCA = 0x2, + CC_STD_ALL = 0x3 }; #define CC_COHERENT_CACHE_PARAMS 0xEEE @@ -127,10 +134,10 @@ struct cc_drvdata { bool coherent; char *hw_rev_name; enum cc_hw_rev hw_rev; - u32 hash_len_sz; u32 axim_mon_offset; u32 sig_offset; u32 ver_offset; + int std_bodies; }; struct cc_crypto_alg { @@ -156,6 +163,7 @@ struct cc_alg_template { int flow_mode; /* Note: currently, refers to the cipher mode only. */ int auth_mode; u32 min_hw_rev; + enum cc_std_body std_body; unsigned int data_unit; struct cc_drvdata *drvdata; }; @@ -182,6 +190,7 @@ int init_cc_regs(struct cc_drvdata *drvdata, bool is_probe); void fini_cc_regs(struct cc_drvdata *drvdata); int cc_clk_on(struct cc_drvdata *drvdata); void cc_clk_off(struct cc_drvdata *drvdata); +unsigned int cc_get_default_hash_len(struct cc_drvdata *drvdata); static inline void cc_iowrite(struct cc_drvdata *drvdata, u32 reg, u32 val) { diff --git a/drivers/crypto/ccree/cc_hash.c b/drivers/crypto/ccree/cc_hash.c index b9313306c36f..2c4ddc8fb76b 100644 --- a/drivers/crypto/ccree/cc_hash.c +++ b/drivers/crypto/ccree/cc_hash.c @@ -6,6 +6,7 @@ #include <crypto/algapi.h> #include <crypto/hash.h> #include <crypto/md5.h> +#include <crypto/sm3.h> #include <crypto/internal/hash.h> #include "cc_driver.h" @@ -16,6 +17,7 @@ #define CC_MAX_HASH_SEQ_LEN 12 #define CC_MAX_OPAD_KEYS_SIZE CC_MAX_HASH_BLCK_SIZE +#define CC_SM3_HASH_LEN_SIZE 8 struct cc_hash_handle { cc_sram_addr_t digest_len_sram_addr; /* const value in SRAM*/ @@ -43,6 +45,9 @@ static u64 sha384_init[] = { static u64 sha512_init[] = { SHA512_H7, SHA512_H6, SHA512_H5, SHA512_H4, SHA512_H3, SHA512_H2, SHA512_H1, SHA512_H0 }; +static const u32 sm3_init[] = { + SM3_IVH, SM3_IVG, SM3_IVF, SM3_IVE, + SM3_IVD, SM3_IVC, SM3_IVB, SM3_IVA }; static void cc_setup_xcbc(struct ahash_request *areq, struct cc_hw_desc desc[], unsigned int *seq_size); @@ -82,6 +87,7 @@ struct cc_hash_ctx { int hash_mode; int hw_mode; int inter_digestsize; + unsigned int hash_len; struct completion setkey_comp; bool is_hmac; }; @@ -138,10 +144,10 @@ static void cc_init_req(struct device *dev, struct ahash_req_ctx *state, ctx->hash_mode == DRV_HASH_SHA384) memcpy(state->digest_bytes_len, digest_len_sha512_init, - ctx->drvdata->hash_len_sz); + ctx->hash_len); else memcpy(state->digest_bytes_len, digest_len_init, - ctx->drvdata->hash_len_sz); + ctx->hash_len); } if (ctx->hash_mode != DRV_HASH_NULL) { @@ -321,7 +327,7 @@ static int cc_fin_result(struct cc_hw_desc *desc, struct ahash_request *req, /* Get final MAC result */ hw_desc_init(&desc[idx]); - set_cipher_mode(&desc[idx], ctx->hw_mode); + set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode); /* TODO */ set_dout_dlli(&desc[idx], state->digest_result_dma_addr, digestsize, NS_BIT, 1); @@ -367,7 +373,7 @@ static int cc_fin_hmac(struct cc_hw_desc *desc, struct ahash_request *req, set_cipher_mode(&desc[idx], ctx->hw_mode); set_din_sram(&desc[idx], cc_digest_len_addr(ctx->drvdata, ctx->hash_mode), - ctx->drvdata->hash_len_sz); + ctx->hash_len); set_cipher_config1(&desc[idx], HASH_PADDING_ENABLED); set_flow_mode(&desc[idx], S_DIN_to_HASH); set_setup_mode(&desc[idx], SETUP_LOAD_KEY0); @@ -440,7 +446,7 @@ static int cc_hash_digest(struct ahash_request *req) * digest */ hw_desc_init(&desc[idx]); - set_cipher_mode(&desc[idx], ctx->hw_mode); + set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode); if 
(is_hmac) { set_din_type(&desc[idx], DMA_DLLI, state->digest_buff_dma_addr, ctx->inter_digestsize, NS_BIT); @@ -454,14 +460,14 @@ static int cc_hash_digest(struct ahash_request *req) /* Load the hash current length */ hw_desc_init(&desc[idx]); - set_cipher_mode(&desc[idx], ctx->hw_mode); + set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode); if (is_hmac) { set_din_type(&desc[idx], DMA_DLLI, state->digest_bytes_len_dma_addr, - ctx->drvdata->hash_len_sz, NS_BIT); + ctx->hash_len, NS_BIT); } else { - set_din_const(&desc[idx], 0, ctx->drvdata->hash_len_sz); + set_din_const(&desc[idx], 0, ctx->hash_len); if (nbytes) set_cipher_config1(&desc[idx], HASH_PADDING_ENABLED); else @@ -478,7 +484,7 @@ static int cc_hash_digest(struct ahash_request *req) hw_desc_init(&desc[idx]); set_cipher_mode(&desc[idx], ctx->hw_mode); set_dout_dlli(&desc[idx], state->digest_buff_dma_addr, - ctx->drvdata->hash_len_sz, NS_BIT, 0); + ctx->hash_len, NS_BIT, 0); set_flow_mode(&desc[idx], S_HASH_to_DOUT); set_setup_mode(&desc[idx], SETUP_WRITE_STATE1); set_cipher_do(&desc[idx], DO_PAD); @@ -504,7 +510,7 @@ static int cc_restore_hash(struct cc_hw_desc *desc, struct cc_hash_ctx *ctx, { /* Restore hash digest */ hw_desc_init(&desc[idx]); - set_cipher_mode(&desc[idx], ctx->hw_mode); + set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode); set_din_type(&desc[idx], DMA_DLLI, state->digest_buff_dma_addr, ctx->inter_digestsize, NS_BIT); set_flow_mode(&desc[idx], S_DIN_to_HASH); @@ -513,10 +519,10 @@ static int cc_restore_hash(struct cc_hw_desc *desc, struct cc_hash_ctx *ctx, /* Restore hash current length */ hw_desc_init(&desc[idx]); - set_cipher_mode(&desc[idx], ctx->hw_mode); + set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode); set_cipher_config1(&desc[idx], HASH_PADDING_DISABLED); set_din_type(&desc[idx], DMA_DLLI, state->digest_bytes_len_dma_addr, - ctx->drvdata->hash_len_sz, NS_BIT); + ctx->hash_len, NS_BIT); set_flow_mode(&desc[idx], S_DIN_to_HASH); set_setup_mode(&desc[idx], SETUP_LOAD_KEY0); idx++; @@ -576,7 +582,7 @@ static int cc_hash_update(struct ahash_request *req) /* store the hash digest result in context */ hw_desc_init(&desc[idx]); - set_cipher_mode(&desc[idx], ctx->hw_mode); + set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode); set_dout_dlli(&desc[idx], state->digest_buff_dma_addr, ctx->inter_digestsize, NS_BIT, 0); set_flow_mode(&desc[idx], S_HASH_to_DOUT); @@ -585,9 +591,9 @@ static int cc_hash_update(struct ahash_request *req) /* store current hash length in context */ hw_desc_init(&desc[idx]); - set_cipher_mode(&desc[idx], ctx->hw_mode); + set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode); set_dout_dlli(&desc[idx], state->digest_bytes_len_dma_addr, - ctx->drvdata->hash_len_sz, NS_BIT, 1); + ctx->hash_len, NS_BIT, 1); set_queue_last_ind(ctx->drvdata, &desc[idx]); set_flow_mode(&desc[idx], S_HASH_to_DOUT); set_setup_mode(&desc[idx], SETUP_WRITE_STATE1); @@ -649,9 +655,9 @@ static int cc_do_finup(struct ahash_request *req, bool update) /* Pad the hash */ hw_desc_init(&desc[idx]); set_cipher_do(&desc[idx], DO_PAD); - set_cipher_mode(&desc[idx], ctx->hw_mode); + set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode); set_dout_dlli(&desc[idx], state->digest_bytes_len_dma_addr, - ctx->drvdata->hash_len_sz, NS_BIT, 0); + ctx->hash_len, NS_BIT, 0); set_setup_mode(&desc[idx], SETUP_WRITE_STATE1); set_flow_mode(&desc[idx], S_HASH_to_DOUT); idx++; @@ -749,7 +755,7 @@ static int cc_hash_setkey(struct crypto_ahash *ahash, const u8 *key, /* Load the hash 
current length*/ hw_desc_init(&desc[idx]); set_cipher_mode(&desc[idx], ctx->hw_mode); - set_din_const(&desc[idx], 0, ctx->drvdata->hash_len_sz); + set_din_const(&desc[idx], 0, ctx->hash_len); set_cipher_config1(&desc[idx], HASH_PADDING_ENABLED); set_flow_mode(&desc[idx], S_DIN_to_HASH); set_setup_mode(&desc[idx], SETUP_LOAD_KEY0); @@ -831,7 +837,7 @@ static int cc_hash_setkey(struct crypto_ahash *ahash, const u8 *key, /* Load the hash current length*/ hw_desc_init(&desc[idx]); set_cipher_mode(&desc[idx], ctx->hw_mode); - set_din_const(&desc[idx], 0, ctx->drvdata->hash_len_sz); + set_din_const(&desc[idx], 0, ctx->hash_len); set_flow_mode(&desc[idx], S_DIN_to_HASH); set_setup_mode(&desc[idx], SETUP_LOAD_KEY0); idx++; @@ -1069,6 +1075,16 @@ fail: return -ENOMEM; } +static int cc_get_hash_len(struct crypto_tfm *tfm) +{ + struct cc_hash_ctx *ctx = crypto_tfm_ctx(tfm); + + if (ctx->hash_mode == DRV_HASH_SM3) + return CC_SM3_HASH_LEN_SIZE; + else + return cc_get_default_hash_len(ctx->drvdata); +} + static int cc_cra_init(struct crypto_tfm *tfm) { struct cc_hash_ctx *ctx = crypto_tfm_ctx(tfm); @@ -1086,7 +1102,7 @@ static int cc_cra_init(struct crypto_tfm *tfm) ctx->hw_mode = cc_alg->hw_mode; ctx->inter_digestsize = cc_alg->inter_digestsize; ctx->drvdata = cc_alg->drvdata; - + ctx->hash_len = cc_get_hash_len(tfm); return cc_alloc_ctx(ctx); } @@ -1465,8 +1481,8 @@ static int cc_hash_export(struct ahash_request *req, void *out) memcpy(out, state->digest_buff, ctx->inter_digestsize); out += ctx->inter_digestsize; - memcpy(out, state->digest_bytes_len, ctx->drvdata->hash_len_sz); - out += ctx->drvdata->hash_len_sz; + memcpy(out, state->digest_bytes_len, ctx->hash_len); + out += ctx->hash_len; memcpy(out, &curr_buff_cnt, sizeof(u32)); out += sizeof(u32); @@ -1494,8 +1510,8 @@ static int cc_hash_import(struct ahash_request *req, const void *in) memcpy(state->digest_buff, in, ctx->inter_digestsize); in += ctx->inter_digestsize; - memcpy(state->digest_bytes_len, in, ctx->drvdata->hash_len_sz); - in += ctx->drvdata->hash_len_sz; + memcpy(state->digest_bytes_len, in, ctx->hash_len); + in += ctx->hash_len; /* Sanity check the data as much as possible */ memcpy(&tmp, in, sizeof(u32)); @@ -1515,6 +1531,7 @@ struct cc_hash_template { char mac_name[CRYPTO_MAX_ALG_NAME]; char mac_driver_name[CRYPTO_MAX_ALG_NAME]; unsigned int blocksize; + bool is_mac; bool synchronize; struct ahash_alg template_ahash; int hash_mode; @@ -1522,6 +1539,7 @@ struct cc_hash_template { int inter_digestsize; struct cc_drvdata *drvdata; u32 min_hw_rev; + enum cc_std_body std_body; }; #define CC_STATE_SIZE(_x) \ @@ -1536,6 +1554,7 @@ static struct cc_hash_template driver_hash[] = { .mac_name = "hmac(sha1)", .mac_driver_name = "hmac-sha1-ccree", .blocksize = SHA1_BLOCK_SIZE, + .is_mac = true, .synchronize = false, .template_ahash = { .init = cc_hash_init, @@ -1555,6 +1574,7 @@ static struct cc_hash_template driver_hash[] = { .hw_mode = DRV_HASH_HW_SHA1, .inter_digestsize = SHA1_DIGEST_SIZE, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "sha256", @@ -1562,6 +1582,7 @@ static struct cc_hash_template driver_hash[] = { .mac_name = "hmac(sha256)", .mac_driver_name = "hmac-sha256-ccree", .blocksize = SHA256_BLOCK_SIZE, + .is_mac = true, .template_ahash = { .init = cc_hash_init, .update = cc_hash_update, @@ -1580,6 +1601,7 @@ static struct cc_hash_template driver_hash[] = { .hw_mode = DRV_HASH_HW_SHA256, .inter_digestsize = SHA256_DIGEST_SIZE, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "sha224", @@ 
-1587,6 +1609,7 @@ static struct cc_hash_template driver_hash[] = { .mac_name = "hmac(sha224)", .mac_driver_name = "hmac-sha224-ccree", .blocksize = SHA224_BLOCK_SIZE, + .is_mac = true, .template_ahash = { .init = cc_hash_init, .update = cc_hash_update, @@ -1605,6 +1628,7 @@ static struct cc_hash_template driver_hash[] = { .hw_mode = DRV_HASH_HW_SHA256, .inter_digestsize = SHA256_DIGEST_SIZE, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .name = "sha384", @@ -1612,6 +1636,7 @@ static struct cc_hash_template driver_hash[] = { .mac_name = "hmac(sha384)", .mac_driver_name = "hmac-sha384-ccree", .blocksize = SHA384_BLOCK_SIZE, + .is_mac = true, .template_ahash = { .init = cc_hash_init, .update = cc_hash_update, @@ -1630,6 +1655,7 @@ static struct cc_hash_template driver_hash[] = { .hw_mode = DRV_HASH_HW_SHA512, .inter_digestsize = SHA512_DIGEST_SIZE, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "sha512", @@ -1637,6 +1663,7 @@ static struct cc_hash_template driver_hash[] = { .mac_name = "hmac(sha512)", .mac_driver_name = "hmac-sha512-ccree", .blocksize = SHA512_BLOCK_SIZE, + .is_mac = true, .template_ahash = { .init = cc_hash_init, .update = cc_hash_update, @@ -1655,6 +1682,7 @@ static struct cc_hash_template driver_hash[] = { .hw_mode = DRV_HASH_HW_SHA512, .inter_digestsize = SHA512_DIGEST_SIZE, .min_hw_rev = CC_HW_REV_712, + .std_body = CC_STD_NIST, }, { .name = "md5", @@ -1662,6 +1690,7 @@ static struct cc_hash_template driver_hash[] = { .mac_name = "hmac(md5)", .mac_driver_name = "hmac-md5-ccree", .blocksize = MD5_HMAC_BLOCK_SIZE, + .is_mac = true, .template_ahash = { .init = cc_hash_init, .update = cc_hash_update, @@ -1680,11 +1709,38 @@ static struct cc_hash_template driver_hash[] = { .hw_mode = DRV_HASH_HW_MD5, .inter_digestsize = MD5_DIGEST_SIZE, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, + }, + { + .name = "sm3", + .driver_name = "sm3-ccree", + .blocksize = SM3_BLOCK_SIZE, + .is_mac = false, + .template_ahash = { + .init = cc_hash_init, + .update = cc_hash_update, + .final = cc_hash_final, + .finup = cc_hash_finup, + .digest = cc_hash_digest, + .export = cc_hash_export, + .import = cc_hash_import, + .setkey = cc_hash_setkey, + .halg = { + .digestsize = SM3_DIGEST_SIZE, + .statesize = CC_STATE_SIZE(SM3_DIGEST_SIZE), + }, + }, + .hash_mode = DRV_HASH_SM3, + .hw_mode = DRV_HASH_HW_SM3, + .inter_digestsize = SM3_DIGEST_SIZE, + .min_hw_rev = CC_HW_REV_713, + .std_body = CC_STD_OSCCA, }, { .mac_name = "xcbc(aes)", .mac_driver_name = "xcbc-aes-ccree", .blocksize = AES_BLOCK_SIZE, + .is_mac = true, .template_ahash = { .init = cc_hash_init, .update = cc_mac_update, @@ -1703,11 +1759,13 @@ static struct cc_hash_template driver_hash[] = { .hw_mode = DRV_CIPHER_XCBC_MAC, .inter_digestsize = AES_BLOCK_SIZE, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, { .mac_name = "cmac(aes)", .mac_driver_name = "cmac-aes-ccree", .blocksize = AES_BLOCK_SIZE, + .is_mac = true, .template_ahash = { .init = cc_hash_init, .update = cc_mac_update, @@ -1726,6 +1784,7 @@ static struct cc_hash_template driver_hash[] = { .hw_mode = DRV_CIPHER_CMAC, .inter_digestsize = AES_BLOCK_SIZE, .min_hw_rev = CC_HW_REV_630, + .std_body = CC_STD_NIST, }, }; @@ -1780,6 +1839,7 @@ int cc_init_hash_sram(struct cc_drvdata *drvdata) unsigned int larval_seq_len = 0; struct cc_hw_desc larval_seq[CC_DIGEST_SIZE_MAX / sizeof(u32)]; bool large_sha_supported = (drvdata->hw_rev >= CC_HW_REV_712); + bool sm3_supported = (drvdata->hw_rev >= CC_HW_REV_713); int rc = 0; /* Copy-to-sram 
digest-len */ @@ -1845,6 +1905,17 @@ int cc_init_hash_sram(struct cc_drvdata *drvdata) sram_buff_ofs += sizeof(sha256_init); larval_seq_len = 0; + if (sm3_supported) { + cc_set_sram_desc(sm3_init, sram_buff_ofs, + ARRAY_SIZE(sm3_init), larval_seq, + &larval_seq_len); + rc = send_request_init(drvdata, larval_seq, larval_seq_len); + if (rc) + goto init_digest_const_err; + sram_buff_ofs += sizeof(sm3_init); + larval_seq_len = 0; + } + if (large_sha_supported) { cc_set_sram_desc((u32 *)sha384_init, sram_buff_ofs, (ARRAY_SIZE(sha384_init) * 2), larval_seq, @@ -1911,6 +1982,9 @@ int cc_hash_alloc(struct cc_drvdata *drvdata) sizeof(sha224_init) + sizeof(sha256_init); + if (drvdata->hw_rev >= CC_HW_REV_713) + sram_size_to_alloc += sizeof(sm3_init); + if (drvdata->hw_rev >= CC_HW_REV_712) sram_size_to_alloc += sizeof(digest_len_sha512_init) + sizeof(sha384_init) + sizeof(sha512_init); @@ -1937,30 +2011,33 @@ int cc_hash_alloc(struct cc_drvdata *drvdata) struct cc_hash_alg *t_alg; int hw_mode = driver_hash[alg].hw_mode; - /* We either support both HASH and MAC or none */ - if (driver_hash[alg].min_hw_rev > drvdata->hw_rev) + /* Check that the HW revision and variants are suitable */ + if ((driver_hash[alg].min_hw_rev > drvdata->hw_rev) || + !(drvdata->std_bodies & driver_hash[alg].std_body)) continue; - /* register hmac version */ - t_alg = cc_alloc_hash_alg(&driver_hash[alg], dev, true); - if (IS_ERR(t_alg)) { - rc = PTR_ERR(t_alg); - dev_err(dev, "%s alg allocation failed\n", - driver_hash[alg].driver_name); - goto fail; - } - t_alg->drvdata = drvdata; - - rc = crypto_register_ahash(&t_alg->ahash_alg); - if (rc) { - dev_err(dev, "%s alg registration failed\n", - driver_hash[alg].driver_name); - kfree(t_alg); - goto fail; - } else { - list_add_tail(&t_alg->entry, &hash_handle->hash_list); + if (driver_hash[alg].is_mac) { + /* register hmac version */ + t_alg = cc_alloc_hash_alg(&driver_hash[alg], dev, true); + if (IS_ERR(t_alg)) { + rc = PTR_ERR(t_alg); + dev_err(dev, "%s alg allocation failed\n", + driver_hash[alg].driver_name); + goto fail; + } + t_alg->drvdata = drvdata; + + rc = crypto_register_ahash(&t_alg->ahash_alg); + if (rc) { + dev_err(dev, "%s alg registration failed\n", + driver_hash[alg].driver_name); + kfree(t_alg); + goto fail; + } else { + list_add_tail(&t_alg->entry, + &hash_handle->hash_list); + } } - if (hw_mode == DRV_CIPHER_XCBC_MAC || hw_mode == DRV_CIPHER_CMAC) continue; @@ -2027,7 +2104,7 @@ static void cc_setup_xcbc(struct ahash_request *areq, struct cc_hw_desc desc[], XCBC_MAC_K1_OFFSET), CC_AES_128_BIT_KEY_SIZE, NS_BIT); set_setup_mode(&desc[idx], SETUP_LOAD_KEY0); - set_cipher_mode(&desc[idx], DRV_CIPHER_XCBC_MAC); + set_hash_cipher_mode(&desc[idx], DRV_CIPHER_XCBC_MAC, ctx->hash_mode); set_cipher_config0(&desc[idx], DESC_DIRECTION_ENCRYPT_ENCRYPT); set_key_size_aes(&desc[idx], CC_AES_128_BIT_KEY_SIZE); set_flow_mode(&desc[idx], S_DIN_to_AES); @@ -2162,6 +2239,8 @@ static const void *cc_larval_digest(struct device *dev, u32 mode) return sha384_init; case DRV_HASH_SHA512: return sha512_init; + case DRV_HASH_SM3: + return sm3_init; default: dev_err(dev, "Invalid hash mode (%d)\n", mode); return md5_init; @@ -2182,6 +2261,8 @@ cc_sram_addr_t cc_larval_digest_addr(void *drvdata, u32 mode) struct cc_drvdata *_drvdata = (struct cc_drvdata *)drvdata; struct cc_hash_handle *hash_handle = _drvdata->hash_handle; struct device *dev = drvdata_to_dev(_drvdata); + bool sm3_supported = (_drvdata->hw_rev >= CC_HW_REV_713); + cc_sram_addr_t addr; switch (mode) { case DRV_HASH_NULL: @@ 
-2200,19 +2281,31 @@ cc_sram_addr_t cc_larval_digest_addr(void *drvdata, u32 mode) sizeof(md5_init) + sizeof(sha1_init) + sizeof(sha224_init)); - case DRV_HASH_SHA384: + case DRV_HASH_SM3: return (hash_handle->larval_digest_sram_addr + sizeof(md5_init) + sizeof(sha1_init) + sizeof(sha224_init) + sizeof(sha256_init)); + case DRV_HASH_SHA384: + addr = (hash_handle->larval_digest_sram_addr + + sizeof(md5_init) + + sizeof(sha1_init) + + sizeof(sha224_init) + + sizeof(sha256_init)); + if (sm3_supported) + addr += sizeof(sm3_init); + return addr; case DRV_HASH_SHA512: - return (hash_handle->larval_digest_sram_addr + + addr = (hash_handle->larval_digest_sram_addr + sizeof(md5_init) + sizeof(sha1_init) + sizeof(sha224_init) + sizeof(sha256_init) + sizeof(sha384_init)); + if (sm3_supported) + addr += sizeof(sm3_init); + return addr; default: dev_err(dev, "Invalid hash mode (%d)\n", mode); } diff --git a/drivers/crypto/ccree/cc_hw_queue_defs.h b/drivers/crypto/ccree/cc_hw_queue_defs.h index 45985b955d2c..7a9b90db7db7 100644 --- a/drivers/crypto/ccree/cc_hw_queue_defs.h +++ b/drivers/crypto/ccree/cc_hw_queue_defs.h @@ -42,6 +42,7 @@ #define WORD3_QUEUE_LAST_IND CC_GENMASK(3, QUEUE_LAST_IND) #define WORD4_ACK_NEEDED CC_GENMASK(4, ACK_NEEDED) #define WORD4_AES_SEL_N_HASH CC_GENMASK(4, AES_SEL_N_HASH) +#define WORD4_AES_XOR_CRYPTO_KEY CC_GENMASK(4, AES_XOR_CRYPTO_KEY) #define WORD4_BYTES_SWAP CC_GENMASK(4, BYTES_SWAP) #define WORD4_CIPHER_CONF0 CC_GENMASK(4, CIPHER_CONF0) #define WORD4_CIPHER_CONF1 CC_GENMASK(4, CIPHER_CONF1) @@ -107,6 +108,7 @@ enum cc_flow_mode { AES_to_AES_to_HASH_and_DOUT = 13, AES_to_AES_to_HASH = 14, AES_to_HASH_and_AES = 15, + DIN_SM4_DOUT = 16, DIN_AES_AESMAC = 17, HASH_to_DOUT = 18, /* setup flows */ @@ -114,9 +116,11 @@ enum cc_flow_mode { S_DIN_to_AES2 = 33, S_DIN_to_DES = 34, S_DIN_to_RC4 = 35, + S_DIN_to_SM4 = 36, S_DIN_to_HASH = 37, S_AES_to_DOUT = 38, S_AES2_to_DOUT = 39, + S_SM4_to_DOUT = 40, S_RC4_to_DOUT = 41, S_DES_to_DOUT = 42, S_HASH_to_DOUT = 43, @@ -394,6 +398,16 @@ static inline void set_aes_not_hash_mode(struct cc_hw_desc *pdesc) } /* + * Set aes xor crypto key, this in some secenrios select SM3 engine + * + * @pdesc: pointer HW descriptor struct + */ +static inline void set_aes_xor_crypto_key(struct cc_hw_desc *pdesc) +{ + pdesc->word[4] |= FIELD_PREP(WORD4_AES_XOR_CRYPTO_KEY, 1); +} + +/* * Set the DOUT field of a HW descriptors to SRAM mode * Note: No need to check SRAM alignment since host requests do not use SRAM and * adaptor will enforce alignment check. @@ -455,6 +469,22 @@ static inline void set_cipher_mode(struct cc_hw_desc *pdesc, int mode) } /* + * Set the cipher mode for hash algorithms. + * + * @pdesc: pointer HW descriptor struct + * @cipher_mode: Any one of the modes defined in [CC7x-DESC] + * @hash_mode: specifies which hash is being handled + */ +static inline void set_hash_cipher_mode(struct cc_hw_desc *pdesc, + enum drv_cipher_mode cipher_mode, + enum drv_hash_mode hash_mode) +{ + set_cipher_mode(pdesc, cipher_mode); + if (hash_mode == DRV_HASH_SM3) + set_aes_xor_crypto_key(pdesc); +} + +/* * Set the cipher configuration fields. * * @pdesc: pointer HW descriptor struct |
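
The recurring "+ .std_body = ..." hunks above pair with the new std_bodies mask in struct cc_drvdata: each algorithm template declares the standards body that defines it, each HW variant declares which bodies it implements, and the alloc loops in cc_aead.c, cc_cipher.c and cc_hash.c skip any template whose bit is not set. A minimal standalone sketch of that filter follows (simplified names, not the driver's actual structures):

#include <stdio.h>

/* Simplified copies of the driver enums, for illustration only. */
enum cc_std_body {
	CC_STD_NIST  = 0x1,
	CC_STD_OSCCA = 0x2,
	CC_STD_ALL   = 0x3,
};

struct alg_template {
	const char *name;
	unsigned int min_hw_rev;
	enum cc_std_body std_body;
};

/* Mirrors the check added to cc_aead_alloc()/cc_cipher_alloc()/cc_hash_alloc(). */
static int alg_supported(const struct alg_template *tmpl,
			 unsigned int hw_rev, int std_bodies)
{
	if (tmpl->min_hw_rev > hw_rev)
		return 0;
	if (!(std_bodies & tmpl->std_body))
		return 0;
	return 1;
}

int main(void)
{
	const struct alg_template algs[] = {
		{ "cbc(aes)", 630, CC_STD_NIST },
		{ "cbc(sm4)", 713, CC_STD_OSCCA },
		{ "sm3",      713, CC_STD_OSCCA },
	};
	/* Model a CC703: rev 713 hardware with only the OSCCA bit set. */
	unsigned int hw_rev = 713;
	int std_bodies = CC_STD_OSCCA;

	for (unsigned int i = 0; i < sizeof(algs) / sizeof(algs[0]); i++)
		printf("%-10s %s\n", algs[i].name,
		       alg_supported(&algs[i], hw_rev, std_bodies) ?
		       "registered" : "skipped");
	return 0;
}

Run against this CC703-like configuration, the NIST entries are skipped and only the SM4/SM3 entries register, which matches the intent of the ".std_body = CC_STD_OSCCA" annotations in the diff.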
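
SM3 also changes the size of the hash-length counter the hardware maintains, which is why the patch replaces the per-device drvdata->hash_len_sz with a per-tfm ctx->hash_len: SM3 uses CC_SM3_HASH_LEN_SIZE (8 bytes) while SHA on the same 712/713 part keeps the larger field. A sketch of the selection logic; the HASH_LEN_SIZE_* values are assumed from the driver's cc_crypto_ctx.h and are not part of this diff:

#include <stdio.h>

/* Assumed from cc_crypto_ctx.h; only CC_SM3_HASH_LEN_SIZE appears in this diff. */
#define HASH_LEN_SIZE_712	16
#define HASH_LEN_SIZE_630	8
#define CC_SM3_HASH_LEN_SIZE	8

/* Mirrors cc_get_default_hash_len() plus the SM3 special case in cc_get_hash_len(). */
static unsigned int cc_hash_len(unsigned int hw_rev, int is_sm3)
{
	if (is_sm3)
		return CC_SM3_HASH_LEN_SIZE;
	return (hw_rev >= 712) ? HASH_LEN_SIZE_712 : HASH_LEN_SIZE_630;
}

int main(void)
{
	printf("sha256 on 713: %u\n", cc_hash_len(713, 0));	/* 16 */
	printf("sm3 on 713:    %u\n", cc_hash_len(713, 1));	/* 8 */
	printf("sha1 on 630:   %u\n", cc_hash_len(630, 0));	/* 8 */
	return 0;
}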
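
Finally, the SRAM layout of the larval digests shifts on SM3-capable parts: sm3_init is loaded between sha256_init and sha384_init, so cc_larval_digest_addr() now adds sizeof(sm3_init) to the SHA-384/SHA-512 addresses when hw_rev >= 713. A worked example of that offset arithmetic; the array sizes follow the standard digest widths and the base address is hypothetical:

#include <stdio.h>

/*
 * Byte sizes of the larval digest arrays, taken from the standard digest
 * widths (md5_init has 4 u32s, sha1_init 5, sha224/sha256/sm3 8 u32s each,
 * sha384/sha512 are stored as 8 u64s in the driver).
 */
#define MD5_INIT_SZ	(4 * 4)
#define SHA1_INIT_SZ	(5 * 4)
#define SHA224_INIT_SZ	(8 * 4)
#define SHA256_INIT_SZ	(8 * 4)
#define SM3_INIT_SZ	(8 * 4)
#define SHA384_INIT_SZ	(8 * 8)

/* Hypothetical SRAM base address, for illustration only. */
#define LARVAL_BASE	0x1000u

static unsigned int larval_addr_sha512(int sm3_supported)
{
	unsigned int addr = LARVAL_BASE + MD5_INIT_SZ + SHA1_INIT_SZ +
			    SHA224_INIT_SZ + SHA256_INIT_SZ + SHA384_INIT_SZ;

	/* On rev >= 713 the sm3_init block sits before sha384/sha512. */
	if (sm3_supported)
		addr += SM3_INIT_SZ;
	return addr;
}

int main(void)
{
	printf("sha512 larval addr, no SM3: 0x%x\n", larval_addr_sha512(0)); /* 0x10a4 */
	printf("sha512 larval addr, SM3:    0x%x\n", larval_addr_sha512(1)); /* 0x10c4 */
	return 0;
}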