diff --git a/drivers/crypto/tegra-hv-vse-safety.c b/drivers/crypto/tegra-hv-vse-safety.c index 67cd3ffc..e672c1cb 100644 --- a/drivers/crypto/tegra-hv-vse-safety.c +++ b/drivers/crypto/tegra-hv-vse-safety.c @@ -606,6 +606,8 @@ enum tegra_virtual_se_op_mode { enum tegra_virtual_se_aes_op_mode { AES_CBC = 0U, AES_CTR = 2U, + AES_SM4_CBC = 0x10000U, + AES_SM4_CTR = 0x10002U, }; /* Security Engine request context */ @@ -689,6 +691,17 @@ static int32_t validate_header( return ret; } +static int is_aes_mode_valid(uint32_t opmode) +{ + int ret = 0; + + if ((opmode == (uint32_t)AES_CBC) || (opmode == (uint32_t)AES_SM4_CBC) || + (opmode == (uint32_t)AES_SM4_CTR) || (opmode == (uint32_t)AES_CTR)) { + ret = 1; + } + return ret; +} + static int read_and_validate_dummy_msg( struct tegra_virtual_se_dev *se_dev, struct tegra_hv_ivc_cookie *pivck, @@ -772,8 +785,7 @@ static int read_and_validate_valid_msg( priv->rx_status = ivc_msg->rx[0].status; req_ctx = skcipher_request_ctx(priv->req); if ((!priv->rx_status) && (req_ctx->encrypt == true) && - ((req_ctx->op_mode == AES_CTR) || - (req_ctx->op_mode == AES_CBC))) { + (is_aes_mode_valid(req_ctx->op_mode) == 1)) { memcpy(priv->iv, ivc_msg->rx[0].iv, TEGRA_VIRTUAL_SE_AES_IV_SIZE); } @@ -2383,9 +2395,9 @@ static void tegra_hv_vse_safety_prepare_cmd(struct tegra_virtual_se_dev *se_dev, if (req->iv) { memcpy(aes->op.lctr, req->iv, TEGRA_VIRTUAL_SE_AES_LCTR_SIZE); - if (req_ctx->op_mode == AES_CTR) + if ((req_ctx->op_mode == AES_CTR) || (req_ctx->op_mode == AES_SM4_CTR)) aes->op.ctr_cntn = TEGRA_VIRTUAL_SE_AES_LCTR_CNTN; - else if (req_ctx->op_mode == AES_CBC) { + else if ((req_ctx->op_mode == AES_CBC) || (req_ctx->op_mode == AES_SM4_CBC)) { if (req_ctx->encrypt == true && aes_ctx->user_nonce == 1U && aes_ctx->b_is_first != 1U) aes->op.ivsel = AES_UPDATED_IV; @@ -2516,9 +2528,8 @@ static int tegra_hv_vse_safety_process_aes_req(struct tegra_virtual_se_dev *se_d * If userNonce is not provided random IV generation is needed. 
*/ if (req_ctx->encrypt && - (req_ctx->op_mode == AES_CBC || - req_ctx->op_mode == AES_CTR) && (aes_ctx->user_nonce == 0U) && - req->iv[0] == 1) { + (is_aes_mode_valid(req_ctx->op_mode) == 1) && (aes_ctx->user_nonce == 0U) && + (req->iv[0] == 1)) { //Random IV generation is required err = tegra_hv_vse_safety_aes_gen_random_iv(se_dev, req, priv, ivc_req_msg); @@ -2554,9 +2565,8 @@ static int tegra_hv_vse_safety_process_aes_req(struct tegra_virtual_se_dev *se_d sg_copy_from_buffer(req->dst, num_sgs, priv->buf, req->cryptlen); - if (((req_ctx->op_mode == AES_CBC) - || (req_ctx->op_mode == AES_CTR)) - && req_ctx->encrypt == true && aes_ctx->user_nonce == 0U) + if ((is_aes_mode_valid(req_ctx->op_mode) == 1) + && (req_ctx->encrypt == true) && (aes_ctx->user_nonce == 0U)) memcpy(req->iv, priv->iv, TEGRA_VIRTUAL_SE_AES_IV_SIZE); } else { dev_err(se_dev->dev, @@ -2616,9 +2626,16 @@ static int tegra_hv_vse_safety_aes_cbc_encrypt(struct skcipher_request *req) req_ctx = skcipher_request_ctx(req); req_ctx->encrypt = true; - req_ctx->op_mode = AES_CBC; req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine; req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine]; + if ((req_ctx->se_dev->chipdata->sm_supported == false) && (aes_ctx->b_is_sm4 == 1U)) { + pr_err("%s: SM4 CBC is not supported for selected platform\n", __func__); + return -EINVAL; + } + if (aes_ctx->b_is_sm4 == 1U) + req_ctx->op_mode = AES_SM4_CBC; + else + req_ctx->op_mode = AES_CBC; err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req); if (err) dev_err(req_ctx->se_dev->dev, @@ -2640,9 +2657,20 @@ static int tegra_hv_vse_safety_aes_cbc_decrypt(struct skcipher_request *req) req_ctx = skcipher_request_ctx(req); req_ctx->encrypt = false; - req_ctx->op_mode = AES_CBC; + req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine; req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine]; + + if 
((req_ctx->se_dev->chipdata->sm_supported == false) && (aes_ctx->b_is_sm4 == 1U)) { + pr_err("%s: SM4 CBC is not supported for selected platform\n", __func__); + return -EINVAL; + } + + if (aes_ctx->b_is_sm4 == 1U) + req_ctx->op_mode = AES_SM4_CBC; + else + req_ctx->op_mode = AES_CBC; + err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req); if (err) dev_err(req_ctx->se_dev->dev, @@ -2671,9 +2699,16 @@ static int tegra_hv_vse_safety_aes_ctr_encrypt(struct skcipher_request *req) req_ctx = skcipher_request_ctx(req); req_ctx->encrypt = true; - req_ctx->op_mode = AES_CTR; req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine; req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine]; + if ((req_ctx->se_dev->chipdata->sm_supported == false) && (aes_ctx->b_is_sm4 == 1U)) { + pr_err("%s: SM4 CTR is not supported for selected platform\n", __func__); + return -EINVAL; + } + if (aes_ctx->b_is_sm4 == 1U) + req_ctx->op_mode = AES_SM4_CTR; + else + req_ctx->op_mode = AES_CTR; err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req); if (err) dev_err(req_ctx->se_dev->dev, @@ -2695,9 +2730,18 @@ static int tegra_hv_vse_safety_aes_ctr_decrypt(struct skcipher_request *req) req_ctx = skcipher_request_ctx(req); req_ctx->encrypt = false; - req_ctx->op_mode = AES_CTR; + req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine; req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine]; + if ((req_ctx->se_dev->chipdata->sm_supported == false) && (aes_ctx->b_is_sm4 == 1U)) { + pr_err("%s: SM4 CTR is not supported for selected platform\n", __func__); + return -EINVAL; + } + if (aes_ctx->b_is_sm4 == 1U) + req_ctx->op_mode = AES_SM4_CTR; + else + req_ctx->op_mode = AES_CTR; + err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req); if (err) dev_err(req_ctx->se_dev->dev, diff --git a/drivers/crypto/tegra-hv-vse.h b/drivers/crypto/tegra-hv-vse.h index 9cfe089f..f5c61647 100644 --- 
a/drivers/crypto/tegra-hv-vse.h +++ b/drivers/crypto/tegra-hv-vse.h @@ -101,6 +101,8 @@ struct tegra_virtual_se_aes_context { uint8_t user_nonce; /* Flag to indicate first request*/ uint8_t b_is_first; + /* Flag to indicate if sm4 is enabled*/ + uint8_t b_is_sm4; }; /* Security Engine/TSEC AES CMAC context */ diff --git a/drivers/crypto/tegra-nvvse-cryptodev.c b/drivers/crypto/tegra-nvvse-cryptodev.c index 5a8ebc33..51e446e2 100644 --- a/drivers/crypto/tegra-nvvse-cryptodev.c +++ b/drivers/crypto/tegra-nvvse-cryptodev.c @@ -1245,7 +1245,8 @@ static int tnvvse_crypto_aes_enc_dec(struct tnvvse_crypto_ctx *ctx, int ret = 0; struct tnvvse_crypto_completion tcrypt_complete; struct tegra_virtual_se_aes_context *aes_ctx; - char aes_algo[5][15] = {"cbc-vse(aes)", "ctr-vse(aes)"}; + char aes_algo[5][20] = {"cbc-vse(aes)", "ctr-vse(aes)", "gcm-vse(aes)", "cbc-vse(aes)", + "ctr-vse(aes)"}; const char *driver_name; char key_as_keyslot[AES_KEYSLOT_NAME_SIZE] = {0,}; uint8_t next_block_iv[TEGRA_NVVSE_AES_IV_LEN]; @@ -1348,22 +1349,31 @@ static int tnvvse_crypto_aes_enc_dec(struct tnvvse_crypto_ctx *ctx, tnvvse_crypto_complete, &tcrypt_complete); if (aes_ctx->b_is_first == 1U || !aes_enc_dec_ctl->is_encryption) { - if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CBC) + if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CBC) || + (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CBC)) memcpy(next_block_iv, aes_enc_dec_ctl->initial_vector, TEGRA_NVVSE_AES_IV_LEN); - else if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) + else if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) || + (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CTR)) memcpy(next_block_iv, aes_enc_dec_ctl->initial_counter, TEGRA_NVVSE_AES_CTR_LEN); else memset(next_block_iv, 0, TEGRA_NVVSE_AES_IV_LEN); } else { - if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) - memcpy(next_block_iv, ctx->intermediate_counter, TEGRA_NVVSE_AES_CTR_LEN); + if 
((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) || + (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CTR)) + memcpy(next_block_iv, ctx->intermediate_counter, + TEGRA_NVVSE_AES_CTR_LEN); else //As CBC uses IV stored in SE server memset(next_block_iv, 0, TEGRA_NVVSE_AES_IV_LEN); } pr_debug("%s(): %scryption\n", __func__, (aes_enc_dec_ctl->is_encryption ? "en" : "de")); + if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CBC) || + (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CTR)) + aes_ctx->b_is_sm4 = 1U; + else + aes_ctx->b_is_sm4 = 0U; /* copy input buffer */ ret = copy_from_user(in_buf, aes_enc_dec_ctl->src_buffer, in_sz); @@ -1417,12 +1427,15 @@ static int tnvvse_crypto_aes_enc_dec(struct tnvvse_crypto_ctx *ctx, goto free_out_buf; } - if ((aes_enc_dec_ctl->is_encryption) && - (aes_enc_dec_ctl->user_nonce == 0U)) { - if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CBC) - memcpy(aes_enc_dec_ctl->initial_vector, req->iv, TEGRA_NVVSE_AES_IV_LEN); - else if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) - memcpy(aes_enc_dec_ctl->initial_counter, req->iv, TEGRA_NVVSE_AES_CTR_LEN); + if ((aes_enc_dec_ctl->is_encryption) && (aes_enc_dec_ctl->user_nonce == 0U)) { + if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CBC) || + (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CBC)) + memcpy(aes_enc_dec_ctl->initial_vector, req->iv, + TEGRA_NVVSE_AES_IV_LEN); + else if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) || + (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CTR)) + memcpy(aes_enc_dec_ctl->initial_counter, req->iv, + TEGRA_NVVSE_AES_CTR_LEN); } if (aes_enc_dec_ctl->user_nonce == 1U) { @@ -1949,12 +1962,14 @@ static long tnvvse_crypto_dev_ioctl(struct file *filp, /* Copy IV returned by VSE */ if (aes_enc_dec_ctl->is_encryption) { - if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CBC || - aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_GCM) + if ((aes_enc_dec_ctl->aes_mode == 
TEGRA_NVVSE_AES_MODE_CBC) || + (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CBC) || + (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_GCM)) ret = copy_to_user(arg_aes_enc_dec_ctl->initial_vector, aes_enc_dec_ctl->initial_vector, sizeof(aes_enc_dec_ctl->initial_vector)); - else if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) + else if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) || + (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CTR)) ret = copy_to_user(arg_aes_enc_dec_ctl->initial_counter, aes_enc_dec_ctl->initial_counter, sizeof(aes_enc_dec_ctl->initial_counter)); diff --git a/include/uapi/misc/tegra-nvvse-cryptodev.h b/include/uapi/misc/tegra-nvvse-cryptodev.h index 789fe466..5cb36836 100644 --- a/include/uapi/misc/tegra-nvvse-cryptodev.h +++ b/include/uapi/misc/tegra-nvvse-cryptodev.h @@ -92,6 +92,10 @@ enum tegra_nvvse_aes_mode { TEGRA_NVVSE_AES_MODE_CTR, /** Defines AES MODE GCM */ TEGRA_NVVSE_AES_MODE_GCM, + /** Defines SM4 AES CBC Mode */ + TEGRA_NVVSE_AES_MODE_SM4_CBC, + /** Defines SM4 AES CTR Mode */ + TEGRA_NVVSE_AES_MODE_SM4_CTR, /** Defines maximum AES MODE, must be last entry*/ TEGRA_NVVSE_AES_MODE_MAX, };