nvvse: Add SM4 implementation

- Add VSE driver and crypto driver changes for SM4
CBC/CTR functionality
- Utilize existing function for AES CBC/CTR for SM4

Jira ESSS-1409

Change-Id: Ifaab10dd81c33b047554c28a37c399a3cb0d3419
Signed-off-by: Prashant Parihar <pparihar@nvidia.com>
Reviewed-on: https://git-master.nvidia.com/r/c/linux-nv-oot/+/3143320
Reviewed-by: Leo Chiu <lchiu@nvidia.com>
GVS: buildbot_gerritrpt <buildbot_gerritrpt@nvidia.com>
Reviewed-by: Sandeep Trasi <strasi@nvidia.com>
This commit is contained in:
Prashant Parihar
2024-05-22 20:31:20 +00:00
committed by mobile promotions
parent 20c43e79ff
commit a21f30c983
4 changed files with 93 additions and 28 deletions

View File

@@ -606,6 +606,8 @@ enum tegra_virtual_se_op_mode {
enum tegra_virtual_se_aes_op_mode { enum tegra_virtual_se_aes_op_mode {
AES_CBC = 0U, AES_CBC = 0U,
AES_CTR = 2U, AES_CTR = 2U,
AES_SM4_CBC = 0x10000U,
AES_SM4_CTR = 0x10002U,
}; };
/* Security Engine request context */ /* Security Engine request context */
@@ -689,6 +691,17 @@ static int32_t validate_header(
return ret; return ret;
} }
/*
 * Check whether @opmode is one of the cipher modes this driver accepts:
 * CBC or CTR, for either AES or SM4.
 *
 * Returns 1 for a supported mode, 0 otherwise (single exit point kept,
 * matching the file's safety-coding style).
 */
static int is_aes_mode_valid(uint32_t opmode)
{
	int valid;

	switch (opmode) {
	case (uint32_t)AES_CBC:
	case (uint32_t)AES_CTR:
	case (uint32_t)AES_SM4_CBC:
	case (uint32_t)AES_SM4_CTR:
		valid = 1;
		break;
	default:
		valid = 0;
		break;
	}

	return valid;
}
static int read_and_validate_dummy_msg( static int read_and_validate_dummy_msg(
struct tegra_virtual_se_dev *se_dev, struct tegra_virtual_se_dev *se_dev,
struct tegra_hv_ivc_cookie *pivck, struct tegra_hv_ivc_cookie *pivck,
@@ -772,8 +785,7 @@ static int read_and_validate_valid_msg(
priv->rx_status = ivc_msg->rx[0].status; priv->rx_status = ivc_msg->rx[0].status;
req_ctx = skcipher_request_ctx(priv->req); req_ctx = skcipher_request_ctx(priv->req);
if ((!priv->rx_status) && (req_ctx->encrypt == true) && if ((!priv->rx_status) && (req_ctx->encrypt == true) &&
((req_ctx->op_mode == AES_CTR) || (is_aes_mode_valid(req_ctx->op_mode) == 1)) {
(req_ctx->op_mode == AES_CBC))) {
memcpy(priv->iv, ivc_msg->rx[0].iv, memcpy(priv->iv, ivc_msg->rx[0].iv,
TEGRA_VIRTUAL_SE_AES_IV_SIZE); TEGRA_VIRTUAL_SE_AES_IV_SIZE);
} }
@@ -2383,9 +2395,9 @@ static void tegra_hv_vse_safety_prepare_cmd(struct tegra_virtual_se_dev *se_dev,
if (req->iv) { if (req->iv) {
memcpy(aes->op.lctr, req->iv, memcpy(aes->op.lctr, req->iv,
TEGRA_VIRTUAL_SE_AES_LCTR_SIZE); TEGRA_VIRTUAL_SE_AES_LCTR_SIZE);
if (req_ctx->op_mode == AES_CTR) if ((req_ctx->op_mode == AES_CTR) || (req_ctx->op_mode == AES_SM4_CTR))
aes->op.ctr_cntn = TEGRA_VIRTUAL_SE_AES_LCTR_CNTN; aes->op.ctr_cntn = TEGRA_VIRTUAL_SE_AES_LCTR_CNTN;
else if (req_ctx->op_mode == AES_CBC) { else if ((req_ctx->op_mode == AES_CBC) || (req_ctx->op_mode == AES_SM4_CBC)) {
if (req_ctx->encrypt == true && aes_ctx->user_nonce == 1U && if (req_ctx->encrypt == true && aes_ctx->user_nonce == 1U &&
aes_ctx->b_is_first != 1U) aes_ctx->b_is_first != 1U)
aes->op.ivsel = AES_UPDATED_IV; aes->op.ivsel = AES_UPDATED_IV;
@@ -2516,9 +2528,8 @@ static int tegra_hv_vse_safety_process_aes_req(struct tegra_virtual_se_dev *se_d
* If userNonce is not provided random IV generation is needed. * If userNonce is not provided random IV generation is needed.
*/ */
if (req_ctx->encrypt && if (req_ctx->encrypt &&
(req_ctx->op_mode == AES_CBC || (is_aes_mode_valid(req_ctx->op_mode) == 1) && (aes_ctx->user_nonce == 0U) &&
req_ctx->op_mode == AES_CTR) && (aes_ctx->user_nonce == 0U) && (req->iv[0] == 1)) {
req->iv[0] == 1) {
//Random IV generation is required //Random IV generation is required
err = tegra_hv_vse_safety_aes_gen_random_iv(se_dev, req, err = tegra_hv_vse_safety_aes_gen_random_iv(se_dev, req,
priv, ivc_req_msg); priv, ivc_req_msg);
@@ -2554,9 +2565,8 @@ static int tegra_hv_vse_safety_process_aes_req(struct tegra_virtual_se_dev *se_d
sg_copy_from_buffer(req->dst, num_sgs, sg_copy_from_buffer(req->dst, num_sgs,
priv->buf, req->cryptlen); priv->buf, req->cryptlen);
if (((req_ctx->op_mode == AES_CBC) if ((is_aes_mode_valid(req_ctx->op_mode) == 1)
|| (req_ctx->op_mode == AES_CTR)) && (req_ctx->encrypt == true) && (aes_ctx->user_nonce == 0U))
&& req_ctx->encrypt == true && aes_ctx->user_nonce == 0U)
memcpy(req->iv, priv->iv, TEGRA_VIRTUAL_SE_AES_IV_SIZE); memcpy(req->iv, priv->iv, TEGRA_VIRTUAL_SE_AES_IV_SIZE);
} else { } else {
dev_err(se_dev->dev, dev_err(se_dev->dev,
@@ -2616,9 +2626,16 @@ static int tegra_hv_vse_safety_aes_cbc_encrypt(struct skcipher_request *req)
req_ctx = skcipher_request_ctx(req); req_ctx = skcipher_request_ctx(req);
req_ctx->encrypt = true; req_ctx->encrypt = true;
req_ctx->op_mode = AES_CBC;
req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine; req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine;
req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine]; req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine];
if ((req_ctx->se_dev->chipdata->sm_supported == false) && (aes_ctx->b_is_sm4 == 1U)) {
pr_err("%s: SM4 CBC is not supported for selected platform\n", __func__);
return -EINVAL;
}
if (aes_ctx->b_is_sm4 == 1U)
req_ctx->op_mode = AES_SM4_CBC;
else
req_ctx->op_mode = AES_CBC;
err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req); err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req);
if (err) if (err)
dev_err(req_ctx->se_dev->dev, dev_err(req_ctx->se_dev->dev,
@@ -2640,9 +2657,20 @@ static int tegra_hv_vse_safety_aes_cbc_decrypt(struct skcipher_request *req)
req_ctx = skcipher_request_ctx(req); req_ctx = skcipher_request_ctx(req);
req_ctx->encrypt = false; req_ctx->encrypt = false;
req_ctx->op_mode = AES_CBC;
req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine; req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine;
req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine]; req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine];
if ((req_ctx->se_dev->chipdata->sm_supported == false) && (aes_ctx->b_is_sm4 == 1U)) {
pr_err("%s: SM4 CBC is not supported for selected platform\n", __func__);
return -EINVAL;
}
if (aes_ctx->b_is_sm4 == 1U)
req_ctx->op_mode = AES_SM4_CBC;
else
req_ctx->op_mode = AES_CBC;
err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req); err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req);
if (err) if (err)
dev_err(req_ctx->se_dev->dev, dev_err(req_ctx->se_dev->dev,
@@ -2671,9 +2699,16 @@ static int tegra_hv_vse_safety_aes_ctr_encrypt(struct skcipher_request *req)
req_ctx = skcipher_request_ctx(req); req_ctx = skcipher_request_ctx(req);
req_ctx->encrypt = true; req_ctx->encrypt = true;
req_ctx->op_mode = AES_CTR;
req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine; req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine;
req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine]; req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine];
if ((req_ctx->se_dev->chipdata->sm_supported == false) && (aes_ctx->b_is_sm4 == 1U)) {
pr_err("%s: SM4 CTR is not supported for selected platform\n", __func__);
return -EINVAL;
}
if (aes_ctx->b_is_sm4 == 1U)
req_ctx->op_mode = AES_SM4_CTR;
else
req_ctx->op_mode = AES_CTR;
err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req); err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req);
if (err) if (err)
dev_err(req_ctx->se_dev->dev, dev_err(req_ctx->se_dev->dev,
@@ -2695,9 +2730,18 @@ static int tegra_hv_vse_safety_aes_ctr_decrypt(struct skcipher_request *req)
req_ctx = skcipher_request_ctx(req); req_ctx = skcipher_request_ctx(req);
req_ctx->encrypt = false; req_ctx->encrypt = false;
req_ctx->op_mode = AES_CTR;
req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine; req_ctx->engine_id = g_crypto_to_ivc_map[aes_ctx->node_id].se_engine;
req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine]; req_ctx->se_dev = g_virtual_se_dev[g_crypto_to_ivc_map[aes_ctx->node_id].se_engine];
if ((req_ctx->se_dev->chipdata->sm_supported == false) && (aes_ctx->b_is_sm4 == 1U)) {
pr_err("%s: SM4 CTR is not supported for selected platform\n", __func__);
return -EINVAL;
}
if (aes_ctx->b_is_sm4 == 1U)
req_ctx->op_mode = AES_SM4_CTR;
else
req_ctx->op_mode = AES_CTR;
err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req); err = tegra_hv_vse_safety_process_aes_req(req_ctx->se_dev, req);
if (err) if (err)
dev_err(req_ctx->se_dev->dev, dev_err(req_ctx->se_dev->dev,

View File

@@ -101,6 +101,8 @@ struct tegra_virtual_se_aes_context {
uint8_t user_nonce; uint8_t user_nonce;
/* Flag to indicate first request*/ /* Flag to indicate first request*/
uint8_t b_is_first; uint8_t b_is_first;
/* Flag to indicate if sm4 is enabled*/
uint8_t b_is_sm4;
}; };
/* Security Engine/TSEC AES CMAC context */ /* Security Engine/TSEC AES CMAC context */

View File

@@ -1245,7 +1245,8 @@ static int tnvvse_crypto_aes_enc_dec(struct tnvvse_crypto_ctx *ctx,
int ret = 0; int ret = 0;
struct tnvvse_crypto_completion tcrypt_complete; struct tnvvse_crypto_completion tcrypt_complete;
struct tegra_virtual_se_aes_context *aes_ctx; struct tegra_virtual_se_aes_context *aes_ctx;
char aes_algo[5][15] = {"cbc-vse(aes)", "ctr-vse(aes)"}; char aes_algo[5][20] = {"cbc-vse(aes)", "ctr-vse(aes)", "gcm-vse(aes)", "cbc-vse(aes)",
"ctr-vse(aes)"};
const char *driver_name; const char *driver_name;
char key_as_keyslot[AES_KEYSLOT_NAME_SIZE] = {0,}; char key_as_keyslot[AES_KEYSLOT_NAME_SIZE] = {0,};
uint8_t next_block_iv[TEGRA_NVVSE_AES_IV_LEN]; uint8_t next_block_iv[TEGRA_NVVSE_AES_IV_LEN];
@@ -1348,22 +1349,31 @@ static int tnvvse_crypto_aes_enc_dec(struct tnvvse_crypto_ctx *ctx,
tnvvse_crypto_complete, &tcrypt_complete); tnvvse_crypto_complete, &tcrypt_complete);
if (aes_ctx->b_is_first == 1U || !aes_enc_dec_ctl->is_encryption) { if (aes_ctx->b_is_first == 1U || !aes_enc_dec_ctl->is_encryption) {
if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CBC) if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CBC) ||
(aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CBC))
memcpy(next_block_iv, aes_enc_dec_ctl->initial_vector, memcpy(next_block_iv, aes_enc_dec_ctl->initial_vector,
TEGRA_NVVSE_AES_IV_LEN); TEGRA_NVVSE_AES_IV_LEN);
else if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) else if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) ||
(aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CTR))
memcpy(next_block_iv, aes_enc_dec_ctl->initial_counter, memcpy(next_block_iv, aes_enc_dec_ctl->initial_counter,
TEGRA_NVVSE_AES_CTR_LEN); TEGRA_NVVSE_AES_CTR_LEN);
else else
memset(next_block_iv, 0, TEGRA_NVVSE_AES_IV_LEN); memset(next_block_iv, 0, TEGRA_NVVSE_AES_IV_LEN);
} else { } else {
if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) ||
memcpy(next_block_iv, ctx->intermediate_counter, TEGRA_NVVSE_AES_CTR_LEN); (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CTR))
memcpy(next_block_iv, ctx->intermediate_counter,
TEGRA_NVVSE_AES_CTR_LEN);
else //As CBC uses IV stored in SE server else //As CBC uses IV stored in SE server
memset(next_block_iv, 0, TEGRA_NVVSE_AES_IV_LEN); memset(next_block_iv, 0, TEGRA_NVVSE_AES_IV_LEN);
} }
pr_debug("%s(): %scryption\n", __func__, (aes_enc_dec_ctl->is_encryption ? "en" : "de")); pr_debug("%s(): %scryption\n", __func__, (aes_enc_dec_ctl->is_encryption ? "en" : "de"));
if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CBC) ||
(aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CTR))
aes_ctx->b_is_sm4 = 1U;
else
aes_ctx->b_is_sm4 = 0U;
/* copy input buffer */ /* copy input buffer */
ret = copy_from_user(in_buf, aes_enc_dec_ctl->src_buffer, in_sz); ret = copy_from_user(in_buf, aes_enc_dec_ctl->src_buffer, in_sz);
@@ -1417,12 +1427,15 @@ static int tnvvse_crypto_aes_enc_dec(struct tnvvse_crypto_ctx *ctx,
goto free_out_buf; goto free_out_buf;
} }
if ((aes_enc_dec_ctl->is_encryption) && if ((aes_enc_dec_ctl->is_encryption) && (aes_enc_dec_ctl->user_nonce == 0U)) {
(aes_enc_dec_ctl->user_nonce == 0U)) { if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CBC) ||
if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CBC) (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CBC))
memcpy(aes_enc_dec_ctl->initial_vector, req->iv, TEGRA_NVVSE_AES_IV_LEN); memcpy(aes_enc_dec_ctl->initial_vector, req->iv,
else if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) TEGRA_NVVSE_AES_IV_LEN);
memcpy(aes_enc_dec_ctl->initial_counter, req->iv, TEGRA_NVVSE_AES_CTR_LEN); else if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) ||
(aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CTR))
memcpy(aes_enc_dec_ctl->initial_counter, req->iv,
TEGRA_NVVSE_AES_CTR_LEN);
} }
if (aes_enc_dec_ctl->user_nonce == 1U) { if (aes_enc_dec_ctl->user_nonce == 1U) {
@@ -1949,12 +1962,14 @@ static long tnvvse_crypto_dev_ioctl(struct file *filp,
/* Copy IV returned by VSE */ /* Copy IV returned by VSE */
if (aes_enc_dec_ctl->is_encryption) { if (aes_enc_dec_ctl->is_encryption) {
if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CBC || if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CBC) ||
aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_GCM) (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CBC) ||
(aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_GCM))
ret = copy_to_user(arg_aes_enc_dec_ctl->initial_vector, ret = copy_to_user(arg_aes_enc_dec_ctl->initial_vector,
aes_enc_dec_ctl->initial_vector, aes_enc_dec_ctl->initial_vector,
sizeof(aes_enc_dec_ctl->initial_vector)); sizeof(aes_enc_dec_ctl->initial_vector));
else if (aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) else if ((aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_CTR) ||
(aes_enc_dec_ctl->aes_mode == TEGRA_NVVSE_AES_MODE_SM4_CTR))
ret = copy_to_user(arg_aes_enc_dec_ctl->initial_counter, ret = copy_to_user(arg_aes_enc_dec_ctl->initial_counter,
aes_enc_dec_ctl->initial_counter, aes_enc_dec_ctl->initial_counter,
sizeof(aes_enc_dec_ctl->initial_counter)); sizeof(aes_enc_dec_ctl->initial_counter));

View File

@@ -92,6 +92,10 @@ enum tegra_nvvse_aes_mode {
TEGRA_NVVSE_AES_MODE_CTR, TEGRA_NVVSE_AES_MODE_CTR,
/** Defines AES MODE GCM */ /** Defines AES MODE GCM */
TEGRA_NVVSE_AES_MODE_GCM, TEGRA_NVVSE_AES_MODE_GCM,
/** Defines SM4 AES CBC Mode */
TEGRA_NVVSE_AES_MODE_SM4_CBC,
/** Defines SM4 AES CTR Mode */
TEGRA_NVVSE_AES_MODE_SM4_CTR,
/** Defines maximum AES MODE, must be last entry*/ /** Defines maximum AES MODE, must be last entry*/
TEGRA_NVVSE_AES_MODE_MAX, TEGRA_NVVSE_AES_MODE_MAX,
}; };