crypto: tegra: Add Tegra SE driver for T264

Add Tegra Security Engine driver support for T264, which implements the
AES-ECB/CBC/CTR/XTS, AES-GCM, AES-CCM, SHA1/SHA2/SHA3, SM4 and SM3
algorithms.

Signed-off-by: Akhil R <akhilrajeev@nvidia.com>
Change-Id: I86be2fcc485c31988496395183cb44a386907668
Author:    Akhil R <akhilrajeev@nvidia.com>
Date:      2024-07-17 18:41:20 +05:30
Committer: Jon Hunter
Parent:    897629c033
Commit:    a29b51cd58
8 changed files with 2966 additions and 119 deletions


@@ -7,6 +7,7 @@ tegra-se-objs := tegra-se-key.o tegra-se-main.o
tegra-se-y += tegra-se-aes.o
tegra-se-y += tegra-se-hash.o
tegra-se-y += tegra-se-sm4.o
obj-m += tegra-se.o
obj-m += tegra-se-kds.o


@@ -42,6 +42,8 @@ struct tegra_aes_reqctx {
u32 crypto_config;
u32 len;
u32 *iv;
u32 key1_id;
u32 key2_id;
};

struct tegra_aead_ctx {
@@ -51,6 +53,9 @@ struct tegra_aead_ctx {
struct tegra_se *se;
unsigned int authsize;
u32 alg;
u32 mac_alg;
u32 final_alg;
u32 verify_alg;
u32 keylen;
u32 key_id;
};
@@ -76,7 +81,8 @@ struct tegra_cmac_ctx {
struct crypto_engine_ctx enginectx;
#endif
struct tegra_se *se;
u32 alg;
u32 final_alg;
u32 key_id;
struct crypto_shash *fallback_tfm;
};
@@ -84,6 +90,7 @@ struct tegra_cmac_ctx {
struct tegra_cmac_reqctx {
struct scatterlist *src_sg;
struct tegra_se_datbuf datbuf;
struct tegra_se_datbuf digest;
struct tegra_se_datbuf residue;
unsigned int total_len;
unsigned int blk_size;
@@ -138,6 +145,7 @@ static int tegra234_aes_crypto_cfg(u32 alg, bool encrypt)
{
switch (alg) {
case SE_ALG_CMAC:
case SE_ALG_CMAC_FINAL:
case SE_ALG_GMAC:
case SE_ALG_GCM:
case SE_ALG_GCM_FINAL:
@@ -201,12 +209,16 @@ static int tegra234_aes_cfg(u32 alg, bool encrypt)
return SE_CFG_GCM_FINAL_DECRYPT;
case SE_ALG_CMAC:
return SE_CFG_CMAC | SE_AES_DST_HASH_REG;
case SE_ALG_CMAC_FINAL:
return SE_CFG_CMAC;
case SE_ALG_CBC_MAC:
return SE_AES_ENC_ALG_AES_ENC |
SE_AES_DST_HASH_REG;
}

return -EINVAL;
}
@@ -272,12 +284,32 @@ static int tegra_aes_do_one_req(struct crypto_engine *engine, void *areq)
unsigned int cmdlen;
int ret;
/* Keys in ctx might be stored in KDS. Copy it to request ctx */
rctx->key1_id = tegra_key_get_idx(ctx->se, ctx->key1_id);
if (!rctx->key1_id) {
ret = -ENOMEM;
goto out;
}
rctx->key2_id = 0;
/* If there are 2 keys stored (for XTS), retrieve them both */
if (ctx->key2_id) {
rctx->key2_id = tegra_key_get_idx(ctx->se, ctx->key2_id);
if (!rctx->key2_id) {
ret = -ENOMEM;
goto key1_free;
}
}
/* Set buffer size as a multiple of AES_BLOCK_SIZE */
rctx->datbuf.size = ((req->cryptlen / AES_BLOCK_SIZE) + 1) * AES_BLOCK_SIZE;
rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->datbuf.size,
&rctx->datbuf.addr, GFP_KERNEL);
if (!rctx->datbuf.buf) {
ret = -ENOMEM;
goto key2_free;
}

rctx->iv = (u32 *)req->iv;
rctx->len = req->cryptlen;
@@ -290,6 +322,11 @@ static int tegra_aes_do_one_req(struct crypto_engine *engine, void *areq)
scatterwalk_map_and_copy(rctx->datbuf.buf, req->src, 0, req->cryptlen, 0);
/* Update crypto_config with Local KSLT IDs */
rctx->crypto_config |= SE_AES_KEY_INDEX(rctx->key1_id);
if (rctx->key2_id)
rctx->crypto_config |= SE_AES_KEY2_INDEX(rctx->key2_id);
/* Prepare the command and submit for execution */
cmdlen = tegra_aes_prep_cmd(se, rctx);
ret = tegra_se_host1x_submit(se, cmdlen);
@@ -302,6 +339,13 @@ static int tegra_aes_do_one_req(struct crypto_engine *engine, void *areq)
dma_free_coherent(ctx->se->dev, rctx->datbuf.size,
rctx->datbuf.buf, rctx->datbuf.addr);
key2_free:
if (rctx->key2_id != ctx->key2_id)
tegra_key_invalidate(ctx->se, rctx->key2_id, ctx->alg);
key1_free:
if (rctx->key1_id != ctx->key1_id)
tegra_key_invalidate(ctx->se, rctx->key1_id, ctx->alg);
out:
crypto_finalize_skcipher_request(se->engine, req, ret);

return 0;
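The request path above clones a KDS-resident key into a local keyslot for the lifetime of one request, then frees the clone only when it differs from the ID stored in the context. A minimal userspace sketch of that borrow/compare/release shape; the slot pool and all names here are hypothetical, only the control flow mirrors the driver:

#include <stdio.h>

static unsigned int slot_pool = 1;	/* slot 0 means "invalid", as in the driver */

static unsigned int key_get_idx(unsigned int keyid, int in_kds)
{
	if (!in_kds)
		return keyid;	/* already a local slot: use it directly */
	return ++slot_pool;	/* pretend to clone the KDS key into a fresh local slot */
}

static void key_invalidate(unsigned int slot)
{
	printf("invalidating local slot %u\n", slot);
}

int main(void)
{
	unsigned int ctx_key = 42;	/* ID stored at setkey time */
	unsigned int rctx_key = key_get_idx(ctx_key, 1);

	printf("request uses slot %u\n", rctx_key);
	if (rctx_key != ctx_key)	/* free only what this request cloned */
		key_invalidate(rctx_key);
	return 0;
}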
@@ -439,12 +483,147 @@ static int tegra_aes_kac_manifest(u32 user, u32 alg, u32 keylen)
return manifest;
}

static int tegra_aes_kac2_manifest(u32 user, u32 alg, u32 keylen)
{
int manifest;
manifest = SE_KAC2_USER(user) | SE_KAC2_ORIGIN_SW;
manifest |= SE_KAC2_DECRYPT_EN | SE_KAC2_ENCRYPT_EN;
manifest |= SE_KAC2_TYPE_SYM | SE_KAC2_SUBTYPE_AES;
switch (alg) {
case SE_ALG_CBC:
case SE_ALG_ECB:
case SE_ALG_CTR:
manifest |= SE_KAC2_ENC;
break;
case SE_ALG_XTS:
manifest |= SE_KAC2_XTS;
break;
case SE_ALG_GCM:
manifest |= SE_KAC2_GCM;
break;
case SE_ALG_CMAC:
manifest |= SE_KAC2_CMAC;
break;
case SE_ALG_CBC_MAC:
manifest |= SE_KAC2_ENC;
break;
default:
return -EINVAL;
}
switch (keylen) {
case AES_KEYSIZE_128:
manifest |= SE_KAC2_SIZE_128;
break;
case AES_KEYSIZE_192:
manifest |= SE_KAC2_SIZE_192;
break;
case AES_KEYSIZE_256:
manifest |= SE_KAC2_SIZE_256;
break;
default:
return -EINVAL;
}
return manifest;
}
static inline int tegra264_aes_crypto_cfg(u32 alg, bool encrypt)
{
u32 cfg = SE_AES_CRYPTO_CFG_SCC_DIS;
switch (alg) {
case SE_ALG_ECB:
case SE_ALG_SM4_ECB:
case SE_ALG_CMAC:
case SE_ALG_GMAC:
break;
case SE_ALG_CTR:
cfg |= SE_AES_IV_SEL_REG |
SE_AES_CRYPTO_CFG_CTR_CNTN(1);
break;
case SE_ALG_CBC:
case SE_ALG_CBC_MAC:
case SE_ALG_XTS:
case SE_ALG_GCM:
case SE_ALG_GCM_FINAL:
case SE_ALG_GCM_VERIFY:
cfg |= SE_AES_IV_SEL_REG;
break;
default:
return -EINVAL;
}
return cfg;
}
static int tegra264_aes_cfg(u32 alg, bool encrypt)
{
switch (alg) {
case SE_ALG_CBC:
if (encrypt)
return SE_CFG_CBC_ENCRYPT;
else
return SE_CFG_CBC_DECRYPT;
case SE_ALG_ECB:
if (encrypt)
return SE_CFG_ECB_ENCRYPT;
else
return SE_CFG_ECB_DECRYPT;
case SE_ALG_CTR:
if (encrypt)
return SE_CFG_CTR_ENCRYPT;
else
return SE_CFG_CTR_DECRYPT;
case SE_ALG_XTS:
if (encrypt)
return SE_CFG_XTS_ENCRYPT;
else
return SE_CFG_XTS_DECRYPT;
case SE_ALG_GMAC:
if (encrypt)
return SE_CFG_GMAC_ENCRYPT;
else
return SE_CFG_GMAC_DECRYPT;
case SE_ALG_GCM:
if (encrypt)
return SE_CFG_GCM_ENCRYPT;
else
return SE_CFG_GCM_DECRYPT;
case SE_ALG_GCM_FINAL:
if (encrypt)
return SE_CFG_GCM_FINAL_ENCRYPT;
else
return SE_CFG_GCM_FINAL_DECRYPT;
case SE_ALG_GCM_VERIFY:
return SE_CFG_GCM_VERIFY;
case SE_ALG_CMAC:
return SE_CFG_CMAC | SE_AES_DST_KEYTABLE;
case SE_ALG_CMAC_FINAL:
return SE_CFG_CMAC;
case SE_ALG_CBC_MAC:
return SE_CFG_CBC_MAC | SE_AES_DST_HASH_REG;
}
return -EINVAL;
}
static int tegra_aes_crypt(struct skcipher_request *req, bool encrypt)
{
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct tegra_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
struct tegra_aes_reqctx *rctx = skcipher_request_ctx(req);
struct tegra_se *se = ctx->se;

if (ctx->alg != SE_ALG_XTS) {
if (!IS_ALIGNED(req->cryptlen, crypto_skcipher_blocksize(tfm))) {
@@ -460,12 +639,8 @@ static int tegra_aes_crypt(struct skcipher_request *req, bool encrypt)
return 0;

rctx->encrypt = encrypt;
rctx->config = se->regcfg->cfg(ctx->alg, encrypt);
rctx->crypto_config = se->regcfg->crypto_cfg(ctx->alg, encrypt);

return crypto_transfer_skcipher_request_to_engine(ctx->se->engine, req);
}
@@ -593,10 +768,29 @@ static struct tegra_se_alg tegra_aes_algs[] = {
}
},
};
struct tegra_se_regcfg tegra234_aes_regcfg = {
.cfg = tegra234_aes_cfg,
.crypto_cfg = tegra234_aes_crypto_cfg,
.manifest = tegra_aes_kac_manifest,
};
struct tegra_se_regcfg tegra264_aes_regcfg = {
.cfg = tegra264_aes_cfg,
.crypto_cfg = tegra264_aes_crypto_cfg,
.manifest = tegra_aes_kac2_manifest
};
static void tegra_aes_set_regcfg(struct tegra_se *se)
{
if (se->hw->kac_ver > 1)
se->regcfg = &tegra264_aes_regcfg;
else
se->regcfg = &tegra234_aes_regcfg;
}
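The two regcfg tables above are the whole porting surface between T234 and T264: common code always calls through se->regcfg, and tegra_aes_set_regcfg() picks the table once from the KAC version. A self-contained sketch of that dispatch pattern; the values and version numbers are placeholders, not the real SE_CFG_* encodings:

#include <stdio.h>

struct regcfg {
	int (*cfg)(int alg, int encrypt);
};

static int t234_cfg(int alg, int encrypt) { return 0x1000 | (alg << 1) | encrypt; }
static int t264_cfg(int alg, int encrypt) { return 0x2000 | (alg << 1) | encrypt; }

static const struct regcfg t234_regcfg = { .cfg = t234_cfg };
static const struct regcfg t264_regcfg = { .cfg = t264_cfg };

int main(void)
{
	int kac_ver = 2;	/* as reported by the per-SoC hw descriptor */
	const struct regcfg *regcfg = kac_ver > 1 ? &t264_regcfg : &t234_regcfg;

	/* common code never branches on the SoC again */
	printf("config word: %#x\n", regcfg->cfg(1, 1));
	return 0;
}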
static unsigned int tegra_gmac_prep_cmd(struct tegra_se *se, struct tegra_aead_reqctx *rctx)
{
unsigned int data_count, res_bits, i = 0, j;
u32 *cpuvaddr = se->cmdbuf->addr;

data_count = (rctx->assoclen / AES_BLOCK_SIZE);
@@ -609,6 +803,11 @@ static unsigned int tegra_gmac_prep_cmd(struct tegra_se *se, struct tegra_aead_r
if (!res_bits)
data_count--;
cpuvaddr[i++] = host1x_opcode_setpayload(SE_CRYPTO_CTR_REG_COUNT);
cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->linear_ctr);
for (j = 0; j < SE_CRYPTO_CTR_REG_COUNT; j++)
cpuvaddr[i++] = rctx->iv[j];
cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->last_blk, 1);
cpuvaddr[i++] = SE_LAST_BLOCK_VAL(data_count) |
SE_LAST_BLOCK_RES_BITS(res_bits);
@@ -616,6 +815,7 @@ static unsigned int tegra_gmac_prep_cmd(struct tegra_se *se, struct tegra_aead_r
cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 4);
cpuvaddr[i++] = rctx->config;
cpuvaddr[i++] = rctx->crypto_config;
cpuvaddr[i++] = lower_32_bits(rctx->inbuf.addr);
cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->inbuf.addr)) |
SE_ADDR_HI_SZ(rctx->assoclen);
@@ -629,6 +829,8 @@ static unsigned int tegra_gmac_prep_cmd(struct tegra_se *se, struct tegra_aead_r
cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);
dev_dbg(se->dev, "cfg %#x crypto cfg %#x\n", rctx->config, rctx->crypto_config);
return i;
}
@@ -687,6 +889,7 @@ static unsigned int tegra_gcm_crypt_prep_cmd(struct tegra_se *se, struct tegra_a
host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

dev_dbg(se->dev, "cfg %#x crypto cfg %#x\n", rctx->config, rctx->crypto_config);

return i;
}
@@ -721,8 +924,9 @@ static int tegra_gcm_prep_final_cmd(struct tegra_se *se, u32 *cpuvaddr,
cpuvaddr[i++] = se_host1x_opcode_incr(se->hw->regs->config, 6);
cpuvaddr[i++] = rctx->config;
cpuvaddr[i++] = rctx->crypto_config;
cpuvaddr[i++] = lower_32_bits(rctx->inbuf.addr);
cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->outbuf.addr)) |
SE_ADDR_HI_SZ(rctx->authsize);

/* Destination Address */
cpuvaddr[i++] = lower_32_bits(rctx->outbuf.addr);
@@ -749,9 +953,9 @@ static int tegra_gcm_do_gmac(struct tegra_aead_ctx *ctx, struct tegra_aead_reqct
scatterwalk_map_and_copy(rctx->inbuf.buf,
rctx->src_sg, 0, rctx->assoclen, 0);

rctx->config = se->regcfg->cfg(ctx->mac_alg, rctx->encrypt);
rctx->crypto_config = se->regcfg->crypto_cfg(ctx->mac_alg, rctx->encrypt) |
SE_AES_KEY_INDEX(rctx->key_id);

cmdlen = tegra_gmac_prep_cmd(se, rctx);
@@ -766,9 +970,9 @@ static int tegra_gcm_do_crypt(struct tegra_aead_ctx *ctx, struct tegra_aead_reqc
scatterwalk_map_and_copy(rctx->inbuf.buf, rctx->src_sg,
rctx->assoclen, rctx->cryptlen, 0);

rctx->config = se->regcfg->cfg(ctx->alg, rctx->encrypt);
rctx->crypto_config = se->regcfg->crypto_cfg(ctx->alg, rctx->encrypt) |
SE_AES_KEY_INDEX(rctx->key_id);

/* Prepare command and submit */
cmdlen = tegra_gcm_crypt_prep_cmd(se, rctx);
@@ -789,9 +993,9 @@ static int tegra_gcm_do_final(struct tegra_aead_ctx *ctx, struct tegra_aead_reqc
u32 *cpuvaddr = se->cmdbuf->addr;
int cmdlen, ret, offset;

rctx->config = se->regcfg->cfg(ctx->final_alg, rctx->encrypt);
rctx->crypto_config = se->regcfg->crypto_cfg(ctx->final_alg, rctx->encrypt) |
SE_AES_KEY_INDEX(rctx->key_id);

/* Prepare command and submit */
cmdlen = tegra_gcm_prep_final_cmd(se, cpuvaddr, rctx);
@@ -809,15 +1013,48 @@ static int tegra_gcm_do_final(struct tegra_aead_ctx *ctx, struct tegra_aead_reqc
return 0;
}

static int tegra_gcm_hw_verify(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx, u8 *mac)
{
struct tegra_se *se = ctx->se;
u32 result, *cpuvaddr = se->cmdbuf->addr;
int size, ret;
memcpy(rctx->inbuf.buf, mac, rctx->authsize);
rctx->inbuf.size = rctx->authsize;
rctx->config = se->regcfg->cfg(ctx->verify_alg, rctx->encrypt);
rctx->crypto_config = se->regcfg->crypto_cfg(ctx->verify_alg, rctx->encrypt) |
SE_AES_KEY_INDEX(rctx->key_id);
/* Prepare command and submit */
size = tegra_gcm_prep_final_cmd(se, cpuvaddr, rctx);
ret = tegra_se_host1x_submit(se, size);
if (ret)
return ret;
memcpy(&result, rctx->outbuf.buf, 4);
if (result != SE_GCM_VERIFY_OK)
return -EBADMSG;
return 0;
}
static int tegra_gcm_do_verify(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx)
{
struct tegra_se *se = ctx->se;
int offset, ret;
u8 mac[16];

offset = rctx->assoclen + rctx->cryptlen;
scatterwalk_map_and_copy(mac, rctx->src_sg, offset, rctx->authsize, 0);

if (se->hw->support_aad_verify)
ret = tegra_gcm_hw_verify(ctx, rctx, mac);
else
ret = crypto_memneq(rctx->outbuf.buf, mac, rctx->authsize);
if (ret)
return -EBADMSG;

return 0;
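On parts without AAD verification the tag check above falls back to crypto_memneq(), which compares without an early exit so the runtime does not leak the position of the first mismatching byte. A standalone sketch of that constant-time comparison (simplified; the kernel's implementation is more careful):

#include <stddef.h>
#include <stdio.h>

static int ct_memneq(const void *a, const void *b, size_t n)
{
	const unsigned char *pa = a, *pb = b;
	unsigned char diff = 0;
	size_t i;

	for (i = 0; i < n; i++)
		diff |= pa[i] ^ pb[i];	/* accumulate, never break early */
	return diff != 0;	/* nonzero means the tags differ */
}

int main(void)
{
	unsigned char want[16] = { 0xde, 0xad }, got[16] = { 0xde, 0xae };

	printf("tag mismatch: %d\n", ct_memneq(want, got, sizeof(want)));
	return 0;
}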
@@ -860,13 +1097,14 @@ static unsigned int tegra_cbcmac_prep_cmd(struct tegra_se *se, struct tegra_aead
SE_ADDR_HI_SZ(0x10); /* HW always generates 128 bit tag */

cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1);
cpuvaddr[i++] = SE_AES_OP_WRSTALL | SE_AES_OP_LASTBUF | SE_AES_OP_START;

cpuvaddr[i++] = se_host1x_opcode_nonincr(host1x_uclass_incr_syncpt_r(), 1);
cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

dev_dbg(se->dev, "cfg %#x crypto cfg %#x\n", rctx->config, rctx->crypto_config);

return i;
}
@@ -910,15 +1148,13 @@ static unsigned int tegra_ctr_prep_cmd(struct tegra_se *se, struct tegra_aead_re
return i;
}

static int tegra_ccm_do_cbcmac(struct tegra_se *se, struct tegra_aead_reqctx *rctx)
{
int cmdlen;

rctx->config = se->regcfg->cfg(SE_ALG_CBC_MAC, rctx->encrypt);
rctx->crypto_config = se->regcfg->crypto_cfg(SE_ALG_CBC_MAC,
rctx->encrypt) | SE_AES_KEY_INDEX(rctx->key_id);

/* Prepare command and submit */
cmdlen = tegra_cbcmac_prep_cmd(se, rctx);
@@ -1068,9 +1304,8 @@ static int tegra_ccm_ctr_result(struct tegra_se *se, struct tegra_aead_reqctx *r
return 0;
}

static int tegra_ccm_compute_auth(struct tegra_se *se, struct tegra_aead_reqctx *rctx)
{
struct scatterlist *sg;
int offset, ret;
@@ -1089,23 +1324,22 @@ static int tegra_ccm_compute_auth(struct tegra_aead_ctx *ctx, struct tegra_aead_
rctx->inbuf.size = offset;

ret = tegra_ccm_do_cbcmac(se, rctx);
if (ret)
return ret;

return tegra_ccm_mac_result(se, rctx);
}

static int tegra_ccm_do_ctr(struct tegra_se *se, struct tegra_aead_reqctx *rctx)
{
unsigned int cmdlen, offset = 0;
struct scatterlist *sg = rctx->src_sg;
int ret;

rctx->config = se->regcfg->cfg(SE_ALG_CTR, rctx->encrypt);
rctx->crypto_config = se->regcfg->crypto_cfg(SE_ALG_CTR, rctx->encrypt) |
SE_AES_KEY_INDEX(rctx->key_id);

/* Copy authdata in the top of buffer for encryption/decryption */
if (rctx->encrypt)
@@ -1181,19 +1415,26 @@ static int tegra_ccm_do_one_req(struct crypto_engine *engine, void *areq)
else
rctx->cryptlen = req->cryptlen - ctx->authsize;
/* Keys in ctx might be stored in KDS. Copy it to local keyslot */
rctx->key_id = tegra_key_get_idx(ctx->se, ctx->key_id);
if (!rctx->key_id)
goto out;
rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100;

/* Allocate buffers required */
rctx->inbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->inbuf.size,
&rctx->inbuf.addr, GFP_KERNEL);
if (!rctx->inbuf.buf) {
ret = -ENOMEM;
goto key_free;
}

rctx->outbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100;

rctx->outbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->outbuf.size,
&rctx->outbuf.addr, GFP_KERNEL);
if (!rctx->outbuf.buf) {
ret = -ENOMEM;
goto inbuf_free;
}

ret = tegra_ccm_crypt_init(req, se, rctx);
@@ -1203,34 +1444,38 @@ static int tegra_ccm_do_one_req(struct crypto_engine *engine, void *areq)
if (rctx->encrypt) {
/* CBC MAC Operation */
ret = tegra_ccm_compute_auth(se, rctx);
if (ret)
goto outbuf_free;

/* CTR operation */
ret = tegra_ccm_do_ctr(se, rctx);
if (ret)
goto outbuf_free;
} else {
/* CTR operation */
ret = tegra_ccm_do_ctr(se, rctx);
if (ret)
goto outbuf_free;

/* CBC MAC Operation */
ret = tegra_ccm_compute_auth(se, rctx);
if (ret)
goto outbuf_free;
}

outbuf_free:
dma_free_coherent(ctx->se->dev, rctx->outbuf.size,
rctx->outbuf.buf, rctx->outbuf.addr);
inbuf_free:
dma_free_coherent(ctx->se->dev, rctx->inbuf.size,
rctx->inbuf.buf, rctx->inbuf.addr);
key_free:
/* Free the keyslot if it is cloned for this request */
if (rctx->key_id != ctx->key_id)
tegra_key_invalidate(ctx->se, rctx->key_id, ctx->alg);
out:
crypto_finalize_aead_request(ctx->se->engine, req, ret);

return 0;
@@ -1242,6 +1487,7 @@ static int tegra_gcm_do_one_req(struct crypto_engine *engine, void *areq)
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
struct tegra_aead_ctx *ctx = crypto_aead_ctx(tfm);
struct tegra_aead_reqctx *rctx = aead_request_ctx(req);
struct tegra_se *se = ctx->se;
int ret;

rctx->src_sg = req->src;
@@ -1254,6 +1500,12 @@ static int tegra_gcm_do_one_req(struct crypto_engine *engine, void *areq)
else
rctx->cryptlen = req->cryptlen - ctx->authsize;
/* Keys in ctx might be stored in KDS. Copy it to local keyslot */
rctx->key_id = tegra_key_get_idx(ctx->se, ctx->key_id);
if (!rctx->key_id)
goto key_err;
/* Allocate buffers required */
rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen;
rctx->inbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->inbuf.size,
@@ -1288,17 +1540,22 @@ static int tegra_gcm_do_one_req(struct crypto_engine *engine, void *areq)
}

/* GCM_FINAL operation */
/* Need not do FINAL operation if hw supports MAC verification */
if (rctx->encrypt || !se->hw->support_aad_verify) {
ret = tegra_gcm_do_final(ctx, rctx);
if (ret)
goto out;
}

if (!rctx->encrypt)
ret = tegra_gcm_do_verify(ctx, rctx);

out:
if (rctx->key_id != ctx->key_id)
tegra_key_invalidate(ctx->se, rctx->key_id, ctx->alg);
key_err:
dma_free_coherent(ctx->se->dev, rctx->outbuf.size,
rctx->outbuf.buf, rctx->outbuf.addr);
outbuf_err:
dma_free_coherent(ctx->se->dev, rctx->inbuf.size,
rctx->inbuf.buf, rctx->inbuf.addr);
@@ -1352,10 +1609,6 @@ static int tegra_gcm_cra_init(struct crypto_aead *tfm)
struct tegra_aead_ctx *ctx = crypto_aead_ctx(tfm);
struct aead_alg *alg = crypto_aead_alg(tfm);
struct tegra_se_alg *se_alg;
#ifdef NV_CONFTEST_REMOVE_STRUCT_CRYPTO_ENGINE_CTX
se_alg = container_of(alg, struct tegra_se_alg, alg.aead.base);
@@ -1368,13 +1621,10 @@ static int tegra_gcm_cra_init(struct crypto_aead *tfm)
ctx->se = se_alg->se_dev;
ctx->key_id = 0;
ctx->alg = SE_ALG_GCM;
ctx->final_alg = SE_ALG_GCM_FINAL;
ctx->verify_alg = SE_ALG_GCM_VERIFY;
ctx->mac_alg = SE_ALG_GMAC;
#ifndef NV_CONFTEST_REMOVE_STRUCT_CRYPTO_ENGINE_CTX
ctx->enginectx.op.prepare_request = NULL;
@@ -1480,6 +1730,12 @@ static unsigned int tegra_cmac_prep_cmd(struct tegra_se *se, struct tegra_cmac_r
data_count--;

if (rctx->task & SHA_FIRST) {
/*
 * T264 needs INIT to be set for the first operation,
 * whereas T234 will return an error if INIT is set.
 * Differentiate T264 from T234 based on CFG.
 */
if ((rctx->config & SE_AES_DST_KEYTABLE) == SE_AES_DST_KEYTABLE)
op |= SE_AES_OP_INIT;

rctx->task &= ~SHA_FIRST;

cpuvaddr[i++] = host1x_opcode_setpayload(SE_CRYPTO_CTR_REG_COUNT);
@@ -1501,8 +1757,11 @@ static unsigned int tegra_cmac_prep_cmd(struct tegra_se *se, struct tegra_cmac_r
cpuvaddr[i++] = lower_32_bits(rctx->datbuf.addr);
cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) |
SE_ADDR_HI_SZ(rctx->datbuf.size);

/* Destination Address */
cpuvaddr[i++] = rctx->digest.addr;
cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->digest.addr)) |
SE_ADDR_HI_SZ(rctx->digest.size));

cpuvaddr[i++] = se_host1x_opcode_nonincr(se->hw->regs->op, 1);
cpuvaddr[i++] = op;
@@ -1511,6 +1770,8 @@ static unsigned int tegra_cmac_prep_cmd(struct tegra_se *se, struct tegra_cmac_r
cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

dev_dbg(se->dev, "cfg %#x\n", rctx->config);

return i;
}
@@ -1557,8 +1818,9 @@ static int tegra_cmac_do_update(struct ahash_request *req)
rctx->src_sg = req->src;
rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue;
rctx->total_len += rctx->datbuf.size;
rctx->config = se->regcfg->cfg(ctx->alg, 0);
rctx->crypto_config = se->regcfg->crypto_cfg(ctx->alg, 0) |
SE_AES_KEY_INDEX(rctx->key_id);

/*
* Keep one block and residue bytes in residue and
@@ -1618,7 +1880,6 @@ static int tegra_cmac_do_final(struct ahash_request *req)
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
struct tegra_cmac_ctx *ctx = crypto_ahash_ctx(tfm);
struct tegra_se *se = ctx->se;
int ret = 0, i, cmdlen;
if (!req->nbytes && !rctx->total_len && ctx->fallback_tfm) {
@@ -1628,7 +1889,7 @@ static int tegra_cmac_do_final(struct ahash_request *req)
rctx->datbuf.size = rctx->residue.size;
rctx->total_len += rctx->residue.size;
rctx->config = se->regcfg->cfg(ctx->final_alg, 0);

if (rctx->residue.size) {
rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->residue.size,
@@ -1649,13 +1910,14 @@ static int tegra_cmac_do_final(struct ahash_request *req)
if (ret)
goto out;

memcpy(req->result, rctx->digest.buf, rctx->digest.size);

for (i = 0; i < CMAC_RESULT_REG_COUNT; i++)
writel(0, se->base + se->hw->regs->result + (i * 4));

if (rctx->key_id != ctx->key_id)
tegra_key_invalidate(ctx->se, rctx->key_id, ctx->alg);

out:
if (rctx->residue.size)
dma_free_coherent(se->dev, rctx->datbuf.size,
@@ -1663,6 +1925,9 @@ out:
out_free:
dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm) * 2,
rctx->residue.buf, rctx->residue.addr);
dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
rctx->digest.addr);
return ret;
}
@@ -1716,7 +1981,6 @@ static int tegra_cmac_cra_init(struct crypto_tfm *tfm)
struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
struct tegra_se_alg *se_alg;
const char *algname;

algname = crypto_tfm_alg_name(tfm);
#ifdef NV_CONFTEST_REMOVE_STRUCT_CRYPTO_ENGINE_CTX
@@ -1729,14 +1993,9 @@ static int tegra_cmac_cra_init(struct crypto_tfm *tfm)
ctx->se = se_alg->se_dev;
ctx->key_id = 0;
ctx->alg = SE_ALG_CMAC;
ctx->final_alg = SE_ALG_CMAC_FINAL;
#ifndef NV_CONFTEST_REMOVE_STRUCT_CRYPTO_ENGINE_CTX
ctx->enginectx.op.prepare_request = NULL;
@@ -1770,13 +2029,27 @@ static int tegra_cmac_init(struct ahash_request *req)
rctx->total_len = 0;
rctx->datbuf.size = 0;
rctx->residue.size = 0;
rctx->key_id = 0;
rctx->task = SHA_FIRST;
rctx->blk_size = crypto_ahash_blocksize(tfm);
rctx->digest.size = crypto_ahash_digestsize(tfm);
/* Retrieve the key slot for CMAC */
if (ctx->key_id) {
rctx->key_id = tegra_key_get_idx(ctx->se, ctx->key_id);
if (!rctx->key_id)
return -ENOMEM;
}
rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size,
&rctx->digest.addr, GFP_KERNEL);
if (!rctx->digest.buf)
goto digbuf_fail;
rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size * 2,
&rctx->residue.addr, GFP_KERNEL);
if (!rctx->residue.buf)
goto resbuf_fail;

rctx->residue.size = 0;
rctx->datbuf.size = 0;
@@ -1786,6 +2059,15 @@ static int tegra_cmac_init(struct ahash_request *req)
writel(0, se->base + se->hw->regs->result + (i * 4));

return 0;
resbuf_fail:
dma_free_coherent(se->dev, rctx->blk_size, rctx->digest.buf,
rctx->digest.addr);
digbuf_fail:
if (rctx->key_id != ctx->key_id)
tegra_key_invalidate(ctx->se, rctx->key_id, ctx->alg);
return -ENOMEM;
}
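The init path above acquires a keyslot and two DMA buffers and unwinds them in reverse order through goto labels, so each failure point releases exactly what was set up before it. The same shape in plain C with heap allocations; the names are hypothetical and only the unwind structure mirrors the function above:

#include <stdio.h>
#include <stdlib.h>

static int init_request(void)
{
	void *digest, *residue;

	digest = malloc(16);
	if (!digest)
		goto digbuf_fail;

	residue = malloc(32);
	if (!residue)
		goto resbuf_fail;	/* digest must be undone, residue not */

	printf("both buffers ready\n");
	free(residue);
	free(digest);
	return 0;

resbuf_fail:
	free(digest);	/* release only what succeeded, in reverse order */
digbuf_fail:
	return -1;
}

int main(void)
{
	return init_request() ? 1 : 0;
}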
static int tegra_cmac_setkey(struct crypto_ahash *tfm, const u8 *key, static int tegra_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
@@ -1944,7 +2226,7 @@ static struct tegra_se_alg tegra_cmac_algs[] = {
.halg.statesize = sizeof(struct tegra_cmac_reqctx),
.halg.base = {
.cra_name = "cmac(aes)",
.cra_driver_name = "cmac-aes-tegra",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_AHASH,
.cra_blocksize = AES_BLOCK_SIZE,
@@ -1970,7 +2252,7 @@ int tegra_init_aes(struct tegra_se *se)
struct skcipher_engine_alg *sk_alg;
int i, ret;

tegra_aes_set_regcfg(se);

for (i = 0; i < ARRAY_SIZE(tegra_aes_algs); i++) {
sk_alg = &tegra_aes_algs[i].alg.skcipher;
@@ -2033,7 +2315,7 @@ int tegra_init_aes(struct tegra_se *se)
struct skcipher_alg *sk_alg;
int i, ret;

tegra_aes_set_regcfg(se);

for (i = 0; i < ARRAY_SIZE(tegra_aes_algs); i++) {
sk_alg = &tegra_aes_algs[i].alg.skcipher;


@@ -15,6 +15,7 @@
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/sm3.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
@@ -106,6 +107,11 @@ static int tegra_sha_get_config(u32 alg)
cfg |= SE_SHA_ENC_ALG_SHA;
cfg |= SE_SHA_ENC_MODE_SHA3_512;
break;
case SE_ALG_SM3_256:
cfg |= SE_SHA_ENC_ALG_SM3;
cfg |= SE_SHA_ENC_MODE_SM3_256;
break;
default:
return -EINVAL;
}
@@ -445,6 +451,9 @@ static int tegra_sha_do_one_req(struct crypto_engine *engine, void *areq)
if (rctx->task & SHA_FINAL) {
ret = tegra_sha_do_final(req);
rctx->task &= ~SHA_FINAL;
if (rctx->key_id)
tegra_key_invalidate(ctx->se, rctx->key_id, ctx->alg);
}

crypto_finalize_hash_request(se->engine, req, ret);
@@ -543,12 +552,19 @@ static int tegra_sha_init(struct ahash_request *req)
rctx->total_len = 0;
rctx->datbuf.size = 0;
rctx->residue.size = 0;
rctx->key_id = 0;
rctx->task = SHA_FIRST;
rctx->alg = ctx->alg;
rctx->blk_size = crypto_ahash_blocksize(tfm);
rctx->digest.size = crypto_ahash_digestsize(tfm);
/* Retrieve the key slot for HMAC */
if (ctx->key_id) {
rctx->key_id = tegra_key_get_idx(ctx->se, ctx->key_id);
if (!rctx->key_id)
return -ENOMEM;
}
rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size,
&rctx->digest.addr, GFP_KERNEL);
if (!rctx->digest.buf)
@@ -565,6 +581,9 @@ resbuf_fail:
dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
rctx->digest.addr);
digbuf_fail:
if (rctx->key_id != ctx->key_id)
tegra_key_invalidate(ctx->se, rctx->key_id, ctx->alg);
return -ENOMEM;
}
@@ -1093,6 +1112,42 @@ static struct tegra_se_alg tegra_hash_algs[] = {
}
};
static struct tegra_se_alg tegra_sm3_algs[] = {
{
.alg.ahash = {
#ifdef NV_CONFTEST_REMOVE_STRUCT_CRYPTO_ENGINE_CTX
.base = {
#endif
.init = tegra_sha_init,
.update = tegra_sha_update,
.final = tegra_sha_final,
.finup = tegra_sha_finup,
.digest = tegra_sha_digest,
.export = tegra_sha_export,
.import = tegra_sha_import,
.halg.digestsize = SM3_DIGEST_SIZE,
.halg.statesize = sizeof(struct tegra_sha_reqctx),
.halg.base = {
.cra_name = "sm3",
.cra_driver_name = "tegra-se-sm3",
.cra_priority = 300,
.cra_flags = CRYPTO_ALG_TYPE_AHASH,
.cra_blocksize = SM3_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct tegra_sha_ctx),
.cra_alignmask = 0,
.cra_module = THIS_MODULE,
.cra_init = tegra_sha_cra_init,
.cra_exit = tegra_sha_cra_exit,
}
#ifdef NV_CONFTEST_REMOVE_STRUCT_CRYPTO_ENGINE_CTX
},
.op.do_one_request = tegra_sha_do_one_req,
#endif
}
},
};
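For context, a kernel user would reach the ahash registered above through the generic crypto API; "tegra-se-sm3" is selected when it wins on cra_priority. A hedged sketch of such a caller, not part of this commit, with error handling trimmed and crypto_wait_req() handling the asynchronous completion this engine-backed driver produces:

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int sm3_digest_example(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_ahash("sm3", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		crypto_free_ahash(tfm);
		return -ENOMEM;
	}

	sg_init_one(&sg, data, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);
	ahash_request_set_crypt(req, &sg, out, len);

	/* wait for the engine to complete the asynchronous request */
	ret = crypto_wait_req(crypto_ahash_digest(req), &wait);

	ahash_request_free(req);
	crypto_free_ahash(tfm);
	return ret;
}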
static int tegra_hash_kac_manifest(u32 user, u32 alg, u32 keylen)
{
int manifest;
@@ -1126,6 +1181,57 @@ static int tegra_hash_kac_manifest(u32 user, u32 alg, u32 keylen)
return manifest;
}
static int tegra_hash_kac2_manifest(u32 user, u32 alg, u32 keylen)
{
int manifest;
manifest = SE_KAC2_USER(user) | SE_KAC2_ORIGIN_SW;
manifest |= SE_KAC2_DECRYPT_EN | SE_KAC2_ENCRYPT_EN;
manifest |= SE_KAC2_SUBTYPE_SHA | SE_KAC2_TYPE_SYM;
switch (alg) {
case SE_ALG_HMAC_SHA224:
case SE_ALG_HMAC_SHA256:
case SE_ALG_HMAC_SHA384:
case SE_ALG_HMAC_SHA512:
manifest |= SE_KAC2_HMAC;
break;
default:
return -EINVAL;
}
switch (keylen) {
case AES_KEYSIZE_128:
manifest |= SE_KAC2_SIZE_128;
break;
case AES_KEYSIZE_192:
manifest |= SE_KAC2_SIZE_192;
break;
case AES_KEYSIZE_256:
default:
manifest |= SE_KAC2_SIZE_256;
break;
}
return manifest;
}
struct tegra_se_regcfg tegra234_hash_regcfg = {
.manifest = tegra_hash_kac_manifest,
};
struct tegra_se_regcfg tegra264_hash_regcfg = {
.manifest = tegra_hash_kac2_manifest,
};
static void tegra_hash_set_regcfg(struct tegra_se *se)
{
if (se->hw->kac_ver > 1)
se->regcfg = &tegra264_hash_regcfg;
else
se->regcfg = &tegra234_hash_regcfg;
}
int tegra_init_hash(struct tegra_se *se)
{
#ifdef NV_CONFTEST_REMOVE_STRUCT_CRYPTO_ENGINE_CTX
@@ -1135,7 +1241,7 @@ int tegra_init_hash(struct tegra_se *se)
#endif
int i, ret;

tegra_hash_set_regcfg(se);

for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++) {
tegra_hash_algs[i].se_dev = se;
@@ -1154,8 +1260,33 @@ int tegra_init_hash(struct tegra_se *se)
}
}
if (!se->hw->support_sm_alg)
return 0;
for (i = 0; i < ARRAY_SIZE(tegra_sm3_algs); i++) {
tegra_sm3_algs[i].se_dev = se;
alg = &tegra_sm3_algs[i].alg.ahash;
ret = CRYPTO_REGISTER(ahash, alg);
if (ret) {
#ifdef NV_CONFTEST_REMOVE_STRUCT_CRYPTO_ENGINE_CTX
dev_err(se->dev, "failed to register %s\n",
alg->base.halg.base.cra_name);
#else
dev_err(se->dev, "failed to register %s\n",
alg->halg.base.cra_name);
#endif
goto sm3_err;
}
}
dev_info(se->dev, "registered HASH algorithms\n");
return 0;
sm3_err:
for (--i; i >= 0; i--)
CRYPTO_UNREGISTER(ahash, &tegra_sm3_algs[i].alg.ahash);
i = ARRAY_SIZE(tegra_hash_algs);
sha_err:
for (--i; i >= 0; i--)
CRYPTO_UNREGISTER(ahash, &tegra_hash_algs[i].alg.ahash);
@@ -1169,4 +1300,10 @@ void tegra_deinit_hash(struct tegra_se *se)
for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++)
CRYPTO_UNREGISTER(ahash, &tegra_hash_algs[i].alg.ahash);
if (!se->hw->support_sm_alg)
return;
for (i = 0; i < ARRAY_SIZE(tegra_sm3_algs); i++)
CRYPTO_UNREGISTER(ahash, &tegra_sm3_algs[i].alg.ahash);
}


@@ -1,15 +1,239 @@
// SPDX-License-Identifier: GPL-2.0-only
// SPDX-FileCopyrightText: Copyright (c) 2023-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
/*
* Crypto driver for NVIDIA Security Engine for block cipher operations.
*/
#include <linux/bitfield.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/platform_device.h>
#include "tegra-se.h"
#define KDS_ALLOC_MUTEX 0
#define KDS_ALLOC_RGN_ATTR0 0x4
#define KDS_ALLOC_RGN_ATTR1 0x8
#define KDS_ALLOC_OP_TRIG 0x18
#define KDS_ALLOC_OP_STATUS 0x1c
#define KDS_MUTEX_MST_ID(x) FIELD_PREP(GENMASK(13, 8), x)
#define KDS_MUTEX_BUSY BIT(4)
#define KDS_MUTEX_OP(x) FIELD_PREP(BIT(0), x)
#define KDS_MUTEX_REQ KDS_MUTEX_OP(1)
#define KDS_MUTEX_RELEASE KDS_MUTEX_OP(0)
#define KDS_RGN_ATTR_OWNER(x) FIELD_PREP(GENMASK(31, 24), x)
#define KDS_RGN_ATTR_TYPE(x) FIELD_PREP(GENMASK(21, 20), x)
#define KDS_RGN_ATTR_TYPE_NORMAL KDS_RGN_ATTR_TYPE(0)
#define KDS_RGN_ATTR_MAX_KSIZE_256 FIELD_PREP(GENMASK(17, 16), 1)
#define KDS_RGN_ATTR_NUM_KEYS(x) FIELD_PREP(GENMASK(15, 0), x)
#define KDS_ALLOC_OP_STATUS_FIELD(x) FIELD_PREP(GENMASK(1, 0), x)
#define KDS_ALLOC_OP_IDLE KDS_ALLOC_OP_STATUS_FIELD(0)
#define KDS_ALLOC_OP_BUSY KDS_ALLOC_OP_STATUS_FIELD(1)
#define KDS_ALLOC_OP_PASS KDS_ALLOC_OP_STATUS_FIELD(2)
#define KDS_ALLOC_OP_FAIL KDS_ALLOC_OP_STATUS_FIELD(3)
#define KDS_ALLOC_RGN_ID_MASK GENMASK(14, 4)
#define SE_KSLT_KEY_ID_MASK GENMASK(15, 0)
#define SE_KSLT_REGION_ID_MASK GENMASK(25, 16)
#define SE_KSLT_TABLE_ID_MASK GENMASK(31, 26)
#define SE_KSLT_TABLE_ID(x) FIELD_PREP(SE_KSLT_TABLE_ID_MASK, x)
#define SE_KSLT_TABLE_ID_GLOBAL SE_KSLT_TABLE_ID(48)
#define KDS_TIMEOUT 100000 /* 100 msec */
#define KDS_MAX_KEYID 63
#define KDS_ID_VALID_MASK GENMASK(KDS_MAX_KEYID, 0)
#define TEGRA_GPSE 3
static u32 kds_region_id;
static u64 kds_keyid = BIT(0);
struct tegra_kds {
struct device *dev;
void __iomem *base;
u32 owner;
u32 id;
};
static u16 tegra_kds_keyid_alloc(void)
{
u16 keyid;
/* Check if all key slots are full */
if (kds_keyid == GENMASK(KDS_MAX_KEYID, 0))
return 0;
keyid = ffz(kds_keyid);
kds_keyid |= BIT(keyid);
return keyid;
}
static void tegra_kds_keyid_free(u32 id)
{
kds_keyid &= ~(BIT(id));
}
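The allocator above is a single u64 bitmap: bit 0 is pre-set so an ID of 0 can signal failure, ffz() hands out the lowest clear bit, and free simply clears it. A userspace model of the same logic, open-coding ffz for portability:

#include <stdint.h>
#include <stdio.h>

static uint64_t bitmap = 1;	/* bit 0 pre-set, mirroring kds_keyid = BIT(0) */

static unsigned int id_alloc(void)
{
	unsigned int id;

	if (bitmap == UINT64_MAX)
		return 0;	/* all 64 slots in use */

	for (id = 0; bitmap & (1ULL << id); id++)
		;		/* first zero bit, i.e. ffz() */
	bitmap |= 1ULL << id;
	return id;
}

static void id_free(unsigned int id)
{
	bitmap &= ~(1ULL << id);
}

int main(void)
{
	unsigned int a = id_alloc(), b = id_alloc();

	printf("allocated %u and %u\n", a, b);	/* 1 and 2 */
	id_free(a);
	printf("lowest free ID is reused: %u\n", id_alloc());	/* 1 again */
	return 0;
}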
static inline void kds_writel(struct tegra_kds *kds, unsigned int offset,
unsigned int val)
{
writel_relaxed(val, kds->base + offset);
}
static inline u32 kds_readl(struct tegra_kds *kds, unsigned int offset)
{
return readl_relaxed(kds->base + offset);
}
static int kds_mutex_lock(struct tegra_kds *kds)
{
u32 val;
int ret;
ret = readl_relaxed_poll_timeout(kds->base + KDS_ALLOC_MUTEX,
val, !(val & KDS_MUTEX_BUSY),
10, KDS_TIMEOUT);
if (ret)
return ret;
val = KDS_MUTEX_MST_ID(TEGRA_GPSE) |
KDS_MUTEX_REQ;
kds_writel(kds, KDS_ALLOC_MUTEX, val);
return 0;
}
static void kds_mutex_unlock(struct tegra_kds *kds)
{
u32 val;
val = KDS_MUTEX_MST_ID(TEGRA_GPSE) |
KDS_MUTEX_RELEASE;
kds_writel(kds, KDS_ALLOC_MUTEX, val);
}
static int tegra_kds_region_setup(struct tegra_kds *kds)
{
u32 val, region_attr;
int ret;
region_attr = KDS_RGN_ATTR_OWNER(TEGRA_GPSE) |
KDS_RGN_ATTR_TYPE_NORMAL |
KDS_RGN_ATTR_MAX_KSIZE_256 |
KDS_RGN_ATTR_NUM_KEYS(64);
ret = kds_mutex_lock(kds);
if (ret)
return ret;
kds_writel(kds, KDS_ALLOC_RGN_ATTR0, region_attr);
kds_writel(kds, KDS_ALLOC_RGN_ATTR1, BIT(TEGRA_GPSE));
kds_writel(kds, KDS_ALLOC_OP_TRIG, 1);
ret = readl_relaxed_poll_timeout(kds->base + KDS_ALLOC_OP_STATUS,
val, !(val & KDS_ALLOC_OP_BUSY), 10, KDS_TIMEOUT);
if (ret) {
dev_err(kds->dev, "Region allocation timed out\n");
goto out;
}
if (KDS_ALLOC_OP_STATUS_FIELD(val) == KDS_ALLOC_OP_FAIL) {
dev_err(kds->dev, "Region allocation failed\n");
ret = -EINVAL;
goto out;
}
kds->id = FIELD_GET(KDS_ALLOC_RGN_ID_MASK, val);
kds_region_id = kds->id;
dev_info(kds->dev, "Allocated Global Key ID table with ID %#x\n", kds->id);
out:
kds_mutex_unlock(kds);
return ret;
}
bool tegra_key_in_kds(u32 keyid)
{
if (!((keyid & SE_KSLT_TABLE_ID_MASK) == SE_KSLT_TABLE_ID_GLOBAL))
return false;
return ((BIT(keyid & SE_KSLT_KEY_ID_MASK) & KDS_ID_VALID_MASK) &&
(BIT(keyid & SE_KSLT_KEY_ID_MASK) & kds_keyid));
}
EXPORT_SYMBOL(tegra_key_in_kds);
u32 tegra_kds_get_id(void)
{
u32 kds_id, keyid;
keyid = tegra_kds_keyid_alloc();
if (!keyid)
return -ENOMEM;
kds_id = SE_KSLT_TABLE_ID_GLOBAL |
FIELD_PREP(SE_KSLT_REGION_ID_MASK, kds_region_id) |
FIELD_PREP(SE_KSLT_KEY_ID_MASK, keyid);
return kds_id;
}
EXPORT_SYMBOL(tegra_kds_get_id);
void tegra_kds_free_id(u32 keyid)
{
tegra_kds_keyid_free(keyid & 0xff);
}
EXPORT_SYMBOL(tegra_kds_free_id);
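The 32-bit ID returned by tegra_kds_get_id() packs three fields defined above: the key index in bits 15:0, the region in bits 25:16 and the table in bits 31:26, with table 48 marking the global KDS table that tegra_key_in_kds() tests for. A standalone model of that encoding, with shift-and-mask standing in for FIELD_PREP()/FIELD_GET():

#include <stdint.h>
#include <stdio.h>

#define KEY_ID_MASK	0x0000ffffu	/* SE_KSLT_KEY_ID_MASK, bits 15:0 */
#define REGION_MASK	0x03ff0000u	/* SE_KSLT_REGION_ID_MASK, bits 25:16 */
#define TABLE_MASK	0xfc000000u	/* SE_KSLT_TABLE_ID_MASK, bits 31:26 */
#define TABLE_GLOBAL	48u		/* SE_KSLT_TABLE_ID_GLOBAL */

static uint32_t pack_keyid(uint32_t table, uint32_t region, uint32_t key)
{
	return (table << 26) | (region << 16) | key;
}

int main(void)
{
	uint32_t id = pack_keyid(TABLE_GLOBAL, 3, 7);

	printf("keyid %#x -> table %u region %u key %u\n", id,
	       (id & TABLE_MASK) >> 26, (id & REGION_MASK) >> 16,
	       id & KEY_ID_MASK);
	return 0;
}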
static int tegra_kds_probe(struct platform_device *pdev)
{
struct tegra_kds *kds;
kds = devm_kzalloc(&pdev->dev, sizeof(struct tegra_kds), GFP_KERNEL);
if (!kds)
return -ENOMEM;
kds->dev = &pdev->dev;
kds->base = devm_platform_ioremap_resource(pdev, 0);
if (IS_ERR(kds->base))
return PTR_ERR(kds->base);
return tegra_kds_region_setup(kds);
}
static int tegra_kds_remove(struct platform_device *pdev)
{
return 0;
}
static const struct of_device_id tegra_kds_of_match[] = {
{
.compatible = "nvidia,tegra264-kds",
},
{ },
};
MODULE_DEVICE_TABLE(of, tegra_kds_of_match);
static struct platform_driver tegra_kds_driver = {
.driver = {
.name = "tegra-kds",
.of_match_table = tegra_kds_of_match,
},
.probe = tegra_kds_probe,
.remove = tegra_kds_remove,
};
module_platform_driver(tegra_kds_driver);
MODULE_DESCRIPTION("NVIDIA Tegra Key Distribution System Driver");
MODULE_AUTHOR("Akhil R <akhilrajeev@nvidia.com>");
MODULE_LICENSE("GPL");


@@ -59,7 +59,7 @@ static unsigned int tegra_key_prep_ins_cmd(struct tegra_se *se, u32 *cpuvaddr,
cpuvaddr[i++] = host1x_opcode_setpayload(1);
cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->manifest);
cpuvaddr[i++] = se->regcfg->manifest(se->owner, alg, keylen);

cpuvaddr[i++] = host1x_opcode_setpayload(1);
cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->key_dst);
@@ -91,7 +91,69 @@ static unsigned int tegra_key_prep_ins_cmd(struct tegra_se *se, u32 *cpuvaddr,
host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

dev_dbg(se->dev, "key-slot %u key-manifest %#x\n",
slot, se->regcfg->manifest(se->owner, alg, keylen));
return i;
}
static unsigned int tegra_key_prep_mov_cmd(struct tegra_se *se, u32 *cpuvaddr,
u32 src_keyid, u32 tgt_keyid)
{
int i = 0;
cpuvaddr[i++] = host1x_opcode_setpayload(1);
cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->op);
cpuvaddr[i++] = SE_AES_OP_WRSTALL | SE_AES_OP_DUMMY;
cpuvaddr[i++] = host1x_opcode_setpayload(2);
cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->src_kslt);
cpuvaddr[i++] = src_keyid;
cpuvaddr[i++] = tgt_keyid;
cpuvaddr[i++] = host1x_opcode_setpayload(1);
cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->config);
cpuvaddr[i++] = SE_CFG_MOV;
cpuvaddr[i++] = host1x_opcode_setpayload(1);
cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->op);
cpuvaddr[i++] = SE_AES_OP_WRSTALL | SE_AES_OP_START |
SE_AES_OP_LASTBUF;
cpuvaddr[i++] = se_host1x_opcode_nonincr(host1x_uclass_incr_syncpt_r(), 1);
cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);
dev_dbg(se->dev, "keymov: src keyid %u target keyid %u\n", src_keyid, tgt_keyid);
return i;
}
static unsigned int tegra_key_prep_invld_cmd(struct tegra_se *se, u32 *cpuvaddr, u32 keyid)
{
int i = 0;
cpuvaddr[i++] = host1x_opcode_setpayload(1);
cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->op);
cpuvaddr[i++] = SE_AES_OP_WRSTALL | SE_AES_OP_DUMMY;
cpuvaddr[i++] = host1x_opcode_setpayload(1);
cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->tgt_kslt);
cpuvaddr[i++] = keyid;
cpuvaddr[i++] = host1x_opcode_setpayload(1);
cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->config);
cpuvaddr[i++] = SE_CFG_INVLD;
cpuvaddr[i++] = host1x_opcode_setpayload(1);
cpuvaddr[i++] = se_host1x_opcode_incr_w(se->hw->regs->op);
cpuvaddr[i++] = SE_AES_OP_WRSTALL | SE_AES_OP_START |
SE_AES_OP_LASTBUF;
cpuvaddr[i++] = se_host1x_opcode_nonincr(host1x_uclass_incr_syncpt_r(), 1);
cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);
dev_dbg(se->dev, "invalidate keyid %u\n", keyid);
return i;
}
@@ -122,6 +184,64 @@ static int tegra_key_insert(struct tegra_se *se, const u8 *key,
return tegra_se_host1x_submit(se, size);
}
static int tegra_key_move_to_kds(struct tegra_se *se, u32 slot, u32 kds_id)
{
u32 src_keyid, size;
int ret;
src_keyid = SE_KSLT_REGION_ID_SYM | slot;
size = tegra_key_prep_mov_cmd(se, se->cmdbuf->addr, src_keyid, kds_id);
ret = tegra_se_host1x_submit(se, size);
if (ret)
return ret;
return 0;
}
static unsigned int tegra_kac_get_from_kds(struct tegra_se *se, u32 keyid, u16 slot)
{
u32 tgt_keyid, size;
int ret;
tgt_keyid = SE_KSLT_REGION_ID_SYM | slot;
size = tegra_key_prep_mov_cmd(se, se->cmdbuf->addr, keyid, tgt_keyid);
ret = tegra_se_host1x_submit(se, size);
if (ret)
tegra_keyslot_free(slot);
return ret;
}
static void tegra_key_kds_invalidate(struct tegra_se *se, u32 keyid)
{
unsigned int size;
size = tegra_key_prep_invld_cmd(se, se->cmdbuf->addr, keyid);
tegra_se_host1x_submit(se, size);
tegra_kds_free_id(keyid);
}
unsigned int tegra_key_get_idx(struct tegra_se *se, u32 keyid)
{
u16 slot;
if (tegra_key_in_kslt(keyid))
return keyid;
if (!tegra_key_in_kds(keyid))
return 0;
slot = tegra_keyslot_alloc();
if (!slot)
return 0;
tegra_kac_get_from_kds(se, keyid, slot);
return slot;
}
void tegra_key_invalidate(struct tegra_se *se, u32 keyid, u32 alg)
{
u8 zkey[AES_MAX_KEY_SIZE] = {0};
@@ -129,14 +249,17 @@ void tegra_key_invalidate(struct tegra_se *se, u32 keyid, u32 alg)
if (!keyid)
return;

if (tegra_key_in_kds(keyid)) {
tegra_key_kds_invalidate(se, keyid);
} else {
/* Overwrite the key with 0s */
tegra_key_insert(se, zkey, AES_MAX_KEY_SIZE, keyid, alg);
tegra_keyslot_free(keyid);
}
}
int tegra_key_submit(struct tegra_se *se, const u8 *key, u32 keylen, u32 alg, u32 *keyid)
{
u32 kds_id, orig_id = *keyid;
int ret;

/* Use the existing slot if it is already allocated */
@@ -152,5 +275,36 @@ int tegra_key_submit(struct tegra_se *se, const u8 *key, u32 keylen, u32 alg, u3
if (ret)
return ret;
if (!se->hw->support_kds)
return 0;
/*
* Move the key to KDS and free the slot if HW supports.
* The key will have to be brought back to local KSLT for any task.
*/
/* If it is a valid key, invalidate it */
if (tegra_key_in_kds(orig_id))
tegra_key_kds_invalidate(se, orig_id);
kds_id = tegra_kds_get_id();
if (!kds_id) {
/* Not a fatal error. Key can still reside in KSLT */
dev_err(se->dev, "Failed to get KDS slot. The key is in local key slot\n");
return 0;
}
ret = tegra_key_move_to_kds(se, *keyid, kds_id);
if (ret) {
/* Not a fatal error. Key can still reside in KSLT */
dev_err(se->dev, "Failed to move key to KDS. The key is in local key slot\n");
tegra_kds_free_id(kds_id);
return 0;
}
/* Free the local keyslot. */
tegra_key_invalidate(se, *keyid, alg);
*keyid = kds_id;
return 0;
}


@@ -334,6 +334,36 @@ static int tegra_se_remove(struct platform_device *pdev)
return 0;
}
static const struct tegra_se_regs tegra234_aes0_regs = {
.config = SE_AES0_CFG,
.op = SE_AES0_OPERATION,
.last_blk = SE_AES0_LAST_BLOCK,
.linear_ctr = SE_AES0_LINEAR_CTR,
.aad_len = SE_AES0_AAD_LEN,
.cryp_msg_len = SE_AES0_CRYPTO_MSG_LEN,
.manifest = SE_AES0_KEYMANIFEST,
.key_addr = SE_AES0_KEY_ADDR,
.key_data = SE_AES0_KEY_DATA,
.key_dst = SE_AES0_KEY_DST,
.result = SE_AES0_CMAC_RESULT,
};
static const struct tegra_se_regs tegra264_aes0_regs = {
.config = SE_AES0_CFG,
.op = SE_AES0_OPERATION,
.last_blk = SE_AES0_LAST_BLOCK,
.linear_ctr = SE_AES0_LINEAR_CTR,
.aad_len = SE_AES0_AAD_LEN,
.cryp_msg_len = SE_AES0_CRYPTO_MSG_LEN,
.manifest = SE_AES0_KAC2_KEYMANIFEST,
.key_addr = SE_AES0_KEY_ADDR,
.key_data = SE_AES0_KEY_DATA,
.key_dst = SE_AES0_KEY_DST,
.src_kslt = SE_AES0_SRC_KSLT,
.tgt_kslt = SE_AES0_TGT_KSLT,
.result = SE_AES0_CMAC_RESULT,
};
static const struct tegra_se_regs tegra234_aes1_regs = {
.config = SE_AES1_CFG,
.op = SE_AES1_OPERATION,
@@ -348,6 +378,22 @@ static const struct tegra_se_regs tegra234_aes1_regs = {
.result = SE_AES1_CMAC_RESULT,
};
static const struct tegra_se_regs tegra264_aes1_regs = {
.config = SE_AES1_CFG,
.op = SE_AES1_OPERATION,
.last_blk = SE_AES1_LAST_BLOCK,
.linear_ctr = SE_AES1_LINEAR_CTR,
.aad_len = SE_AES1_AAD_LEN,
.cryp_msg_len = SE_AES1_CRYPTO_MSG_LEN,
.manifest = SE_AES1_KAC2_KEYMANIFEST,
.key_addr = SE_AES1_KEY_ADDR,
.key_data = SE_AES1_KEY_DATA,
.key_dst = SE_AES1_KEY_DST,
.src_kslt = SE_AES1_SRC_KSLT,
.tgt_kslt = SE_AES1_TGT_KSLT,
.result = SE_AES1_CMAC_RESULT,
};
static const struct tegra_se_regs tegra234_hash_regs = {
.config = SE_SHA_CFG,
.op = SE_SHA_OPERATION,
@@ -358,6 +404,18 @@ static const struct tegra_se_regs tegra234_hash_regs = {
.result = SE_SHA_HASH_RESULT,
};
static const struct tegra_se_regs tegra264_hash_regs = {
.config = SE_SHA_CFG,
.op = SE_SHA_OPERATION,
.manifest = SE_SHA_KAC2_KEYMANIFEST,
.key_addr = SE_SHA_KEY_ADDR,
.key_data = SE_SHA_KEY_DATA,
.key_dst = SE_SHA_KEY_DST,
.src_kslt = SE_SHA_SRC_KSLT,
.tgt_kslt = SE_SHA_TGT_KSLT,
.result = SE_SHA_HASH_RESULT,
};
static const struct tegra_se_hw tegra234_aes_hw = {
.regs = &tegra234_aes1_regs,
.kac_ver = 1,
@@ -366,6 +424,16 @@ static const struct tegra_se_hw tegra234_aes_hw = {
.deinit_alg = tegra_deinit_aes,
};
const struct tegra_se_hw tegra264_aes_hw = {
.regs = &tegra264_aes1_regs,
.kac_ver = 2,
.support_sm_alg = true,
.support_kds = false, // FIXME: Bug 4663009
.host1x_class = 0x3b,
.init_alg = tegra_init_aes,
.deinit_alg = tegra_deinit_aes,
};
static const struct tegra_se_hw tegra234_hash_hw = {
.regs = &tegra234_hash_regs,
.kac_ver = 1,
@@ -374,6 +442,27 @@ static const struct tegra_se_hw tegra234_hash_hw = {
.deinit_alg = tegra_deinit_hash,
};
static const struct tegra_se_hw tegra264_hash_hw = {
.regs = &tegra264_hash_regs,
.kac_ver = 2,
.support_sm_alg = true,
.support_kds = false, // FIXME: Bug 4663009
.host1x_class = 0x3d,
.init_alg = tegra_init_hash,
.deinit_alg = tegra_deinit_hash,
};
static const struct tegra_se_hw tegra264_sm4_hw = {
.regs = &tegra264_aes0_regs,
.kac_ver = 2,
.host1x_class = 0x3a,
.support_kds = false, // FIXME: Bug 4663009
.support_aad_verify = true,
.support_sm_alg = true,
.init_alg = tegra_init_sm4,
.deinit_alg = tegra_deinit_sm4,
};
static const struct of_device_id tegra_se_of_match[] = {
{
.compatible = "nvidia,tegra234-se-aes",
@@ -381,6 +470,15 @@ static const struct of_device_id tegra_se_of_match[] = {
}, {
.compatible = "nvidia,tegra234-se-hash",
.data = &tegra234_hash_hw,
}, {
.compatible = "nvidia,tegra264-se-aes",
.data = &tegra264_aes_hw,
}, {
.compatible = "nvidia,tegra264-se-hash",
.data = &tegra264_hash_hw,
}, {
.compatible = "nvidia,tegra264-se-sm4",
.data = &tegra264_sm4_hw,
},
{ },
};
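The three new compatibles bind the T264 descriptors above. As a reference, a probe-time sketch of how such a descriptor is typically resolved; tegra_example_probe() is illustrative, not the driver's actual probe:

/* Sketch: resolving the per-SoC tegra_se_hw descriptor at probe. */
static int tegra_example_probe(struct platform_device *pdev)
{
	struct tegra_se *se;

	se = devm_kzalloc(&pdev->dev, sizeof(*se), GFP_KERNEL);
	if (!se)
		return -ENOMEM;

	/* Yields e.g. &tegra264_sm4_hw for "nvidia,tegra264-se-sm4". */
	se->hw = device_get_match_data(&pdev->dev);
	if (!se->hw)
		return -ENODEV;

	return se->hw->init_alg(se);
}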

File diff suppressed because it is too large

@@ -29,6 +29,7 @@
#define SE_SHA_KEY_ADDR 0x4094
#define SE_SHA_KEY_DATA 0x4098
#define SE_SHA_KEYMANIFEST 0x409c
#define SE_SHA_KAC2_KEYMANIFEST 0x4178
#define SE_SHA_CRYPTO_CFG 0x40a4
#define SE_SHA_KEY_DST 0x40a8
#define SE_SHA_SRC_KSLT 0x4180
@@ -102,6 +103,7 @@
#define SE_AES0_CMAC_RESULT 0x10c4
#define SE_AES0_SRC_KSLT 0x1100
#define SE_AES0_TGT_KSLT 0x1104
#define SE_AES0_KAC2_KEYMANIFEST 0x1108
#define SE_AES0_KEYMANIFEST 0x1114
#define SE_AES0_AAD_LEN 0x112c
#define SE_AES0_CRYPTO_MSG_LEN 0x1134
@@ -117,35 +119,66 @@
#define SE_AES1_CMAC_RESULT 0x20c4
#define SE_AES1_SRC_KSLT 0x2100
#define SE_AES1_TGT_KSLT 0x2104
#define SE_AES1_KAC2_KEYMANIFEST 0x2108
#define SE_AES1_KEYMANIFEST 0x2114
#define SE_AES1_AAD_LEN 0x212c
#define SE_AES1_CRYPTO_MSG_LEN 0x2134
#define SE_AES_CFG_ENC_MODE(x) FIELD_PREP(GENMASK(31, 24), x)
#define SE_AES_ENC_MODE_ECB SE_AES_CFG_ENC_MODE(0)
#define SE_AES_ENC_MODE_CBC SE_AES_CFG_ENC_MODE(1)
#define SE_AES_ENC_MODE_OFB SE_AES_CFG_ENC_MODE(2)
#define SE_AES_ENC_MODE_GMAC SE_AES_CFG_ENC_MODE(3)
#define SE_AES_ENC_MODE_GCM SE_AES_CFG_ENC_MODE(4)
#define SE_AES_ENC_MODE_GCM_FINAL SE_AES_CFG_ENC_MODE(5)
#define SE_AES_ENC_MODE_KW SE_AES_CFG_ENC_MODE(6)
#define SE_AES_ENC_MODE_CMAC SE_AES_CFG_ENC_MODE(7)
#define SE_AES_ENC_MODE_CTR SE_AES_CFG_ENC_MODE(10)
#define SE_AES_ENC_MODE_XTS SE_AES_CFG_ENC_MODE(11)
#define SE_AES_ENC_MODE_CBC_MAC SE_AES_CFG_ENC_MODE(12)
#define SE_AES_ENC_MODE_AESKW_CRYPT SE_AES_CFG_ENC_MODE(13)
#define SE_AES_ENC_MODE_CTR_DRBG_INSTANTIATE_DF SE_AES_CFG_ENC_MODE(14)
#define SE_AES_ENC_MODE_CTR_DRBG_GENKEY SE_AES_CFG_ENC_MODE(15)
#define SE_AES_ENC_MODE_CTR_DRBG_GENRND SE_AES_CFG_ENC_MODE(16)
#define SE_AES_ENC_MODE_KDF_CMAC_AES SE_AES_CFG_ENC_MODE(20)
#define SE_AES_ENC_MODE_GCM2 SE_AES_CFG_ENC_MODE(21)
#define SE_AES_CFG_DEC_MODE(x) FIELD_PREP(GENMASK(23, 16), x)
#define SE_AES_DEC_MODE_ECB SE_AES_CFG_DEC_MODE(0)
#define SE_AES_DEC_MODE_CBC SE_AES_CFG_DEC_MODE(1)
#define SE_AES_DEC_MODE_OFB SE_AES_CFG_DEC_MODE(2)
#define SE_AES_DEC_MODE_GMAC SE_AES_CFG_DEC_MODE(3)
#define SE_AES_DEC_MODE_GCM SE_AES_CFG_DEC_MODE(4)
#define SE_AES_DEC_MODE_GCM_FINAL SE_AES_CFG_DEC_MODE(5)
#define SE_AES_DEC_MODE_KW SE_AES_CFG_DEC_MODE(6)
#define SE_AES_DEC_MODE_CMAC SE_AES_CFG_DEC_MODE(7)
#define SE_AES_DEC_MODE_CMAC_VERIFY SE_AES_CFG_DEC_MODE(8)
#define SE_AES_DEC_MODE_GCM_VERIFY SE_AES_CFG_DEC_MODE(9)
#define SE_AES_DEC_MODE_CTR SE_AES_CFG_DEC_MODE(10)
#define SE_AES_DEC_MODE_XTS SE_AES_CFG_DEC_MODE(11)
#define SE_AES_DEC_MODE_CBC_MAC SE_AES_CFG_DEC_MODE(12)
#define SE_AES_DEC_MODE_AESKW_CRYPT SE_AES_CFG_DEC_MODE(13)
#define SE_AES_DEC_MODE_GCM2 SE_AES_CFG_DEC_MODE(21)
#define SE_AES_CFG_ENC_ALG(x) FIELD_PREP(GENMASK(15, 12), x)
#define SE_AES_ENC_ALG_NOP SE_AES_CFG_ENC_ALG(0)
#define SE_AES_ENC_ALG_AES_ENC SE_AES_CFG_ENC_ALG(1)
#define SE_AES_ENC_ALG_RNG SE_AES_CFG_ENC_ALG(2)
#define SE_AES_ENC_ALG_SHA SE_AES_CFG_ENC_ALG(3)
#define SE_AES_ENC_ALG_SM4_ENC SE_AES_CFG_ENC_ALG(5)
#define SE_AES_ENC_ALG_HMAC SE_AES_CFG_ENC_ALG(7)
#define SE_AES_ENC_ALG_KDF SE_AES_CFG_ENC_ALG(8)
#define SE_AES_ENC_ALG_KEY_INVLD SE_AES_CFG_ENC_ALG(10)
#define SE_AES_ENC_ALG_KEY_MOV SE_AES_CFG_ENC_ALG(11)
#define SE_AES_ENC_ALG_KEY_INQUIRE SE_AES_CFG_ENC_ALG(12)
#define SE_AES_ENC_ALG_INS SE_AES_CFG_ENC_ALG(13)
#define SE_AES_ENC_ALG_CLONE SE_AES_CFG_ENC_ALG(14)
#define SE_AES_ENC_ALG_LOCK SE_AES_CFG_ENC_ALG(15)
#define SE_AES_CFG_DEC_ALG(x) FIELD_PREP(GENMASK(11, 8), x)
#define SE_AES_DEC_ALG_NOP SE_AES_CFG_DEC_ALG(0)
#define SE_AES_DEC_ALG_AES_DEC SE_AES_CFG_DEC_ALG(1)
#define SE_AES_DEC_ALG_SM4_DEC SE_AES_CFG_DEC_ALG(5)
#define SE_AES_CFG_DST(x) FIELD_PREP(GENMASK(4, 2), x)
#define SE_AES_DST_MEMORY SE_AES_CFG_DST(0)
@@ -232,6 +265,56 @@
#define SE_KAC_USER_NS FIELD_PREP(GENMASK(6, 4), 3)
#define SE_KAC2_USER(x) FIELD_PREP(GENMASK(29, 22), x)
#define SE_KAC2_USER_GPSE SE_KAC2_USER(3)
#define SE_KAC2_USER_GCSE SE_KAC2_USER(5)
#define SE_KAC2_CLONEABLE FIELD_PREP(BIT(21), 1)
#define SE_KAC2_EXPORTABLE FIELD_PREP(BIT(20), 1)
#define SE_KAC2_DECRYPT_EN FIELD_PREP(BIT(19), 1)
#define SE_KAC2_ENCRYPT_EN FIELD_PREP(BIT(18), 1)
#define SE_KAC2_PURPOSE(x) FIELD_PREP(GENMASK(17, 12), x)
#define SE_KAC2_ENC SE_KAC2_PURPOSE(0)
#define SE_KAC2_CMAC SE_KAC2_PURPOSE(1)
#define SE_KAC2_HMAC SE_KAC2_PURPOSE(2)
#define SE_KAC2_GCM_KW SE_KAC2_PURPOSE(3)
#define SE_KAC2_HMAC_KDK SE_KAC2_PURPOSE(6)
#define SE_KAC2_HMAC_KDD SE_KAC2_PURPOSE(7)
#define SE_KAC2_HMAC_KDD_KUW SE_KAC2_PURPOSE(8)
#define SE_KAC2_XTS SE_KAC2_PURPOSE(9)
#define SE_KAC2_GCM SE_KAC2_PURPOSE(10)
#define SE_KAC2_CMAC_KDK SE_KAC2_PURPOSE(12)
#define SE_KAC2_AES_KW SE_KAC2_PURPOSE(13)
#define SE_KAC2_CTR_DRBG_ENT SE_KAC2_PURPOSE(14)
#define SE_KAC2_CTR_DRBG_KV SE_KAC2_PURPOSE(15)
#define SE_KAC2_GCM_HWIV SE_KAC2_PURPOSE(16)
#define SE_KAC2_HARDEN_HMAC_KDK SE_KAC2_PURPOSE(17)
#define SE_KAC2_HARDEN_HMAC_KDD SE_KAC2_PURPOSE(18)
#define SE_KAC2_SIZE(x) FIELD_PREP(GENMASK(11, 8), x)
#define SE_KAC2_SIZE_128 SE_KAC2_SIZE(0)
#define SE_KAC2_SIZE_192 SE_KAC2_SIZE(1)
#define SE_KAC2_SIZE_256 SE_KAC2_SIZE(2)
#define SE_KAC2_ORIGIN_SW FIELD_PREP(BIT(7), 1)
#define SE_KAC2_SUBTYPE(x) FIELD_PREP(GENMASK(6, 3), x)
#define SE_KAC2_SUBTYPE_AES SE_KAC2_SUBTYPE(0)
#define SE_KAC2_SUBTYPE_SHA SE_KAC2_SUBTYPE(0)
#define SE_KAC2_SUBTYPE_SM4 SE_KAC2_SUBTYPE(1)
#define SE_KAC2_SUBTYPE_SM3 SE_KAC2_SUBTYPE(1)
#define SE_KAC2_TYPE(x) FIELD_PREP(GENMASK(2, 0), x)
#define SE_KAC2_TYPE_SYM SE_KAC2_TYPE(2)
#define SE_KSLT_TABLE_ID_MASK GENMASK(31, 26)
#define SE_KSLT_TABLE_ID(x) FIELD_PREP(SE_KSLT_TABLE_ID_MASK, x)
#define SE_KSLT_TABLE_ID_GLOBAL SE_KSLT_TABLE_ID(48)
#define SE_KSLT_REGION_ID(x) FIELD_PREP(GENMASK(25, 16), x)
#define SE_KSLT_REGION_ID_SYM SE_KSLT_REGION_ID(2)
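The SE_KAC2_* fields above pack into a single manifest word. A hedged example for a software-origin 128-bit SM4 key used for GCM; whether the hardware expects exactly this field combination is an assumption:

/* Sketch: KAC2 manifest word for a SW-loaded 128-bit SM4-GCM key. */
static u32 tegra_example_sm4_gcm_manifest(void)
{
	return SE_KAC2_USER_GPSE | SE_KAC2_ENCRYPT_EN | SE_KAC2_DECRYPT_EN |
	       SE_KAC2_GCM | SE_KAC2_SIZE_128 | SE_KAC2_ORIGIN_SW |
	       SE_KAC2_SUBTYPE_SM4 | SE_KAC2_TYPE_SYM;
}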
#define SE_AES_KEY_DST_INDEX(x) FIELD_PREP(GENMASK(11, 8), x)
#define SE_ADDR_HI_MSB(x) FIELD_PREP(GENMASK(31, 24), x)
#define SE_ADDR_HI_SZ(x) FIELD_PREP(GENMASK(23, 0), x)
@@ -245,12 +328,10 @@
SE_AES_DST_MEMORY)
#define SE_CFG_GMAC_ENCRYPT (SE_AES_ENC_ALG_AES_ENC | \
SE_AES_ENC_MODE_GMAC | \
SE_AES_DST_MEMORY)
#define SE_CFG_GMAC_DECRYPT (SE_AES_DEC_ALG_AES_DEC | \
SE_AES_DEC_MODE_GMAC | \
SE_AES_DST_MEMORY)
@@ -259,31 +340,136 @@
SE_AES_ENC_MODE_GCM | \
SE_AES_DST_MEMORY)
#define SE_CFG_GCM_DECRYPT (SE_AES_DEC_ALG_AES_DEC | \
SE_AES_DEC_MODE_GCM | \
SE_AES_DST_MEMORY)
#define SE_CFG_GCM_FINAL_ENCRYPT (SE_AES_ENC_ALG_AES_ENC | \
SE_AES_ENC_MODE_GCM_FINAL | \
SE_AES_DST_MEMORY)
#define SE_CFG_GCM_FINAL_DECRYPT (SE_AES_DEC_ALG_AES_DEC | \
SE_AES_DEC_MODE_GCM_FINAL | \
SE_AES_DST_MEMORY)
#define SE_CFG_GCM_VERIFY (SE_AES_DEC_ALG_AES_DEC | \
SE_AES_DEC_MODE_GCM_VERIFY | \
SE_AES_DST_MEMORY)
#define SE_CFG_CMAC (SE_AES_ENC_ALG_AES_ENC | \
SE_AES_ENC_MODE_CMAC)
#define SE_CFG_CBC_MAC (SE_AES_ENC_ALG_AES_ENC | \
SE_AES_ENC_MODE_CBC_MAC)
#define SE_CFG_SM4_ENCRYPT (SE_AES_ENC_ALG_SM4_ENC | \
SE_AES_DEC_ALG_NOP | \
SE_AES_DST_MEMORY)
#define SE_CFG_SM4_DECRYPT (SE_AES_ENC_ALG_NOP | \
SE_AES_DEC_ALG_SM4_DEC | \
SE_AES_DST_MEMORY)
#define SE_CFG_SM4_GMAC_ENCRYPT (SE_AES_ENC_ALG_SM4_ENC | \
SE_AES_ENC_MODE_GMAC | \
SE_AES_DST_MEMORY)
#define SE_CFG_SM4_GMAC_DECRYPT (SE_AES_DEC_ALG_SM4_DEC | \
SE_AES_DEC_MODE_GMAC | \
SE_AES_DST_MEMORY)
#define SE_CFG_SM4_GCM_ENCRYPT (SE_AES_ENC_ALG_SM4_ENC | \
SE_AES_ENC_MODE_GCM | \
SE_AES_DST_MEMORY)
#define SE_CFG_SM4_GCM_DECRYPT (SE_AES_DEC_ALG_SM4_DEC | \
SE_AES_DEC_MODE_GCM | \
SE_AES_DST_MEMORY)
#define SE_CFG_SM4_GCM_FINAL_ENCRYPT (SE_AES_ENC_ALG_SM4_ENC | \
SE_AES_ENC_MODE_GCM_FINAL | \
SE_AES_DST_MEMORY)
#define SE_CFG_SM4_GCM_FINAL_DECRYPT (SE_AES_DEC_ALG_SM4_DEC | \
SE_AES_DEC_MODE_GCM_FINAL | \
SE_AES_DST_MEMORY)
#define SE_CFG_SM4_GCM_VERIFY (SE_AES_DEC_ALG_SM4_DEC | \
SE_AES_DEC_MODE_GCM_VERIFY | \
SE_AES_DST_MEMORY)
#define SE_CFG_SM4_CMAC (SE_AES_ENC_ALG_SM4_ENC | \
SE_AES_ENC_MODE_CMAC)
#define SE_CFG_INS (SE_AES_ENC_ALG_INS | \
SE_AES_DEC_ALG_NOP)
#define SE_CFG_MOV (SE_AES_ENC_ALG_KEY_MOV | \
SE_AES_DEC_ALG_NOP)
#define SE_CFG_INVLD (SE_AES_ENC_ALG_KEY_INVLD | \
SE_AES_DEC_ALG_NOP)
#define SE_CFG_ECB_ENCRYPT (SE_AES_ENC_MODE_ECB | \
SE_CFG_AES_ENCRYPT)
#define SE_CFG_ECB_DECRYPT (SE_AES_DEC_MODE_ECB | \
SE_CFG_AES_DECRYPT)
#define SE_CFG_CBC_ENCRYPT (SE_AES_ENC_MODE_CBC | \
SE_CFG_AES_ENCRYPT)
#define SE_CFG_CBC_DECRYPT (SE_AES_DEC_MODE_CBC | \
SE_CFG_AES_DECRYPT)
#define SE_CFG_OFB_ENCRYPT (SE_AES_ENC_MODE_OFB | \
SE_CFG_AES_ENCRYPT)
#define SE_CFG_OFB_DECRYPT (SE_AES_DEC_MODE_OFB | \
SE_CFG_AES_DECRYPT)
#define SE_CFG_CTR_ENCRYPT (SE_AES_ENC_MODE_CTR | \
SE_CFG_AES_ENCRYPT)
#define SE_CFG_CTR_DECRYPT (SE_AES_DEC_MODE_CTR | \
SE_CFG_AES_DECRYPT)
#define SE_CFG_XTS_ENCRYPT (SE_AES_ENC_MODE_XTS | \
SE_CFG_AES_ENCRYPT)
#define SE_CFG_XTS_DECRYPT (SE_AES_DEC_MODE_XTS | \
SE_CFG_AES_DECRYPT)
#define SE_CFG_SM4_ECB_ENCRYPT (SE_AES_ENC_MODE_ECB | \
SE_CFG_SM4_ENCRYPT)
#define SE_CFG_SM4_ECB_DECRYPT (SE_AES_DEC_MODE_ECB | \
SE_CFG_SM4_DECRYPT)
#define SE_CFG_SM4_CBC_ENCRYPT (SE_AES_ENC_MODE_CBC | \
SE_CFG_SM4_ENCRYPT)
#define SE_CFG_SM4_CBC_DECRYPT (SE_AES_DEC_MODE_CBC | \
SE_CFG_SM4_DECRYPT)
#define SE_CFG_SM4_OFB_ENCRYPT (SE_AES_ENC_MODE_OFB | \
SE_CFG_SM4_ENCRYPT)
#define SE_CFG_SM4_OFB_DECRYPT (SE_AES_DEC_MODE_OFB | \
SE_CFG_SM4_DECRYPT)
#define SE_CFG_SM4_CTR_ENCRYPT (SE_AES_ENC_MODE_CTR | \
SE_CFG_SM4_ENCRYPT)
#define SE_CFG_SM4_CTR_DECRYPT (SE_AES_DEC_MODE_CTR | \
SE_CFG_SM4_DECRYPT)
#define SE_CFG_SM4_XTS_ENCRYPT (SE_AES_ENC_MODE_XTS | \
SE_CFG_SM4_ENCRYPT)
#define SE_CFG_SM4_XTS_DECRYPT (SE_AES_DEC_MODE_XTS | \
SE_CFG_SM4_DECRYPT)
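A selector in the style of tegra234_aes_cfg() would map the SM4 algorithm IDs to these config words roughly as follows; the function name and mode coverage are illustrative:

/* Sketch: picking an SM4 config word per algorithm and direction. */
static int tegra_example_sm4_cfg(u32 alg, bool encrypt)
{
	switch (alg) {
	case SE_ALG_SM4_ECB:
		return encrypt ? SE_CFG_SM4_ECB_ENCRYPT : SE_CFG_SM4_ECB_DECRYPT;
	case SE_ALG_SM4_CBC:
		return encrypt ? SE_CFG_SM4_CBC_ENCRYPT : SE_CFG_SM4_CBC_DECRYPT;
	case SE_ALG_SM4_CTR:
		return encrypt ? SE_CFG_SM4_CTR_ENCRYPT : SE_CFG_SM4_CTR_DECRYPT;
	case SE_ALG_SM4_XTS:
		return encrypt ? SE_CFG_SM4_XTS_ENCRYPT : SE_CFG_SM4_XTS_DECRYPT;
	case SE_ALG_SM4_GCM:
		return encrypt ? SE_CFG_SM4_GCM_ENCRYPT : SE_CFG_SM4_GCM_DECRYPT;
	}
	return -EINVAL;
}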
#define SE_CRYPTO_CFG_ECB_ENCRYPT (SE_AES_INPUT_SEL_MEMORY | \
SE_AES_XOR_POS_BYPASS | \
SE_AES_CORE_SEL_ENCRYPT)
@@ -343,6 +529,8 @@
#define SE_MAX_KEYSLOT 15
#define SE_MAX_MEM_ALLOC SZ_4M
#define SE_GCM_VERIFY_OK 0x5a5a5a5a
#define SHA_FIRST BIT(0)
#define SHA_UPDATE BIT(1)
#define SHA_FINAL BIT(2)
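SE_GCM_VERIFY_OK above is the status value indicating a successful hardware tag comparison; how the status word is fetched back is driver-specific, so this check is only a sketch:

/* Sketch: translating the GCM-verify status into an AEAD errno. */
static int tegra_example_gcm_verify_status(u32 status)
{
	return status == SE_GCM_VERIFY_OK ? 0 : -EBADMSG;
}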
@@ -372,8 +560,23 @@ enum se_aes_alg {
SE_ALG_GMAC, /* GMAC mode */
SE_ALG_GCM, /* GCM mode */
SE_ALG_GCM_FINAL, /* GCM FINAL mode */
SE_ALG_GCM_VERIFY, /* GCM Verify */
SE_ALG_CMAC, /* Cipher-based MAC (CMAC) mode */
SE_ALG_CMAC_FINAL, /* Cipher-based MAC (CMAC) mode final task */
SE_ALG_CBC_MAC, /* CBC MAC mode */
/* ShāngMì 4 Algorithms */
SE_ALG_SM4_CBC,
SE_ALG_SM4_ECB,
SE_ALG_SM4_CTR,
SE_ALG_SM4_OFB,
SE_ALG_SM4_XTS,
SE_ALG_SM4_GMAC,
SE_ALG_SM4_GCM,
SE_ALG_SM4_GCM_FINAL,
SE_ALG_SM4_GCM_VERIFY,
SE_ALG_SM4_CMAC,
SE_ALG_SM4_CMAC_FINAL,
};
enum se_hash_alg {
@@ -387,6 +590,7 @@ enum se_hash_alg {
SE_ALG_SHA3_256, /* Secure Hash Algorithm3-256 (SHA3-256) mode */
SE_ALG_SHA3_384, /* Secure Hash Algorithm3-384 (SHA3-384) mode */
SE_ALG_SHA3_512, /* Secure Hash Algorithm3-512 (SHA3-512) mode */
SE_ALG_SM3_256, /* ShangMi 3 - 256 */
SE_ALG_SHAKE128, /* Secure Hash Algorithm3 (SHAKE128) mode */
SE_ALG_SHAKE256, /* Secure Hash Algorithm3 (SHAKE256) mode */
SE_ALG_HMAC_SHA224, /* Hash based MAC (HMAC) - 224 */
@@ -424,23 +628,33 @@ struct tegra_se_regs {
u32 key_addr;
u32 key_data;
u32 key_dst;
u32 src_kslt;
u32 tgt_kslt;
u32 result;
};
struct tegra_se_regcfg {
int (*cfg)(u32 alg, bool encrypt);
int (*crypto_cfg)(u32 alg, bool encrypt);
int (*manifest)(u32 user, u32 alg, u32 keylen);
};
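tegra_se_regcfg groups the encoders that differ between KAC v1 (T234) and KAC v2 (T264) behind one interface. A dispatch sketch, assuming se->regcfg is filled in during init:

/* Sketch: per-SoC dispatch through the new regcfg callbacks. */
static inline int tegra_example_cfg(struct tegra_se *se, u32 alg, bool encrypt)
{
	return se->regcfg->cfg(alg, encrypt);
}

static inline int tegra_example_manifest(struct tegra_se *se, u32 user,
					 u32 alg, u32 keylen)
{
	return se->regcfg->manifest(user, alg, keylen);
}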
struct tegra_se_hw {
const struct tegra_se_regs *regs;
int (*init_alg)(struct tegra_se *se);
void (*deinit_alg)(struct tegra_se *se);
bool support_kds;
bool support_sm_alg;
bool support_aad_verify;
u32 host1x_class;
u32 kac_ver;
};
struct tegra_se {
int (*manifest)(u32 user, u32 alg, u32 keylen);
const struct tegra_se_hw *hw;
struct host1x_client client;
struct host1x_channel *channel;
struct tegra_se_regcfg *regcfg;
struct tegra_se_cmdbuf *cmdbuf;
struct crypto_engine *engine;
struct host1x_syncpt *syncpt;
@@ -481,11 +695,38 @@ static inline int se_algname_to_algid(const char *name)
return SE_ALG_XTS;
else if (!strcmp(name, "cmac(aes)"))
return SE_ALG_CMAC;
else if (!strcmp(name, "cmac(aes)-final"))
return SE_ALG_CMAC_FINAL;
else if (!strcmp(name, "gcm(aes)"))
return SE_ALG_GCM;
else if (!strcmp(name, "gcm(aes)-mac"))
return SE_ALG_GMAC;
else if (!strcmp(name, "gcm(aes)-final"))
return SE_ALG_GCM_FINAL;
else if (!strcmp(name, "ccm(aes)"))
return SE_ALG_CBC_MAC;
else if (!strcmp(name, "cbc(sm4)"))
return SE_ALG_SM4_CBC;
else if (!strcmp(name, "ecb(sm4)"))
return SE_ALG_SM4_ECB;
else if (!strcmp(name, "ofb(sm4)"))
return SE_ALG_SM4_OFB;
else if (!strcmp(name, "ctr(sm4)"))
return SE_ALG_SM4_CTR;
else if (!strcmp(name, "xts(sm4)"))
return SE_ALG_SM4_XTS;
else if (!strcmp(name, "cmac(sm4)"))
return SE_ALG_SM4_CMAC;
else if (!strcmp(name, "cmac(sm4)-final"))
return SE_ALG_SM4_CMAC_FINAL;
else if (!strcmp(name, "gcm(sm4)"))
return SE_ALG_SM4_GCM;
else if (!strcmp(name, "gcm(sm4)-mac"))
return SE_ALG_SM4_GMAC;
else if (!strcmp(name, "gcm(sm4)-final"))
return SE_ALG_SM4_GCM_FINAL;
else if (!strcmp(name, "sha1"))
return SE_ALG_SHA1;
else if (!strcmp(name, "sha224"))
@@ -504,6 +745,8 @@ static inline int se_algname_to_algid(const char *name)
return SE_ALG_SHA3_384;
else if (!strcmp(name, "sha3-512"))
return SE_ALG_SHA3_512;
else if (!strcmp(name, "sm3"))
return SE_ALG_SM3_256;
else if (!strcmp(name, "hmac(sha224)"))
return SE_ALG_HMAC_SHA224;
else if (!strcmp(name, "hmac(sha256)"))
@@ -519,13 +762,20 @@ static inline int se_algname_to_algid(const char *name)
/* Functions */
int tegra_init_aes(struct tegra_se *se);
int tegra_init_hash(struct tegra_se *se);
int tegra_init_sm4(struct tegra_se *se);
void tegra_deinit_aes(struct tegra_se *se);
void tegra_deinit_hash(struct tegra_se *se);
void tegra_deinit_sm4(struct tegra_se *se);
int tegra_key_submit(struct tegra_se *se, const u8 *key,
u32 keylen, u32 alg, u32 *keyid);
void tegra_key_invalidate(struct tegra_se *se, u32 keyid, u32 alg);
unsigned int tegra_key_get_idx(struct tegra_se *se, u32 keyid);
int tegra_se_host1x_submit(struct tegra_se *se, u32 size);
u32 tegra_kds_get_id(void);
void tegra_kds_free_id(u32 keyid);
bool tegra_key_in_kds(u32 keyid);
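Taken together, a setkey-style consumer of the declarations above could look like this; the invalidate/submit ordering and the index check mirror the request-path pattern, but the helper itself is hypothetical:

/* Sketch: replacing a key and validating its engine-visible index. */
static int tegra_example_setkey(struct tegra_se *se, const u8 *key,
				u32 keylen, u32 alg, u32 *keyid)
{
	int ret;

	if (*keyid)
		tegra_key_invalidate(se, *keyid, alg);

	ret = tegra_key_submit(se, key, keylen, alg, keyid);
	if (ret)
		return ret;

	/* KDS-resident keys are referenced by index at request time. */
	if (tegra_key_in_kds(*keyid) && !tegra_key_get_idx(se, *keyid))
		return -ENOMEM;

	return 0;
}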
/* HOST1x OPCODES */
static inline u32 host1x_opcode_setpayload(unsigned int payload)
{