Mirror of https://github.com/openwrt/openwrt.git
e5aa498acb

Fixes CVE-2020-10757 via upstream commit df4988aa1c96 ("mm: Fix mremap
not considering huge pmd devmap").

Resolved merge conflict in the following patches:

 bcm27xx: 950-0128-gpiolib-Don-t-prevent-IRQ-usage-of-output-GPIOs.patch

Refreshed patches, removed upstreamed patches:

 generic: 751-v5.8-net-dsa-mt7530-set-CPU-port-to-fallback-mode.patch
 generic: 754-v5.7-net-dsa-mt7530-fix-roaming-from-DSA-user-ports.patch

Run tested: qemu-x86-64
Build tested: x86/64, imx6, sunxi/a53

Signed-off-by: Petr Štetiar <ynezz@true.cz>
--- a/drivers/crypto/inside-secure/safexcel.c
+++ b/drivers/crypto/inside-secure/safexcel.c
@@ -75,9 +75,9 @@ static void eip197_trc_cache_banksel(str
 }

 static u32 eip197_trc_cache_probe(struct safexcel_crypto_priv *priv,
-				  int maxbanks, u32 probemask)
+				  int maxbanks, u32 probemask, u32 stride)
 {
-	u32 val, addrhi, addrlo, addrmid;
+	u32 val, addrhi, addrlo, addrmid, addralias, delta, marker;
 	int actbank;

 	/*
@@ -87,32 +87,37 @@ static u32 eip197_trc_cache_probe(struct
 	addrhi = 1 << (16 + maxbanks);
 	addrlo = 0;
 	actbank = min(maxbanks - 1, 0);
-	while ((addrhi - addrlo) > 32) {
+	while ((addrhi - addrlo) > stride) {
 		/* write marker to lowest address in top half */
 		addrmid = (addrhi + addrlo) >> 1;
+		marker = (addrmid ^ 0xabadbabe) & probemask; /* Unique */
 		eip197_trc_cache_banksel(priv, addrmid, &actbank);
-		writel((addrmid | (addrlo << 16)) & probemask,
+		writel(marker,
 		       priv->base + EIP197_CLASSIFICATION_RAMS +
 		       (addrmid & 0xffff));

-		/* write marker to lowest address in bottom half */
-		eip197_trc_cache_banksel(priv, addrlo, &actbank);
-		writel((addrlo | (addrhi << 16)) & probemask,
-		       priv->base + EIP197_CLASSIFICATION_RAMS +
-		       (addrlo & 0xffff));
+		/* write invalid markers to possible aliases */
+		delta = 1 << __fls(addrmid);
+		while (delta >= stride) {
+			addralias = addrmid - delta;
+			eip197_trc_cache_banksel(priv, addralias, &actbank);
+			writel(~marker,
+			       priv->base + EIP197_CLASSIFICATION_RAMS +
+			       (addralias & 0xffff));
+			delta >>= 1;
+		}

 		/* read back marker from top half */
 		eip197_trc_cache_banksel(priv, addrmid, &actbank);
 		val = readl(priv->base + EIP197_CLASSIFICATION_RAMS +
 			    (addrmid & 0xffff));

-		if (val == ((addrmid | (addrlo << 16)) & probemask)) {
+		if ((val & probemask) == marker)
 			/* read back correct, continue with top half */
 			addrlo = addrmid;
-		} else {
+		else
 			/* not read back correct, continue with bottom half */
 			addrhi = addrmid;
-		}
 	}
 	return addrhi;
 }
@@ -150,7 +155,7 @@ static void eip197_trc_cache_clear(struc
 			  htable_offset + i * sizeof(u32));
 }

-static void eip197_trc_cache_init(struct safexcel_crypto_priv *priv)
+static int eip197_trc_cache_init(struct safexcel_crypto_priv *priv)
 {
 	u32 val, dsize, asize;
 	int cs_rc_max, cs_ht_wc, cs_trc_rec_wc, cs_trc_lg_rec_wc;
@@ -183,7 +188,7 @@ static void eip197_trc_cache_init(struct
 	writel(val, priv->base + EIP197_TRC_PARAMS);

 	/* Probed data RAM size in bytes */
-	dsize = eip197_trc_cache_probe(priv, maxbanks, 0xffffffff);
+	dsize = eip197_trc_cache_probe(priv, maxbanks, 0xffffffff, 32);

 	/*
 	 * Now probe the administration RAM size pretty much the same way
@@ -196,11 +201,18 @@ static void eip197_trc_cache_init(struct
 	writel(val, priv->base + EIP197_TRC_PARAMS);

 	/* Probed admin RAM size in admin words */
-	asize = eip197_trc_cache_probe(priv, 0, 0xbfffffff) >> 4;
+	asize = eip197_trc_cache_probe(priv, 0, 0x3fffffff, 16) >> 4;

 	/* Clear any ECC errors detected while probing! */
 	writel(0, priv->base + EIP197_TRC_ECCCTRL);

+	/* Sanity check probing results */
+	if (dsize < EIP197_MIN_DSIZE || asize < EIP197_MIN_ASIZE) {
+		dev_err(priv->dev, "Record cache probing failed (%d,%d).",
+			dsize, asize);
+		return -ENODEV;
+	}
+
 	/*
 	 * Determine optimal configuration from RAM sizes
 	 * Note that we assume that the physical RAM configuration is sane
@@ -251,6 +263,7 @@ static void eip197_trc_cache_init(struct

 	dev_info(priv->dev, "TRC init: %dd,%da (%dr,%dh)\n",
 		 dsize, asize, cs_rc_max, cs_ht_wc + cs_ht_wc);
+	return 0;
 }

 static void eip197_init_firmware(struct safexcel_crypto_priv *priv)
@@ -298,13 +311,14 @@ static void eip197_init_firmware(struct
 static int eip197_write_firmware(struct safexcel_crypto_priv *priv,
 				 const struct firmware *fw)
 {
-	const u32 *data = (const u32 *)fw->data;
+	const __be32 *data = (const __be32 *)fw->data;
 	int i;

 	/* Write the firmware */
 	for (i = 0; i < fw->size / sizeof(u32); i++)
 		writel(be32_to_cpu(data[i]),
-		       priv->base + EIP197_CLASSIFICATION_RAMS + i * sizeof(u32));
+		       priv->base + EIP197_CLASSIFICATION_RAMS +
+		       i * sizeof(__be32));

 	/* Exclude final 2 NOPs from size */
 	return i - EIP197_FW_TERMINAL_NOPS;
@@ -471,6 +485,14 @@ static int safexcel_hw_setup_cdesc_rings
 		cd_fetch_cnt = ((1 << priv->hwconfig.hwcfsize) /
 				cd_size_rnd) - 1;
 	}
+	/*
+	 * Since we're using command desc's way larger than formally specified,
+	 * we need to check whether we can fit even 1 for low-end EIP196's!
+	 */
+	if (!cd_fetch_cnt) {
+		dev_err(priv->dev, "Unable to fit even 1 command desc!\n");
+		return -ENODEV;
+	}

 	for (i = 0; i < priv->config.rings; i++) {
 		/* ring base address */
@@ -479,12 +501,12 @@ static int safexcel_hw_setup_cdesc_rings
 		writel(upper_32_bits(priv->ring[i].cdr.base_dma),
 		       EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI);

-		writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.cd_offset << 16) |
-		       priv->config.cd_size,
+		writel(EIP197_xDR_DESC_MODE_64BIT | EIP197_CDR_DESC_MODE_ADCP |
+		       (priv->config.cd_offset << 14) | priv->config.cd_size,
 		       EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_DESC_SIZE);
 		writel(((cd_fetch_cnt *
 			 (cd_size_rnd << priv->hwconfig.hwdataw)) << 16) |
-		       (cd_fetch_cnt * priv->config.cd_offset),
+		       (cd_fetch_cnt * (priv->config.cd_offset / sizeof(u32))),
 		       EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_CFG);

 		/* Configure DMA tx control */
@@ -527,13 +549,13 @@ static int safexcel_hw_setup_rdesc_rings
 		writel(upper_32_bits(priv->ring[i].rdr.base_dma),
 		       EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI);

-		writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.rd_offset << 16) |
+		writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.rd_offset << 14) |
 		       priv->config.rd_size,
 		       EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_DESC_SIZE);

 		writel(((rd_fetch_cnt *
 			 (rd_size_rnd << priv->hwconfig.hwdataw)) << 16) |
-		       (rd_fetch_cnt * priv->config.rd_offset),
+		       (rd_fetch_cnt * (priv->config.rd_offset / sizeof(u32))),
 		       EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_CFG);

 		/* Configure DMA tx control */
@@ -559,7 +581,7 @@ static int safexcel_hw_setup_rdesc_rings
 static int safexcel_hw_init(struct safexcel_crypto_priv *priv)
 {
 	u32 val;
-	int i, ret, pe;
+	int i, ret, pe, opbuflo, opbufhi;

 	dev_dbg(priv->dev, "HW init: using %d pipe(s) and %d ring(s)\n",
 		priv->config.pes, priv->config.rings);
@@ -595,8 +617,8 @@ static int safexcel_hw_init(struct safex
 		writel(EIP197_DxE_THR_CTRL_RESET_PE,
 		       EIP197_HIA_DFE_THR(priv) + EIP197_HIA_DFE_THR_CTRL(pe));

-		if (priv->flags & SAFEXCEL_HW_EIP197)
-			/* Reset HIA input interface arbiter (EIP197 only) */
+		if (priv->flags & EIP197_PE_ARB)
+			/* Reset HIA input interface arbiter (if present) */
 			writel(EIP197_HIA_RA_PE_CTRL_RESET,
 			       EIP197_HIA_AIC(priv) + EIP197_HIA_RA_PE_CTRL(pe));

@@ -639,9 +661,16 @@ static int safexcel_hw_init(struct safex
 			;

 		/* DMA transfer size to use */
+		if (priv->hwconfig.hwnumpes > 4) {
+			opbuflo = 9;
+			opbufhi = 10;
+		} else {
+			opbuflo = 7;
+			opbufhi = 8;
+		}
 		val = EIP197_HIA_DSE_CFG_DIS_DEBUG;
-		val |= EIP197_HIA_DxE_CFG_MIN_DATA_SIZE(7) |
-		       EIP197_HIA_DxE_CFG_MAX_DATA_SIZE(8);
+		val |= EIP197_HIA_DxE_CFG_MIN_DATA_SIZE(opbuflo) |
+		       EIP197_HIA_DxE_CFG_MAX_DATA_SIZE(opbufhi);
 		val |= EIP197_HIA_DxE_CFG_DATA_CACHE_CTRL(WR_CACHE_3BITS);
 		val |= EIP197_HIA_DSE_CFG_ALWAYS_BUFFERABLE;
 		/* FIXME: instability issues can occur for EIP97 but disabling
@@ -655,8 +684,8 @@ static int safexcel_hw_init(struct safex
 		writel(0, EIP197_HIA_DSE_THR(priv) + EIP197_HIA_DSE_THR_CTRL(pe));

 		/* Configure the procesing engine thresholds */
-		writel(EIP197_PE_OUT_DBUF_THRES_MIN(7) |
-		       EIP197_PE_OUT_DBUF_THRES_MAX(8),
+		writel(EIP197_PE_OUT_DBUF_THRES_MIN(opbuflo) |
+		       EIP197_PE_OUT_DBUF_THRES_MAX(opbufhi),
 		       EIP197_PE(priv) + EIP197_PE_OUT_DBUF_THRES(pe));

 		/* Processing Engine configuration */
@@ -696,7 +725,7 @@ static int safexcel_hw_init(struct safex
 		writel(0,
 		       EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_PROC_PNTR);

-		writel((EIP197_DEFAULT_RING_SIZE * priv->config.cd_offset) << 2,
+		writel((EIP197_DEFAULT_RING_SIZE * priv->config.cd_offset),
 		       EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_RING_SIZE);
 	}

@@ -719,7 +748,7 @@ static int safexcel_hw_init(struct safex
 		       EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_PROC_PNTR);

 		/* Ring size */
-		writel((EIP197_DEFAULT_RING_SIZE * priv->config.rd_offset) << 2,
+		writel((EIP197_DEFAULT_RING_SIZE * priv->config.rd_offset),
 		       EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_RING_SIZE);
 	}

@@ -736,19 +765,28 @@ static int safexcel_hw_init(struct safex
 	/* Clear any HIA interrupt */
 	writel(GENMASK(30, 20), EIP197_HIA_AIC_G(priv) + EIP197_HIA_AIC_G_ACK);

-	if (priv->flags & SAFEXCEL_HW_EIP197) {
-		eip197_trc_cache_init(priv);
-		priv->flags |= EIP197_TRC_CACHE;
+	if (priv->flags & EIP197_SIMPLE_TRC) {
+		writel(EIP197_STRC_CONFIG_INIT |
+		       EIP197_STRC_CONFIG_LARGE_REC(EIP197_CS_TRC_REC_WC) |
+		       EIP197_STRC_CONFIG_SMALL_REC(EIP197_CS_TRC_REC_WC),
+		       priv->base + EIP197_STRC_CONFIG);
+		writel(EIP197_PE_EIP96_TOKEN_CTRL2_CTX_DONE,
+		       EIP197_PE(priv) + EIP197_PE_EIP96_TOKEN_CTRL2(0));
+	} else if (priv->flags & SAFEXCEL_HW_EIP197) {
+		ret = eip197_trc_cache_init(priv);
+		if (ret)
+			return ret;
+	}

+	if (priv->flags & EIP197_ICE) {
 		ret = eip197_load_firmwares(priv);
 		if (ret)
 			return ret;
 	}

-	safexcel_hw_setup_cdesc_rings(priv);
-	safexcel_hw_setup_rdesc_rings(priv);
-
-	return 0;
+	return safexcel_hw_setup_cdesc_rings(priv) ?:
+	       safexcel_hw_setup_rdesc_rings(priv) ?:
+	       0;
 }

 /* Called with ring's lock taken */
@@ -836,20 +874,24 @@ finalize:
 	spin_unlock_bh(&priv->ring[ring].lock);

 	/* let the RDR know we have pending descriptors */
-	writel((rdesc * priv->config.rd_offset) << 2,
+	writel((rdesc * priv->config.rd_offset),
 	       EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT);

 	/* let the CDR know we have pending descriptors */
-	writel((cdesc * priv->config.cd_offset) << 2,
+	writel((cdesc * priv->config.cd_offset),
 	       EIP197_HIA_CDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT);
 }

 inline int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv,
-				       struct safexcel_result_desc *rdesc)
+				       void *rdp)
 {
-	if (likely((!rdesc->descriptor_overflow) &&
-		   (!rdesc->buffer_overflow) &&
-		   (!rdesc->result_data.error_code)))
+	struct safexcel_result_desc *rdesc = rdp;
+	struct result_data_desc *result_data = rdp + priv->config.res_offset;
+
+	if (likely((!rdesc->last_seg) || /* Rest only valid if last seg! */
+		   ((!rdesc->descriptor_overflow) &&
+		    (!rdesc->buffer_overflow) &&
+		    (!result_data->error_code))))
 		return 0;

 	if (rdesc->descriptor_overflow)
@@ -858,13 +900,14 @@ inline int safexcel_rdesc_check_errors(s
 	if (rdesc->buffer_overflow)
 		dev_err(priv->dev, "Buffer overflow detected");

-	if (rdesc->result_data.error_code & 0x4066) {
+	if (result_data->error_code & 0x4066) {
 		/* Fatal error (bits 1,2,5,6 & 14) */
 		dev_err(priv->dev,
 			"result descriptor error (%x)",
-			rdesc->result_data.error_code);
+			result_data->error_code);
+
 		return -EIO;
-	} else if (rdesc->result_data.error_code &
+	} else if (result_data->error_code &
 		   (BIT(7) | BIT(4) | BIT(3) | BIT(0))) {
 		/*
 		 * Give priority over authentication fails:
@@ -872,7 +915,7 @@ inline int safexcel_rdesc_check_errors(s
 		 * something wrong with the input!
 		 */
 		return -EINVAL;
-	} else if (rdesc->result_data.error_code & BIT(9)) {
+	} else if (result_data->error_code & BIT(9)) {
 		/* Authentication failed */
 		return -EBADMSG;
 	}
@@ -931,16 +974,18 @@ int safexcel_invalidate_cache(struct cry
 {
 	struct safexcel_command_desc *cdesc;
 	struct safexcel_result_desc *rdesc;
+	struct safexcel_token *dmmy;
 	int ret = 0;

 	/* Prepare command descriptor */
-	cdesc = safexcel_add_cdesc(priv, ring, true, true, 0, 0, 0, ctxr_dma);
+	cdesc = safexcel_add_cdesc(priv, ring, true, true, 0, 0, 0, ctxr_dma,
+				   &dmmy);
 	if (IS_ERR(cdesc))
 		return PTR_ERR(cdesc);

 	cdesc->control_data.type = EIP197_TYPE_EXTENDED;
 	cdesc->control_data.options = 0;
-	cdesc->control_data.refresh = 0;
+	cdesc->control_data.context_lo &= ~EIP197_CONTEXT_SIZE_MASK;
 	cdesc->control_data.control0 = CONTEXT_CONTROL_INV_TR;

 	/* Prepare result descriptor */
@@ -1003,7 +1048,7 @@ handle_results:
 acknowledge:
 	if (i)
 		writel(EIP197_xDR_PROC_xD_PKT(i) |
-		       EIP197_xDR_PROC_xD_COUNT(tot_descs * priv->config.rd_offset),
+		       (tot_descs * priv->config.rd_offset),
 		       EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PROC_COUNT);

 	/* If the number of requests overflowed the counter, try to proceed more
@@ -1171,6 +1216,44 @@ static struct safexcel_alg_template *saf
 	&safexcel_alg_xts_aes,
 	&safexcel_alg_gcm,
 	&safexcel_alg_ccm,
+	&safexcel_alg_crc32,
+	&safexcel_alg_cbcmac,
+	&safexcel_alg_xcbcmac,
+	&safexcel_alg_cmac,
+	&safexcel_alg_chacha20,
+	&safexcel_alg_chachapoly,
+	&safexcel_alg_chachapoly_esp,
+	&safexcel_alg_sm3,
+	&safexcel_alg_hmac_sm3,
+	&safexcel_alg_ecb_sm4,
+	&safexcel_alg_cbc_sm4,
+	&safexcel_alg_ofb_sm4,
+	&safexcel_alg_cfb_sm4,
+	&safexcel_alg_ctr_sm4,
+	&safexcel_alg_authenc_hmac_sha1_cbc_sm4,
+	&safexcel_alg_authenc_hmac_sm3_cbc_sm4,
+	&safexcel_alg_authenc_hmac_sha1_ctr_sm4,
+	&safexcel_alg_authenc_hmac_sm3_ctr_sm4,
+	&safexcel_alg_sha3_224,
+	&safexcel_alg_sha3_256,
+	&safexcel_alg_sha3_384,
+	&safexcel_alg_sha3_512,
+	&safexcel_alg_hmac_sha3_224,
+	&safexcel_alg_hmac_sha3_256,
+	&safexcel_alg_hmac_sha3_384,
+	&safexcel_alg_hmac_sha3_512,
+	&safexcel_alg_authenc_hmac_sha1_cbc_des,
+	&safexcel_alg_authenc_hmac_sha256_cbc_des3_ede,
+	&safexcel_alg_authenc_hmac_sha224_cbc_des3_ede,
+	&safexcel_alg_authenc_hmac_sha512_cbc_des3_ede,
+	&safexcel_alg_authenc_hmac_sha384_cbc_des3_ede,
+	&safexcel_alg_authenc_hmac_sha256_cbc_des,
+	&safexcel_alg_authenc_hmac_sha224_cbc_des,
+	&safexcel_alg_authenc_hmac_sha512_cbc_des,
+	&safexcel_alg_authenc_hmac_sha384_cbc_des,
+	&safexcel_alg_rfc4106_gcm,
+	&safexcel_alg_rfc4543_gcm,
+	&safexcel_alg_rfc4309_ccm,
 };

 static int safexcel_register_algorithms(struct safexcel_crypto_priv *priv)
@@ -1240,30 +1323,30 @@ static void safexcel_unregister_algorith

 static void safexcel_configure(struct safexcel_crypto_priv *priv)
 {
-	u32 val, mask = 0;
-
-	val = readl(EIP197_HIA_AIC_G(priv) + EIP197_HIA_OPTIONS);
-
-	/* Read number of PEs from the engine */
-	if (priv->flags & SAFEXCEL_HW_EIP197)
-		/* Wider field width for all EIP197 type engines */
-		mask = EIP197_N_PES_MASK;
-	else
-		/* Narrow field width for EIP97 type engine */
-		mask = EIP97_N_PES_MASK;
-
-	priv->config.pes = (val >> EIP197_N_PES_OFFSET) & mask;
+	u32 mask = BIT(priv->hwconfig.hwdataw) - 1;

-	priv->config.rings = min_t(u32, val & GENMASK(3, 0), max_rings);
+	priv->config.pes = priv->hwconfig.hwnumpes;
+	priv->config.rings = min_t(u32, priv->hwconfig.hwnumrings, max_rings);
+	/* Cannot currently support more rings than we have ring AICs! */
+	priv->config.rings = min_t(u32, priv->config.rings,
+				   priv->hwconfig.hwnumraic);

-	val = (val & GENMASK(27, 25)) >> 25;
-	mask = BIT(val) - 1;
-
-	priv->config.cd_size = (sizeof(struct safexcel_command_desc) / sizeof(u32));
+	priv->config.cd_size = EIP197_CD64_FETCH_SIZE;
 	priv->config.cd_offset = (priv->config.cd_size + mask) & ~mask;
+	priv->config.cdsh_offset = (EIP197_MAX_TOKENS + mask) & ~mask;

-	priv->config.rd_size = (sizeof(struct safexcel_result_desc) / sizeof(u32));
+	/* res token is behind the descr, but ofs must be rounded to buswdth */
+	priv->config.res_offset = (EIP197_RD64_FETCH_SIZE + mask) & ~mask;
+	/* now the size of the descr is this 1st part plus the result struct */
+	priv->config.rd_size = priv->config.res_offset +
+			       EIP197_RD64_RESULT_SIZE;
 	priv->config.rd_offset = (priv->config.rd_size + mask) & ~mask;
+
+	/* convert dwords to bytes */
+	priv->config.cd_offset *= sizeof(u32);
+	priv->config.cdsh_offset *= sizeof(u32);
+	priv->config.rd_offset *= sizeof(u32);
+	priv->config.res_offset *= sizeof(u32);
 }

 static void safexcel_init_register_offsets(struct safexcel_crypto_priv *priv)
@@ -1309,7 +1392,7 @@ static int safexcel_probe_generic(void *
 				  int is_pci_dev)
 {
 	struct device *dev = priv->dev;
-	u32 peid, version, mask, val, hiaopt;
+	u32 peid, version, mask, val, hiaopt, hwopt, peopt;
 	int i, ret, hwctg;

 	priv->context_pool = dmam_pool_create("safexcel-context", dev,
@@ -1371,13 +1454,16 @@ static int safexcel_probe_generic(void *
 	 */
 	version = readl(EIP197_GLOBAL(priv) + EIP197_VERSION);
 	if (((priv->flags & SAFEXCEL_HW_EIP197) &&
-	     (EIP197_REG_LO16(version) != EIP197_VERSION_LE)) ||
+	     (EIP197_REG_LO16(version) != EIP197_VERSION_LE) &&
+	     (EIP197_REG_LO16(version) != EIP196_VERSION_LE)) ||
 	    ((!(priv->flags & SAFEXCEL_HW_EIP197) &&
 	      (EIP197_REG_LO16(version) != EIP97_VERSION_LE)))) {
 		/*
 		 * We did not find the device that matched our initial probing
 		 * (or our initial probing failed) Report appropriate error.
 		 */
+		dev_err(priv->dev, "Probing for EIP97/EIP19x failed - no such device (read %08x)\n",
+			version);
 		return -ENODEV;
 	}

@@ -1385,6 +1471,14 @@ static int safexcel_probe_generic(void *
 	hwctg = version >> 28;
 	peid = version & 255;

+	/* Detect EIP206 processing pipe */
+	version = readl(EIP197_PE(priv) + + EIP197_PE_VERSION(0));
+	if (EIP197_REG_LO16(version) != EIP206_VERSION_LE) {
+		dev_err(priv->dev, "EIP%d: EIP206 not detected\n", peid);
+		return -ENODEV;
+	}
+	priv->hwconfig.ppver = EIP197_VERSION_MASK(version);
+
 	/* Detect EIP96 packet engine and version */
 	version = readl(EIP197_PE(priv) + EIP197_PE_EIP96_VERSION(0));
 	if (EIP197_REG_LO16(version) != EIP96_VERSION_LE) {
@@ -1393,10 +1487,13 @@ static int safexcel_probe_generic(void *
 	}
 	priv->hwconfig.pever = EIP197_VERSION_MASK(version);

+	hwopt = readl(EIP197_GLOBAL(priv) + EIP197_OPTIONS);
 	hiaopt = readl(EIP197_HIA_AIC(priv) + EIP197_HIA_OPTIONS);

 	if (priv->flags & SAFEXCEL_HW_EIP197) {
 		/* EIP197 */
+		peopt = readl(EIP197_PE(priv) + EIP197_PE_OPTIONS(0));
+
 		priv->hwconfig.hwdataw = (hiaopt >> EIP197_HWDATAW_OFFSET) &
 					 EIP197_HWDATAW_MASK;
 		priv->hwconfig.hwcfsize = ((hiaopt >> EIP197_CFSIZE_OFFSET) &
@@ -1405,6 +1502,19 @@ static int safexcel_probe_generic(void *
 		priv->hwconfig.hwrfsize = ((hiaopt >> EIP197_RFSIZE_OFFSET) &
 					   EIP197_RFSIZE_MASK) +
 					  EIP197_RFSIZE_ADJUST;
+		priv->hwconfig.hwnumpes = (hiaopt >> EIP197_N_PES_OFFSET) &
+					  EIP197_N_PES_MASK;
+		priv->hwconfig.hwnumrings = (hiaopt >> EIP197_N_RINGS_OFFSET) &
+					    EIP197_N_RINGS_MASK;
+		if (hiaopt & EIP197_HIA_OPT_HAS_PE_ARB)
+			priv->flags |= EIP197_PE_ARB;
+		if (EIP206_OPT_ICE_TYPE(peopt) == 1)
+			priv->flags |= EIP197_ICE;
+		/* If not a full TRC, then assume simple TRC */
+		if (!(hwopt & EIP197_OPT_HAS_TRC))
+			priv->flags |= EIP197_SIMPLE_TRC;
+		/* EIP197 always has SOME form of TRC */
+		priv->flags |= EIP197_TRC_CACHE;
 	} else {
 		/* EIP97 */
 		priv->hwconfig.hwdataw = (hiaopt >> EIP197_HWDATAW_OFFSET) &
@@ -1413,6 +1523,23 @@ static int safexcel_probe_generic(void *
 					 EIP97_CFSIZE_MASK;
 		priv->hwconfig.hwrfsize = (hiaopt >> EIP97_RFSIZE_OFFSET) &
 					  EIP97_RFSIZE_MASK;
+		priv->hwconfig.hwnumpes = 1; /* by definition */
+		priv->hwconfig.hwnumrings = (hiaopt >> EIP197_N_RINGS_OFFSET) &
+					    EIP197_N_RINGS_MASK;
+	}
+
+	/* Scan for ring AIC's */
+	for (i = 0; i < EIP197_MAX_RING_AIC; i++) {
+		version = readl(EIP197_HIA_AIC_R(priv) +
+				EIP197_HIA_AIC_R_VERSION(i));
+		if (EIP197_REG_LO16(version) != EIP201_VERSION_LE)
+			break;
+	}
+	priv->hwconfig.hwnumraic = i;
+	/* Low-end EIP196 may not have any ring AIC's ... */
+	if (!priv->hwconfig.hwnumraic) {
+		dev_err(priv->dev, "No ring interrupt controller present!\n");
+		return -ENODEV;
 	}

 	/* Get supported algorithms from EIP96 transform engine */
@@ -1420,10 +1547,12 @@ static int safexcel_probe_generic(void *
 				    EIP197_PE_EIP96_OPTIONS(0));

 	/* Print single info line describing what we just detected */
-	dev_info(priv->dev, "EIP%d:%x(%d)-HIA:%x(%d,%d,%d),PE:%x,alg:%08x\n",
-		 peid, priv->hwconfig.hwver, hwctg, priv->hwconfig.hiaver,
-		 priv->hwconfig.hwdataw, priv->hwconfig.hwcfsize,
-		 priv->hwconfig.hwrfsize, priv->hwconfig.pever,
+	dev_info(priv->dev, "EIP%d:%x(%d,%d,%d,%d)-HIA:%x(%d,%d,%d),PE:%x/%x,alg:%08x\n",
+		 peid, priv->hwconfig.hwver, hwctg, priv->hwconfig.hwnumpes,
+		 priv->hwconfig.hwnumrings, priv->hwconfig.hwnumraic,
+		 priv->hwconfig.hiaver, priv->hwconfig.hwdataw,
+		 priv->hwconfig.hwcfsize, priv->hwconfig.hwrfsize,
+		 priv->hwconfig.ppver, priv->hwconfig.pever,
 		 priv->hwconfig.algo_flags);

 	safexcel_configure(priv);
@@ -1547,7 +1676,6 @@ static void safexcel_hw_reset_rings(stru
 	}
 }

-#if IS_ENABLED(CONFIG_OF)
 /* for Device Tree platform driver */

 static int safexcel_probe(struct platform_device *pdev)
@@ -1625,6 +1753,7 @@ static int safexcel_remove(struct platfo
 	safexcel_unregister_algorithms(priv);
 	safexcel_hw_reset_rings(priv);

+	clk_disable_unprepare(priv->reg_clk);
 	clk_disable_unprepare(priv->clk);

 	for (i = 0; i < priv->config.rings; i++)
@@ -1666,9 +1795,7 @@ static struct platform_driver crypto_sa
 		.of_match_table = safexcel_of_match_table,
 	},
 };
-#endif

-#if IS_ENABLED(CONFIG_PCI)
 /* PCIE devices - i.e. Inside Secure development boards */

 static int safexcel_pci_probe(struct pci_dev *pdev,
@@ -1759,7 +1886,7 @@ static int safexcel_pci_probe(struct pci
 	return rc;
 }

-void safexcel_pci_remove(struct pci_dev *pdev)
+static void safexcel_pci_remove(struct pci_dev *pdev)
 {
 	struct safexcel_crypto_priv *priv = pci_get_drvdata(pdev);
 	int i;
@@ -1789,54 +1916,32 @@ static struct pci_driver safexcel_pci_dr
 	.probe = safexcel_pci_probe,
 	.remove = safexcel_pci_remove,
 };
-#endif
-
-/* Unfortunately, we have to resort to global variables here */
-#if IS_ENABLED(CONFIG_PCI)
-int pcireg_rc = -EINVAL; /* Default safe value */
-#endif
-#if IS_ENABLED(CONFIG_OF)
-int ofreg_rc = -EINVAL; /* Default safe value */
-#endif

 static int __init safexcel_init(void)
 {
-#if IS_ENABLED(CONFIG_PCI)
+	int ret;
+
 	/* Register PCI driver */
-	pcireg_rc = pci_register_driver(&safexcel_pci_driver);
-#endif
+	ret = pci_register_driver(&safexcel_pci_driver);

-#if IS_ENABLED(CONFIG_OF)
 	/* Register platform driver */
-	ofreg_rc = platform_driver_register(&crypto_safexcel);
- #if IS_ENABLED(CONFIG_PCI)
-	/* Return success if either PCI or OF registered OK */
-	return pcireg_rc ? ofreg_rc : 0;
- #else
-	return ofreg_rc;
- #endif
-#else
- #if IS_ENABLED(CONFIG_PCI)
-	return pcireg_rc;
- #else
-	return -EINVAL;
- #endif
-#endif
+	if (IS_ENABLED(CONFIG_OF) && !ret) {
+		ret = platform_driver_register(&crypto_safexcel);
+		if (ret)
+			pci_unregister_driver(&safexcel_pci_driver);
+	}
+
+	return ret;
 }

 static void __exit safexcel_exit(void)
 {
-#if IS_ENABLED(CONFIG_OF)
 	/* Unregister platform driver */
-	if (!ofreg_rc)
+	if (IS_ENABLED(CONFIG_OF))
 		platform_driver_unregister(&crypto_safexcel);
-#endif

-#if IS_ENABLED(CONFIG_PCI)
 	/* Unregister PCI driver if successfully registered before */
-	if (!pcireg_rc)
-		pci_unregister_driver(&safexcel_pci_driver);
-#endif
+	pci_unregister_driver(&safexcel_pci_driver);
 }

 module_init(safexcel_init);
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
|
|
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
|
|
@@ -5,18 +5,22 @@
|
|
* Antoine Tenart <antoine.tenart@free-electrons.com>
|
|
*/
|
|
|
|
+#include <asm/unaligned.h>
|
|
#include <linux/device.h>
|
|
#include <linux/dma-mapping.h>
|
|
#include <linux/dmapool.h>
|
|
-
|
|
#include <crypto/aead.h>
|
|
#include <crypto/aes.h>
|
|
#include <crypto/authenc.h>
|
|
+#include <crypto/chacha.h>
|
|
#include <crypto/ctr.h>
|
|
#include <crypto/internal/des.h>
|
|
#include <crypto/gcm.h>
|
|
#include <crypto/ghash.h>
|
|
+#include <crypto/poly1305.h>
|
|
#include <crypto/sha.h>
|
|
+#include <crypto/sm3.h>
|
|
+#include <crypto/sm4.h>
|
|
#include <crypto/xts.h>
|
|
#include <crypto/skcipher.h>
|
|
#include <crypto/internal/aead.h>
|
|
@@ -33,6 +37,8 @@ enum safexcel_cipher_alg {
|
|
SAFEXCEL_DES,
|
|
SAFEXCEL_3DES,
|
|
SAFEXCEL_AES,
|
|
+ SAFEXCEL_CHACHA20,
|
|
+ SAFEXCEL_SM4,
|
|
};
|
|
|
|
struct safexcel_cipher_ctx {
|
|
@@ -41,8 +47,12 @@ struct safexcel_cipher_ctx {
|
|
|
|
u32 mode;
|
|
enum safexcel_cipher_alg alg;
|
|
- bool aead;
|
|
- int xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
|
|
+ u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
|
|
+ u8 xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
|
|
+ u8 aadskip;
|
|
+ u8 blocksz;
|
|
+ u32 ivmask;
|
|
+ u32 ctrinit;
|
|
|
|
__le32 key[16];
|
|
u32 nonce;
|
|
@@ -51,10 +61,11 @@ struct safexcel_cipher_ctx {
|
|
/* All the below is AEAD specific */
|
|
u32 hash_alg;
|
|
u32 state_sz;
|
|
- u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
|
|
- u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
|
|
+ __be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
|
|
+ __be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
|
|
|
|
struct crypto_cipher *hkaes;
|
|
+ struct crypto_aead *fback;
|
|
};
|
|
|
|
struct safexcel_cipher_req {
|
|
@@ -65,206 +76,298 @@ struct safexcel_cipher_req {
|
|
int nr_src, nr_dst;
|
|
};
|
|
|
|
-static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
|
|
- struct safexcel_command_desc *cdesc)
|
|
+static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
|
|
+ struct safexcel_command_desc *cdesc)
|
|
{
|
|
- u32 block_sz = 0;
|
|
-
|
|
if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
|
|
cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
|
|
-
|
|
/* 32 bit nonce */
|
|
cdesc->control_data.token[0] = ctx->nonce;
|
|
/* 64 bit IV part */
|
|
memcpy(&cdesc->control_data.token[1], iv, 8);
|
|
- /* 32 bit counter, start at 1 (big endian!) */
|
|
- cdesc->control_data.token[3] = cpu_to_be32(1);
|
|
-
|
|
- return;
|
|
- } else if (ctx->xcm == EIP197_XCM_MODE_GCM) {
|
|
- cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
|
|
-
|
|
- /* 96 bit IV part */
|
|
- memcpy(&cdesc->control_data.token[0], iv, 12);
|
|
- /* 32 bit counter, start at 1 (big endian!) */
|
|
- cdesc->control_data.token[3] = cpu_to_be32(1);
|
|
-
|
|
- return;
|
|
- } else if (ctx->xcm == EIP197_XCM_MODE_CCM) {
|
|
+ /* 32 bit counter, start at 0 or 1 (big endian!) */
|
|
+ cdesc->control_data.token[3] =
|
|
+ (__force u32)cpu_to_be32(ctx->ctrinit);
|
|
+ return 4;
|
|
+ }
|
|
+ if (ctx->alg == SAFEXCEL_CHACHA20) {
|
|
cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
|
|
-
|
|
- /* Variable length IV part */
|
|
- memcpy(&cdesc->control_data.token[0], iv, 15 - iv[0]);
|
|
- /* Start variable length counter at 0 */
|
|
- memset((u8 *)&cdesc->control_data.token[0] + 15 - iv[0],
|
|
- 0, iv[0] + 1);
|
|
-
|
|
- return;
|
|
+ /* 96 bit nonce part */
|
|
+ memcpy(&cdesc->control_data.token[0], &iv[4], 12);
|
|
+ /* 32 bit counter */
|
|
+ cdesc->control_data.token[3] = *(u32 *)iv;
|
|
+ return 4;
|
|
}
|
|
|
|
- if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
|
|
- switch (ctx->alg) {
|
|
- case SAFEXCEL_DES:
|
|
- block_sz = DES_BLOCK_SIZE;
|
|
- cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
|
|
- break;
|
|
- case SAFEXCEL_3DES:
|
|
- block_sz = DES3_EDE_BLOCK_SIZE;
|
|
- cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
|
|
- break;
|
|
- case SAFEXCEL_AES:
|
|
- block_sz = AES_BLOCK_SIZE;
|
|
- cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
|
|
- break;
|
|
- }
|
|
- memcpy(cdesc->control_data.token, iv, block_sz);
|
|
- }
|
|
+ cdesc->control_data.options |= ctx->ivmask;
|
|
+ memcpy(cdesc->control_data.token, iv, ctx->blocksz);
|
|
+ return ctx->blocksz / sizeof(u32);
|
|
}
|
|
|
|
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
|
|
struct safexcel_command_desc *cdesc,
|
|
+ struct safexcel_token *atoken,
|
|
u32 length)
|
|
{
|
|
struct safexcel_token *token;
|
|
+ int ivlen;
|
|
|
|
- safexcel_cipher_token(ctx, iv, cdesc);
|
|
-
|
|
- /* skip over worst case IV of 4 dwords, no need to be exact */
|
|
- token = (struct safexcel_token *)(cdesc->control_data.token + 4);
|
|
+ ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
|
|
+ if (ivlen == 4) {
|
|
+ /* No space in cdesc, instruction moves to atoken */
|
|
+ cdesc->additional_cdata_size = 1;
|
|
+ token = atoken;
|
|
+ } else {
|
|
+ /* Everything fits in cdesc */
|
|
+ token = (struct safexcel_token *)(cdesc->control_data.token + 2);
|
|
+ /* Need to pad with NOP */
|
|
+ eip197_noop_token(&token[1]);
|
|
+ }
|
|
+
|
|
+ token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
|
|
+ token->packet_length = length;
|
|
+ token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
|
|
+ EIP197_TOKEN_STAT_LAST_HASH;
|
|
+ token->instructions = EIP197_TOKEN_INS_LAST |
|
|
+ EIP197_TOKEN_INS_TYPE_CRYPTO |
|
|
+ EIP197_TOKEN_INS_TYPE_OUTPUT;
|
|
+}
|
|
|
|
- token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
|
|
- token[0].packet_length = length;
|
|
- token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
|
|
- EIP197_TOKEN_STAT_LAST_HASH;
|
|
- token[0].instructions = EIP197_TOKEN_INS_LAST |
|
|
- EIP197_TOKEN_INS_TYPE_CRYPTO |
|
|
- EIP197_TOKEN_INS_TYPE_OUTPUT;
|
|
+static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
|
|
+ struct safexcel_command_desc *cdesc)
|
|
+{
|
|
+ if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
|
|
+ ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
|
|
+ /* 32 bit nonce */
|
|
+ cdesc->control_data.token[0] = ctx->nonce;
|
|
+ /* 64 bit IV part */
|
|
+ memcpy(&cdesc->control_data.token[1], iv, 8);
|
|
+ /* 32 bit counter, start at 0 or 1 (big endian!) */
|
|
+ cdesc->control_data.token[3] =
|
|
+ (__force u32)cpu_to_be32(ctx->ctrinit);
|
|
+ return;
|
|
+ }
|
|
+ if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
|
|
+ /* 96 bit IV part */
|
|
+ memcpy(&cdesc->control_data.token[0], iv, 12);
|
|
+ /* 32 bit counter, start at 0 or 1 (big endian!) */
|
|
+ cdesc->control_data.token[3] =
|
|
+ (__force u32)cpu_to_be32(ctx->ctrinit);
|
|
+ return;
|
|
+ }
|
|
+ /* CBC */
|
|
+ memcpy(cdesc->control_data.token, iv, ctx->blocksz);
|
|
}
|
|
|
|
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
|
|
struct safexcel_command_desc *cdesc,
|
|
+ struct safexcel_token *atoken,
|
|
enum safexcel_cipher_direction direction,
|
|
u32 cryptlen, u32 assoclen, u32 digestsize)
|
|
{
|
|
- struct safexcel_token *token;
|
|
+ struct safexcel_token *aadref;
|
|
+ int atoksize = 2; /* Start with minimum size */
|
|
+ int assocadj = assoclen - ctx->aadskip, aadalign;
|
|
|
|
- safexcel_cipher_token(ctx, iv, cdesc);
|
|
+ /* Always 4 dwords of embedded IV for AEAD modes */
|
|
+ cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
|
|
|
|
- if (direction == SAFEXCEL_ENCRYPT) {
|
|
- /* align end of instruction sequence to end of token */
|
|
- token = (struct safexcel_token *)(cdesc->control_data.token +
|
|
- EIP197_MAX_TOKENS - 13);
|
|
-
|
|
- token[12].opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
- token[12].packet_length = digestsize;
|
|
- token[12].stat = EIP197_TOKEN_STAT_LAST_HASH |
|
|
- EIP197_TOKEN_STAT_LAST_PACKET;
|
|
- token[12].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
|
|
- EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
|
|
- } else {
|
|
+ if (direction == SAFEXCEL_DECRYPT)
|
|
cryptlen -= digestsize;
|
|
|
|
- /* align end of instruction sequence to end of token */
|
|
- token = (struct safexcel_token *)(cdesc->control_data.token +
|
|
- EIP197_MAX_TOKENS - 14);
|
|
-
|
|
- token[12].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
|
|
- token[12].packet_length = digestsize;
|
|
- token[12].stat = EIP197_TOKEN_STAT_LAST_HASH |
|
|
- EIP197_TOKEN_STAT_LAST_PACKET;
|
|
- token[12].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
|
|
-
|
|
- token[13].opcode = EIP197_TOKEN_OPCODE_VERIFY;
|
|
- token[13].packet_length = digestsize |
|
|
- EIP197_TOKEN_HASH_RESULT_VERIFY;
|
|
- token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
|
|
- EIP197_TOKEN_STAT_LAST_PACKET;
|
|
- token[13].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
|
|
- }
|
|
-
|
|
- token[6].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
|
|
- token[6].packet_length = assoclen;
|
|
-
|
|
- if (likely(cryptlen)) {
|
|
- token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
|
|
-
|
|
- token[10].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
|
|
- token[10].packet_length = cryptlen;
|
|
- token[10].stat = EIP197_TOKEN_STAT_LAST_HASH;
|
|
- token[10].instructions = EIP197_TOKEN_INS_LAST |
|
|
- EIP197_TOKEN_INS_TYPE_CRYPTO |
|
|
- EIP197_TOKEN_INS_TYPE_HASH |
|
|
- EIP197_TOKEN_INS_TYPE_OUTPUT;
|
|
- } else if (ctx->xcm != EIP197_XCM_MODE_CCM) {
|
|
- token[6].stat = EIP197_TOKEN_STAT_LAST_HASH;
|
|
- token[6].instructions = EIP197_TOKEN_INS_LAST |
|
|
- EIP197_TOKEN_INS_TYPE_HASH;
|
|
- }
|
|
-
|
|
- if (!ctx->xcm)
|
|
- return;
|
|
-
|
|
- token[8].opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
|
|
- token[8].packet_length = 0;
|
|
- token[8].instructions = AES_BLOCK_SIZE;
|
|
-
|
|
- token[9].opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
- token[9].packet_length = AES_BLOCK_SIZE;
|
|
- token[9].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
|
|
- EIP197_TOKEN_INS_TYPE_CRYPTO;
|
|
-
|
|
- if (ctx->xcm == EIP197_XCM_MODE_GCM) {
|
|
- token[6].instructions = EIP197_TOKEN_INS_LAST |
|
|
- EIP197_TOKEN_INS_TYPE_HASH;
|
|
- } else {
|
|
- u8 *cbcmaciv = (u8 *)&token[1];
|
|
- u32 *aadlen = (u32 *)&token[5];
|
|
-
|
|
+ if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
|
|
/* Construct IV block B0 for the CBC-MAC */
|
|
- token[0].opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
- token[0].packet_length = AES_BLOCK_SIZE +
|
|
- ((assoclen > 0) << 1);
|
|
- token[0].instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
|
|
- EIP197_TOKEN_INS_TYPE_HASH;
|
|
- /* Variable length IV part */
|
|
- memcpy(cbcmaciv, iv, 15 - iv[0]);
|
|
- /* fixup flags byte */
|
|
- cbcmaciv[0] |= ((assoclen > 0) << 6) | ((digestsize - 2) << 2);
|
|
- /* Clear upper bytes of variable message length to 0 */
|
|
- memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
|
|
- /* insert lower 2 bytes of message length */
|
|
- cbcmaciv[14] = cryptlen >> 8;
|
|
- cbcmaciv[15] = cryptlen & 255;
|
|
-
|
|
- if (assoclen) {
|
|
- *aadlen = cpu_to_le32(cpu_to_be16(assoclen));
|
|
- assoclen += 2;
|
|
+ u8 *final_iv = (u8 *)cdesc->control_data.token;
|
|
+ u8 *cbcmaciv = (u8 *)&atoken[1];
|
|
+ __le32 *aadlen = (__le32 *)&atoken[5];
|
|
+
|
|
+ if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
|
|
+ /* Length + nonce */
|
|
+ cdesc->control_data.token[0] = ctx->nonce;
|
|
+ /* Fixup flags byte */
|
|
+ *(__le32 *)cbcmaciv =
|
|
+ cpu_to_le32(ctx->nonce |
|
|
+ ((assocadj > 0) << 6) |
|
|
+ ((digestsize - 2) << 2));
|
|
+ /* 64 bit IV part */
|
|
+ memcpy(&cdesc->control_data.token[1], iv, 8);
|
|
+ memcpy(cbcmaciv + 4, iv, 8);
|
|
+ /* Start counter at 0 */
|
|
+ cdesc->control_data.token[3] = 0;
|
|
+ /* Message length */
|
|
+ *(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
|
|
+ } else {
|
|
+ /* Variable length IV part */
|
|
+ memcpy(final_iv, iv, 15 - iv[0]);
|
|
+ memcpy(cbcmaciv, iv, 15 - iv[0]);
|
|
+ /* Start variable length counter at 0 */
|
|
+ memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
|
|
+ memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
|
|
+ /* fixup flags byte */
|
|
+ cbcmaciv[0] |= ((assocadj > 0) << 6) |
|
|
+ ((digestsize - 2) << 2);
|
|
+ /* insert lower 2 bytes of message length */
|
|
+ cbcmaciv[14] = cryptlen >> 8;
|
|
+ cbcmaciv[15] = cryptlen & 255;
|
|
+ }
|
|
+
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
+ atoken->packet_length = AES_BLOCK_SIZE +
|
|
+ ((assocadj > 0) << 1);
|
|
+ atoken->stat = 0;
|
|
+ atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
|
|
+ EIP197_TOKEN_INS_TYPE_HASH;
|
|
+
|
|
+ if (likely(assocadj)) {
|
|
+ *aadlen = cpu_to_le32((assocadj >> 8) |
|
|
+ (assocadj & 255) << 8);
|
|
+ atoken += 6;
|
|
+ atoksize += 7;
|
|
+ } else {
|
|
+ atoken += 5;
|
|
+ atoksize += 6;
|
|
}
|
|
|
|
- token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
|
|
-
|
|
- /* Align AAD data towards hash engine */
|
|
- token[7].opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
- assoclen &= 15;
|
|
- token[7].packet_length = assoclen ? 16 - assoclen : 0;
|
|
-
|
|
+ /* Process AAD data */
|
|
+ aadref = atoken;
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
|
|
+ atoken->packet_length = assocadj;
|
|
+ atoken->stat = 0;
|
|
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
|
|
+ atoken++;
|
|
+
|
|
+ /* For CCM only, align AAD data towards hash engine */
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
+ aadalign = (assocadj + 2) & 15;
|
|
+ atoken->packet_length = assocadj && aadalign ?
|
|
+ 16 - aadalign :
|
|
+ 0;
|
|
if (likely(cryptlen)) {
|
|
- token[7].instructions = EIP197_TOKEN_INS_TYPE_HASH;
|
|
-
|
|
- /* Align crypto data towards hash engine */
|
|
- token[10].stat = 0;
|
|
+ atoken->stat = 0;
|
|
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
|
|
+ } else {
|
|
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
|
|
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
|
|
+ EIP197_TOKEN_INS_TYPE_HASH;
|
|
+ }
|
|
+ } else {
|
|
+ safexcel_aead_iv(ctx, iv, cdesc);
|
|
|
|
- token[11].opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
- cryptlen &= 15;
|
|
- token[11].packet_length = cryptlen ? 16 - cryptlen : 0;
|
|
- token[11].stat = EIP197_TOKEN_STAT_LAST_HASH;
|
|
- token[11].instructions = EIP197_TOKEN_INS_TYPE_HASH;
|
|
+ /* Process AAD data */
|
|
+ aadref = atoken;
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
|
|
+ atoken->packet_length = assocadj;
|
|
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
|
|
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
|
|
+ EIP197_TOKEN_INS_TYPE_HASH;
|
|
+ }
|
|
+ atoken++;
|
|
+
|
|
+ if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
|
|
+ /* For ESP mode (and not GMAC), skip over the IV */
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
|
|
+ atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
|
|
+ atoken->stat = 0;
|
|
+ atoken->instructions = 0;
|
|
+ atoken++;
|
|
+ atoksize++;
|
|
+ } else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
|
|
+ direction == SAFEXCEL_DECRYPT)) {
|
|
+ /* Poly-chacha decryption needs a dummy NOP here ... */
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
+ atoken->packet_length = 16; /* According to Op Manual */
|
|
+ atoken->stat = 0;
|
|
+ atoken->instructions = 0;
|
|
+ atoken++;
|
|
+ atoksize++;
|
|
+ }
|
|
+
|
|
+ if (ctx->xcm) {
|
|
+ /* For GCM and CCM, obtain enc(Y0) */
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
|
|
+ atoken->packet_length = 0;
|
|
+ atoken->stat = 0;
|
|
+ atoken->instructions = AES_BLOCK_SIZE;
|
|
+ atoken++;
|
|
+
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
+ atoken->packet_length = AES_BLOCK_SIZE;
|
|
+ atoken->stat = 0;
|
|
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
|
|
+ EIP197_TOKEN_INS_TYPE_CRYPTO;
|
|
+ atoken++;
|
|
+ atoksize += 2;
|
|
+ }
|
|
+
|
|
+ if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
|
|
+ /* Fixup stat field for AAD direction instruction */
|
|
+ aadref->stat = 0;
|
|
+
|
|
+ /* Process crypto data */
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
|
|
+ atoken->packet_length = cryptlen;
|
|
+
|
|
+ if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
|
|
+ /* Fixup instruction field for AAD dir instruction */
|
|
+ aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
|
|
+
|
|
+ /* Do not send to crypt engine in case of GMAC */
|
|
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
|
|
+ EIP197_TOKEN_INS_TYPE_HASH |
|
|
+ EIP197_TOKEN_INS_TYPE_OUTPUT;
|
|
+ } else {
|
|
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
|
|
+ EIP197_TOKEN_INS_TYPE_CRYPTO |
|
|
+ EIP197_TOKEN_INS_TYPE_HASH |
|
|
+ EIP197_TOKEN_INS_TYPE_OUTPUT;
|
|
+ }
|
|
+
|
|
+ cryptlen &= 15;
|
|
+ if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
|
|
+ atoken->stat = 0;
|
|
+ /* For CCM only, pad crypto data to the hash engine */
|
|
+ atoken++;
|
|
+ atoksize++;
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
+ atoken->packet_length = 16 - cryptlen;
|
|
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
|
|
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
|
|
} else {
|
|
- token[7].stat = EIP197_TOKEN_STAT_LAST_HASH;
|
|
- token[7].instructions = EIP197_TOKEN_INS_LAST |
|
|
- EIP197_TOKEN_INS_TYPE_HASH;
|
|
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
|
|
}
|
|
+ atoken++;
|
|
+ atoksize++;
|
|
}
|
|
+
|
|
+ if (direction == SAFEXCEL_ENCRYPT) {
|
|
+ /* Append ICV */
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
+ atoken->packet_length = digestsize;
|
|
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
|
|
+ EIP197_TOKEN_STAT_LAST_PACKET;
|
|
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
|
|
+ EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
|
|
+ } else {
|
|
+ /* Extract ICV */
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
|
|
+ atoken->packet_length = digestsize;
|
|
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
|
|
+ EIP197_TOKEN_STAT_LAST_PACKET;
|
|
+ atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
|
|
+ atoken++;
|
|
+ atoksize++;
|
|
+
|
|
+ /* Verify ICV */
|
|
+ atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
|
|
+ atoken->packet_length = digestsize |
|
|
+ EIP197_TOKEN_HASH_RESULT_VERIFY;
|
|
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
|
|
+ EIP197_TOKEN_STAT_LAST_PACKET;
|
|
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
|
|
+ }
|
|
+
|
|
+ /* Fixup length of the token in the command descriptor */
|
|
+ cdesc->additional_cdata_size = atoksize;
|
|
}
|
|
|
|
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
|
|
@@ -277,14 +380,12 @@ static int safexcel_skcipher_aes_setkey(
|
|
int ret, i;
|
|
|
|
ret = aes_expandkey(&aes, key, len);
|
|
- if (ret) {
|
|
- crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
|
|
+ if (ret)
|
|
return ret;
|
|
- }
|
|
|
|
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
|
|
for (i = 0; i < len / sizeof(u32); i++) {
|
|
- if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
|
|
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
|
|
ctx->base.needs_inv = true;
|
|
break;
|
|
}
|
|
@@ -309,43 +410,57 @@ static int safexcel_aead_setkey(struct c
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
|
struct crypto_authenc_keys keys;
|
|
struct crypto_aes_ctx aes;
|
|
- int err = -EINVAL;
|
|
+ int err = -EINVAL, i;
|
|
|
|
- if (crypto_authenc_extractkeys(&keys, key, len) != 0)
|
|
+ if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
|
|
goto badkey;
|
|
|
|
if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
|
|
- /* Minimum keysize is minimum AES key size + nonce size */
|
|
- if (keys.enckeylen < (AES_MIN_KEY_SIZE +
|
|
- CTR_RFC3686_NONCE_SIZE))
|
|
+ /* Must have at least space for the nonce here */
|
|
+ if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
|
|
goto badkey;
|
|
/* last 4 bytes of key are the nonce! */
|
|
ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
|
|
CTR_RFC3686_NONCE_SIZE);
|
|
/* exclude the nonce here */
|
|
- keys.enckeylen -= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
|
|
+ keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
|
|
}
|
|
|
|
/* Encryption key */
|
|
switch (ctx->alg) {
|
|
+ case SAFEXCEL_DES:
|
|
+ err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
|
|
+ if (unlikely(err))
|
|
+ goto badkey;
|
|
+ break;
|
|
case SAFEXCEL_3DES:
|
|
err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
|
|
if (unlikely(err))
|
|
- goto badkey_expflags;
|
|
+ goto badkey;
|
|
break;
|
|
case SAFEXCEL_AES:
|
|
err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
|
|
if (unlikely(err))
|
|
goto badkey;
|
|
break;
|
|
+ case SAFEXCEL_SM4:
|
|
+ if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
|
|
+ goto badkey;
|
|
+ break;
|
|
default:
|
|
dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
|
|
goto badkey;
|
|
}
|
|
|
|
- if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
|
|
- memcmp(ctx->key, keys.enckey, keys.enckeylen))
|
|
- ctx->base.needs_inv = true;
|
|
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
|
|
+ for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
|
|
+ if (le32_to_cpu(ctx->key[i]) !=
|
|
+ ((u32 *)keys.enckey)[i]) {
|
|
+ ctx->base.needs_inv = true;
|
|
+ break;
|
|
+ }
|
|
+ }
|
|
+ }
|
|
|
|
/* Auth key */
|
|
switch (ctx->hash_alg) {
|
|
@@ -374,21 +489,24 @@ static int safexcel_aead_setkey(struct c
|
|
keys.authkeylen, &istate, &ostate))
|
|
goto badkey;
|
|
break;
|
|
+ case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
|
|
+ if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
|
|
+ keys.authkeylen, &istate, &ostate))
|
|
+ goto badkey;
|
|
+ break;
|
|
default:
|
|
dev_err(priv->dev, "aead: unsupported hash algorithm\n");
|
|
goto badkey;
|
|
}
|
|
|
|
- crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
|
|
- CRYPTO_TFM_RES_MASK);
|
|
-
|
|
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
|
|
(memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
|
|
memcmp(ctx->opad, ostate.state, ctx->state_sz)))
|
|
ctx->base.needs_inv = true;
|
|
|
|
/* Now copy the keys into the context */
|
|
- memcpy(ctx->key, keys.enckey, keys.enckeylen);
|
|
+ for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
|
|
+ ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
|
|
ctx->key_len = keys.enckeylen;
|
|
|
|
memcpy(ctx->ipad, &istate.state, ctx->state_sz);
|
|
@@ -398,8 +516,6 @@ static int safexcel_aead_setkey(struct c
|
|
return 0;
|
|
|
|
badkey:
|
|
- crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
|
|
-badkey_expflags:
|
|
memzero_explicit(&keys, sizeof(keys));
|
|
return err;
|
|
}
|
|
@@ -423,6 +539,17 @@ static int safexcel_context_control(stru
|
|
CONTEXT_CONTROL_DIGEST_XCM |
|
|
ctx->hash_alg |
|
|
CONTEXT_CONTROL_SIZE(ctrl_size);
|
|
+ } else if (ctx->alg == SAFEXCEL_CHACHA20) {
|
|
+ /* Chacha20-Poly1305 */
|
|
+ cdesc->control_data.control0 =
|
|
+ CONTEXT_CONTROL_KEY_EN |
|
|
+ CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
|
|
+ (sreq->direction == SAFEXCEL_ENCRYPT ?
|
|
+ CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
|
|
+ CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
|
|
+ ctx->hash_alg |
|
|
+ CONTEXT_CONTROL_SIZE(ctrl_size);
|
|
+ return 0;
|
|
} else {
|
|
ctrl_size += ctx->state_sz / sizeof(u32) * 2;
|
|
cdesc->control_data.control0 =
|
|
@@ -431,17 +558,21 @@ static int safexcel_context_control(stru
|
|
ctx->hash_alg |
|
|
CONTEXT_CONTROL_SIZE(ctrl_size);
|
|
}
|
|
- if (sreq->direction == SAFEXCEL_ENCRYPT)
|
|
- cdesc->control_data.control0 |=
|
|
- (ctx->xcm == EIP197_XCM_MODE_CCM) ?
|
|
- CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT :
|
|
- CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
|
|
|
|
+ if (sreq->direction == SAFEXCEL_ENCRYPT &&
|
|
+ (ctx->xcm == EIP197_XCM_MODE_CCM ||
|
|
+ ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
|
|
+ cdesc->control_data.control0 |=
|
|
+ CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
|
|
+ else if (sreq->direction == SAFEXCEL_ENCRYPT)
|
|
+ cdesc->control_data.control0 |=
|
|
+ CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
|
|
+ else if (ctx->xcm == EIP197_XCM_MODE_CCM)
|
|
+ cdesc->control_data.control0 |=
|
|
+ CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
|
|
else
|
|
cdesc->control_data.control0 |=
|
|
- (ctx->xcm == EIP197_XCM_MODE_CCM) ?
|
|
- CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN :
|
|
- CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
|
|
+ CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
|
|
} else {
|
|
if (sreq->direction == SAFEXCEL_ENCRYPT)
|
|
cdesc->control_data.control0 =
|
|
@@ -480,6 +611,12 @@ static int safexcel_context_control(stru
|
|
ctx->key_len >> ctx->xts);
|
|
return -EINVAL;
|
|
}
|
|
+ } else if (ctx->alg == SAFEXCEL_CHACHA20) {
|
|
+ cdesc->control_data.control0 |=
|
|
+ CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
|
|
+ } else if (ctx->alg == SAFEXCEL_SM4) {
|
|
+ cdesc->control_data.control0 |=
|
|
+ CONTEXT_CONTROL_CRYPTO_ALG_SM4;
|
|
}
|
|
|
|
return 0;
|
|
@@ -563,6 +700,7 @@ static int safexcel_send_req(struct cryp
|
|
unsigned int totlen;
|
|
unsigned int totlen_src = cryptlen + assoclen;
|
|
unsigned int totlen_dst = totlen_src;
|
|
+ struct safexcel_token *atoken;
|
|
int n_cdesc = 0, n_rdesc = 0;
|
|
int queued, i, ret = 0;
|
|
bool first = true;
|
|
@@ -637,56 +775,60 @@ static int safexcel_send_req(struct cryp
|
|
|
|
memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
|
|
|
|
- /* The EIP cannot deal with zero length input packets! */
|
|
- if (totlen == 0)
|
|
- totlen = 1;
|
|
+ if (!totlen) {
|
|
+ /*
|
|
+ * The EIP97 cannot deal with zero length input packets!
|
|
+ * So stuff a dummy command descriptor indicating a 1 byte
|
|
+ * (dummy) input packet, using the context record as source.
|
|
+ */
|
|
+ first_cdesc = safexcel_add_cdesc(priv, ring,
|
|
+ 1, 1, ctx->base.ctxr_dma,
|
|
+ 1, 1, ctx->base.ctxr_dma,
|
|
+ &atoken);
|
|
+ if (IS_ERR(first_cdesc)) {
|
|
+ /* No space left in the command descriptor ring */
|
|
+ ret = PTR_ERR(first_cdesc);
|
|
+ goto cdesc_rollback;
|
|
+ }
|
|
+ n_cdesc = 1;
|
|
+ goto skip_cdesc;
|
|
+ }
|
|
|
|
/* command descriptors */
|
|
for_each_sg(src, sg, sreq->nr_src, i) {
|
|
int len = sg_dma_len(sg);
|
|
|
|
/* Do not overflow the request */
|
|
- if (queued - len < 0)
|
|
+ if (queued < len)
|
|
len = queued;
|
|
|
|
cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
|
|
!(queued - len),
|
|
sg_dma_address(sg), len, totlen,
|
|
- ctx->base.ctxr_dma);
|
|
+ ctx->base.ctxr_dma, &atoken);
|
|
if (IS_ERR(cdesc)) {
|
|
/* No space left in the command descriptor ring */
|
|
ret = PTR_ERR(cdesc);
|
|
goto cdesc_rollback;
|
|
}
|
|
- n_cdesc++;
|
|
|
|
- if (n_cdesc == 1) {
|
|
+ if (!n_cdesc)
|
|
first_cdesc = cdesc;
|
|
- }
|
|
|
|
+ n_cdesc++;
|
|
queued -= len;
|
|
if (!queued)
|
|
break;
|
|
}
|
|
-
|
|
- if (unlikely(!n_cdesc)) {
|
|
- /*
|
|
- * Special case: zero length input buffer.
|
|
- * The engine always needs the 1st command descriptor, however!
|
|
- */
|
|
- first_cdesc = safexcel_add_cdesc(priv, ring, 1, 1, 0, 0, totlen,
|
|
- ctx->base.ctxr_dma);
|
|
- n_cdesc = 1;
|
|
- }
|
|
-
|
|
+skip_cdesc:
|
|
/* Add context control words and token to first command descriptor */
|
|
safexcel_context_control(ctx, base, sreq, first_cdesc);
|
|
if (ctx->aead)
|
|
- safexcel_aead_token(ctx, iv, first_cdesc,
|
|
+ safexcel_aead_token(ctx, iv, first_cdesc, atoken,
|
|
sreq->direction, cryptlen,
|
|
assoclen, digestsize);
|
|
else
|
|
- safexcel_skcipher_token(ctx, iv, first_cdesc,
|
|
+ safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
|
|
cryptlen);
|
|
|
|
/* result descriptors */
|
|
@@ -1073,6 +1215,8 @@ static int safexcel_skcipher_cra_init(st
|
|
|
|
ctx->base.send = safexcel_skcipher_send;
|
|
ctx->base.handle_result = safexcel_skcipher_handle_result;
|
|
+ ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
|
|
+ ctx->ctrinit = 1;
|
|
return 0;
|
|
}
|
|
|
|
@@ -1137,6 +1281,8 @@ static int safexcel_skcipher_aes_ecb_cra
|
|
safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_AES;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
+ ctx->blocksz = 0;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
return 0;
}

@@ -1171,6 +1317,7 @@ static int safexcel_skcipher_aes_cbc_cra

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
return 0;
}
@@ -1207,6 +1354,7 @@ static int safexcel_skcipher_aes_cfb_cra

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
return 0;
}
@@ -1243,6 +1391,7 @@ static int safexcel_skcipher_aes_ofb_cra

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
return 0;
}
@@ -1288,14 +1437,12 @@ static int safexcel_skcipher_aesctr_setk
/* exclude the nonce here */
keylen = len - CTR_RFC3686_NONCE_SIZE;
ret = aes_expandkey(&aes, key, keylen);
- if (ret) {
- crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (ret)
return ret;
- }

if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
for (i = 0; i < keylen / sizeof(u32); i++) {
- if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
ctx->base.needs_inv = true;
break;
}
@@ -1317,6 +1464,7 @@ static int safexcel_skcipher_aes_ctr_cra

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
return 0;
}
@@ -1352,6 +1500,7 @@ static int safexcel_des_setkey(struct cr
unsigned int len)
{
struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
+ struct safexcel_crypto_priv *priv = ctx->priv;
int ret;

ret = verify_skcipher_des_key(ctfm, key);
@@ -1359,7 +1508,7 @@ static int safexcel_des_setkey(struct cr
return ret;

/* if context exits and key changed, need to invalidate it */
- if (ctx->base.ctxr_dma)
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
if (memcmp(ctx->key, key, len))
ctx->base.needs_inv = true;

@@ -1375,6 +1524,8 @@ static int safexcel_skcipher_des_cbc_cra

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_DES;
+ ctx->blocksz = DES_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
return 0;
}
@@ -1412,6 +1563,8 @@ static int safexcel_skcipher_des_ecb_cra
safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_DES;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
+ ctx->blocksz = 0;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
return 0;
}

@@ -1444,6 +1597,7 @@ static int safexcel_des3_ede_setkey(stru
const u8 *key, unsigned int len)
{
struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
+ struct safexcel_crypto_priv *priv = ctx->priv;
int err;

err = verify_skcipher_des3_key(ctfm, key);
@@ -1451,13 +1605,11 @@ static int safexcel_des3_ede_setkey(stru
return err;

/* if context exits and key changed, need to invalidate it */
- if (ctx->base.ctxr_dma) {
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
if (memcmp(ctx->key, key, len))
ctx->base.needs_inv = true;
- }

memcpy(ctx->key, key, len);
-
ctx->key_len = len;

return 0;
@@ -1469,6 +1621,8 @@ static int safexcel_skcipher_des3_cbc_cr

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_3DES;
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
return 0;
}
@@ -1506,6 +1660,8 @@ static int safexcel_skcipher_des3_ecb_cr
safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_3DES;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
+ ctx->blocksz = 0;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
return 0;
}

@@ -1561,6 +1717,9 @@ static int safexcel_aead_cra_init(struct
ctx->priv = tmpl->priv;

ctx->alg = SAFEXCEL_AES; /* default */
+ ctx->blocksz = AES_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
+ ctx->ctrinit = 1;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
ctx->aead = true;
ctx->base.send = safexcel_aead_send;
@@ -1749,6 +1908,8 @@ static int safexcel_aead_sha1_des3_cra_i

safexcel_aead_sha1_cra_init(tfm);
ctx->alg = SAFEXCEL_3DES; /* override default */
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
return 0;
}

@@ -1777,6 +1938,330 @@ struct safexcel_alg_template safexcel_al
},
};

+static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha256_cra_init(tfm);
+ ctx->alg = SAFEXCEL_3DES; /* override default */
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA256_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha256_des3_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha224_cra_init(tfm);
+ ctx->alg = SAFEXCEL_3DES; /* override default */
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA224_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha224_des3_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha512_cra_init(tfm);
+ ctx->alg = SAFEXCEL_3DES; /* override default */
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA512_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha512_des3_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha384_cra_init(tfm);
+ ctx->alg = SAFEXCEL_3DES; /* override default */
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA384_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha384_des3_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha1_cra_init(tfm);
+ ctx->alg = SAFEXCEL_DES; /* override default */
+ ctx->blocksz = DES_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES_BLOCK_SIZE,
+ .maxauthsize = SHA1_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha1),cbc(des))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha1_des_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha256_cra_init(tfm);
+ ctx->alg = SAFEXCEL_DES; /* override default */
+ ctx->blocksz = DES_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES_BLOCK_SIZE,
+ .maxauthsize = SHA256_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha256),cbc(des))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha256_des_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha224_cra_init(tfm);
+ ctx->alg = SAFEXCEL_DES; /* override default */
+ ctx->blocksz = DES_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES_BLOCK_SIZE,
+ .maxauthsize = SHA224_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha224),cbc(des))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha224_des_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha512_cra_init(tfm);
+ ctx->alg = SAFEXCEL_DES; /* override default */
+ ctx->blocksz = DES_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES_BLOCK_SIZE,
+ .maxauthsize = SHA512_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha512),cbc(des))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha512_des_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha384_cra_init(tfm);
+ ctx->alg = SAFEXCEL_DES; /* override default */
+ ctx->blocksz = DES_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES_BLOCK_SIZE,
+ .maxauthsize = SHA384_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha384),cbc(des))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha384_des_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
@@ -1965,14 +2450,12 @@ static int safexcel_skcipher_aesxts_setk
/* Only half of the key data is cipher key */
keylen = (len >> 1);
ret = aes_expandkey(&aes, key, keylen);
- if (ret) {
- crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (ret)
return ret;
- }

if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
for (i = 0; i < keylen / sizeof(u32); i++) {
- if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
ctx->base.needs_inv = true;
break;
}
@@ -1984,15 +2467,13 @@ static int safexcel_skcipher_aesxts_setk

/* The other half is the tweak key */
ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
- if (ret) {
- crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (ret)
return ret;
- }

if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
for (i = 0; i < keylen / sizeof(u32); i++) {
- if (ctx->key[i + keylen / sizeof(u32)] !=
- cpu_to_le32(aes.key_enc[i])) {
+ if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
+ aes.key_enc[i]) {
ctx->base.needs_inv = true;
break;
}
@@ -2015,6 +2496,7 @@ static int safexcel_skcipher_aes_xts_cra

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
ctx->xts = 1;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
return 0;
@@ -2075,14 +2557,13 @@ static int safexcel_aead_gcm_setkey(stru

ret = aes_expandkey(&aes, key, len);
if (ret) {
- crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
memzero_explicit(&aes, sizeof(aes));
return ret;
}

if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
for (i = 0; i < len / sizeof(u32); i++) {
- if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
ctx->base.needs_inv = true;
break;
}
@@ -2099,8 +2580,6 @@ static int safexcel_aead_gcm_setkey(stru
crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
CRYPTO_TFM_REQ_MASK);
ret = crypto_cipher_setkey(ctx->hkaes, key, len);
- crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
- CRYPTO_TFM_RES_MASK);
if (ret)
return ret;

@@ -2109,7 +2588,7 @@ static int safexcel_aead_gcm_setkey(stru

if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
- if (ctx->ipad[i] != cpu_to_be32(hashkey[i])) {
+ if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
ctx->base.needs_inv = true;
break;
}
@@ -2135,10 +2614,7 @@ static int safexcel_aead_gcm_cra_init(st
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */

ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
- if (IS_ERR(ctx->hkaes))
- return PTR_ERR(ctx->hkaes);
-
- return 0;
+ return PTR_ERR_OR_ZERO(ctx->hkaes);
}

static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
@@ -2192,14 +2668,13 @@ static int safexcel_aead_ccm_setkey(stru

ret = aes_expandkey(&aes, key, len);
if (ret) {
- crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
memzero_explicit(&aes, sizeof(aes));
return ret;
}

if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
for (i = 0; i < len / sizeof(u32); i++) {
- if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
ctx->base.needs_inv = true;
break;
}
@@ -2235,6 +2710,7 @@ static int safexcel_aead_ccm_cra_init(st
ctx->state_sz = 3 * AES_BLOCK_SIZE;
ctx->xcm = EIP197_XCM_MODE_CCM;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
+ ctx->ctrinit = 0;
return 0;
}

@@ -2301,5 +2777,949 @@ struct safexcel_alg_template safexcel_al
.cra_exit = safexcel_aead_cra_exit,
.cra_module = THIS_MODULE,
},
+ },
+};
+
+static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
+ const u8 *key)
+{
+ struct safexcel_crypto_priv *priv = ctx->priv;
+
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
+ if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
+ ctx->base.needs_inv = true;
+
+ memcpy(ctx->key, key, CHACHA_KEY_SIZE);
+ ctx->key_len = CHACHA_KEY_SIZE;
+}
+
+static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
+ const u8 *key, unsigned int len)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
+
+ if (len != CHACHA_KEY_SIZE)
+ return -EINVAL;
+
+ safexcel_chacha20_setkey(ctx, key);
+
+ return 0;
+}
+
+static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_skcipher_cra_init(tfm);
+ ctx->alg = SAFEXCEL_CHACHA20;
+ ctx->ctrinit = 0;
+ ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_chacha20 = {
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+ .algo_mask = SAFEXCEL_ALG_CHACHA20,
+ .alg.skcipher = {
+ .setkey = safexcel_skcipher_chacha20_setkey,
+ .encrypt = safexcel_encrypt,
+ .decrypt = safexcel_decrypt,
+ .min_keysize = CHACHA_KEY_SIZE,
+ .max_keysize = CHACHA_KEY_SIZE,
+ .ivsize = CHACHA_IV_SIZE,
+ .base = {
+ .cra_name = "chacha20",
+ .cra_driver_name = "safexcel-chacha20",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_skcipher_chacha20_cra_init,
+ .cra_exit = safexcel_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
+ const u8 *key, unsigned int len)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
+
+ if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
+ len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
+ /* ESP variant has nonce appended to key */
+ len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
+ ctx->nonce = *(u32 *)(key + len);
+ }
+ if (len != CHACHA_KEY_SIZE)
+ return -EINVAL;
+
+ safexcel_chacha20_setkey(ctx, key);
+
+ return 0;
+}
+
+static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
+ unsigned int authsize)
+{
+ if (authsize != POLY1305_DIGEST_SIZE)
+ return -EINVAL;
+ return 0;
+}
+
+static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
+ enum safexcel_cipher_direction dir)
+{
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
+ struct crypto_tfm *tfm = crypto_aead_tfm(aead);
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct aead_request *subreq = aead_request_ctx(req);
+ u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
+ int ret = 0;
+
+ /*
+ * Instead of wasting time detecting umpteen silly corner cases,
+ * just dump all "small" requests to the fallback implementation.
+ * HW would not be faster on such small requests anyway.
+ */
+ if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
+ req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
+ req->cryptlen > POLY1305_DIGEST_SIZE)) {
+ return safexcel_queue_req(&req->base, creq, dir);
+ }
+
+ /* HW cannot do full (AAD+payload) zero length, use fallback */
+ memcpy(key, ctx->key, CHACHA_KEY_SIZE);
+ if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
+ /* ESP variant has nonce appended to the key */
+ key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
+ ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
+ CHACHA_KEY_SIZE +
+ EIP197_AEAD_IPSEC_NONCE_SIZE);
+ } else {
+ ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
+ CHACHA_KEY_SIZE);
+ }
+ if (ret) {
+ crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
+ crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
+ CRYPTO_TFM_REQ_MASK);
+ return ret;
+ }
+
+ aead_request_set_tfm(subreq, ctx->fback);
+ aead_request_set_callback(subreq, req->base.flags, req->base.complete,
+ req->base.data);
+ aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
+ req->iv);
+ aead_request_set_ad(subreq, req->assoclen);
+
+ return (dir == SAFEXCEL_ENCRYPT) ?
+ crypto_aead_encrypt(subreq) :
+ crypto_aead_decrypt(subreq);
+}
+
+static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
+{
+ return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
+}
+
+static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
+{
+ return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
+}
+
+static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
+{
+ struct crypto_aead *aead = __crypto_aead_cast(tfm);
+ struct aead_alg *alg = crypto_aead_alg(aead);
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_cra_init(tfm);
+
+ /* Allocate fallback implementation */
+ ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
+ CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_NEED_FALLBACK);
+ if (IS_ERR(ctx->fback))
+ return PTR_ERR(ctx->fback);
+
+ crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
+ sizeof(struct aead_request) +
+ crypto_aead_reqsize(ctx->fback)));
+
+ return 0;
+}
+
+static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_fallback_cra_init(tfm);
+ ctx->alg = SAFEXCEL_CHACHA20;
+ ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
+ CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
+ ctx->ctrinit = 0;
+ ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
+ ctx->state_sz = 0; /* Precomputed by HW */
+ return 0;
+}
+
+static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ crypto_free_aead(ctx->fback);
+ safexcel_aead_cra_exit(tfm);
+}
+
+struct safexcel_alg_template safexcel_alg_chachapoly = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
+ .alg.aead = {
+ .setkey = safexcel_aead_chachapoly_setkey,
+ .setauthsize = safexcel_aead_chachapoly_setauthsize,
+ .encrypt = safexcel_aead_chachapoly_encrypt,
+ .decrypt = safexcel_aead_chachapoly_decrypt,
+ .ivsize = CHACHAPOLY_IV_SIZE,
+ .maxauthsize = POLY1305_DIGEST_SIZE,
+ .base = {
+ .cra_name = "rfc7539(chacha20,poly1305)",
+ .cra_driver_name = "safexcel-chacha20-poly1305",
+ /* +1 to put it above HW chacha + SW poly */
+ .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_chachapoly_cra_init,
+ .cra_exit = safexcel_aead_fallback_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ ret = safexcel_aead_chachapoly_cra_init(tfm);
+ ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
+ ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
+ return ret;
+}
+
+struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
+ .alg.aead = {
+ .setkey = safexcel_aead_chachapoly_setkey,
+ .setauthsize = safexcel_aead_chachapoly_setauthsize,
+ .encrypt = safexcel_aead_chachapoly_encrypt,
+ .decrypt = safexcel_aead_chachapoly_decrypt,
+ .ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
+ .maxauthsize = POLY1305_DIGEST_SIZE,
+ .base = {
+ .cra_name = "rfc7539esp(chacha20,poly1305)",
+ .cra_driver_name = "safexcel-chacha20-poly1305-esp",
+ /* +1 to put it above HW chacha + SW poly */
+ .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_chachapolyesp_cra_init,
+ .cra_exit = safexcel_aead_fallback_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
+ const u8 *key, unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct safexcel_crypto_priv *priv = ctx->priv;
+
+ if (len != SM4_KEY_SIZE)
+ return -EINVAL;
+
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
+ if (memcmp(ctx->key, key, SM4_KEY_SIZE))
+ ctx->base.needs_inv = true;
+
+ memcpy(ctx->key, key, SM4_KEY_SIZE);
+ ctx->key_len = SM4_KEY_SIZE;
+
+ return 0;
+}
+
+static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
+{
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
+ if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
+ return -EINVAL;
+ else
+ return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+ SAFEXCEL_ENCRYPT);
+}
+
+static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
+{
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
+ if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
+ return -EINVAL;
+ else
+ return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
+ SAFEXCEL_DECRYPT);
+}
+
+static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_skcipher_cra_init(tfm);
+ ctx->alg = SAFEXCEL_SM4;
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
+ ctx->blocksz = 0;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+ .algo_mask = SAFEXCEL_ALG_SM4,
+ .alg.skcipher = {
+ .setkey = safexcel_skcipher_sm4_setkey,
+ .encrypt = safexcel_sm4_blk_encrypt,
+ .decrypt = safexcel_sm4_blk_decrypt,
+ .min_keysize = SM4_KEY_SIZE,
+ .max_keysize = SM4_KEY_SIZE,
+ .base = {
+ .cra_name = "ecb(sm4)",
+ .cra_driver_name = "safexcel-ecb-sm4",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SM4_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_skcipher_sm4_ecb_cra_init,
+ .cra_exit = safexcel_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_skcipher_cra_init(tfm);
+ ctx->alg = SAFEXCEL_SM4;
+ ctx->blocksz = SM4_BLOCK_SIZE;
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+ .algo_mask = SAFEXCEL_ALG_SM4,
+ .alg.skcipher = {
+ .setkey = safexcel_skcipher_sm4_setkey,
+ .encrypt = safexcel_sm4_blk_encrypt,
+ .decrypt = safexcel_sm4_blk_decrypt,
+ .min_keysize = SM4_KEY_SIZE,
+ .max_keysize = SM4_KEY_SIZE,
+ .ivsize = SM4_BLOCK_SIZE,
+ .base = {
+ .cra_name = "cbc(sm4)",
+ .cra_driver_name = "safexcel-cbc-sm4",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SM4_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_skcipher_sm4_cbc_cra_init,
+ .cra_exit = safexcel_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_skcipher_cra_init(tfm);
+ ctx->alg = SAFEXCEL_SM4;
+ ctx->blocksz = SM4_BLOCK_SIZE;
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
+ .alg.skcipher = {
+ .setkey = safexcel_skcipher_sm4_setkey,
+ .encrypt = safexcel_encrypt,
+ .decrypt = safexcel_decrypt,
+ .min_keysize = SM4_KEY_SIZE,
+ .max_keysize = SM4_KEY_SIZE,
+ .ivsize = SM4_BLOCK_SIZE,
+ .base = {
+ .cra_name = "ofb(sm4)",
+ .cra_driver_name = "safexcel-ofb-sm4",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_skcipher_sm4_ofb_cra_init,
+ .cra_exit = safexcel_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_skcipher_cra_init(tfm);
+ ctx->alg = SAFEXCEL_SM4;
+ ctx->blocksz = SM4_BLOCK_SIZE;
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
+ .alg.skcipher = {
+ .setkey = safexcel_skcipher_sm4_setkey,
+ .encrypt = safexcel_encrypt,
+ .decrypt = safexcel_decrypt,
+ .min_keysize = SM4_KEY_SIZE,
+ .max_keysize = SM4_KEY_SIZE,
+ .ivsize = SM4_BLOCK_SIZE,
+ .base = {
+ .cra_name = "cfb(sm4)",
+ .cra_driver_name = "safexcel-cfb-sm4",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_skcipher_sm4_cfb_cra_init,
+ .cra_exit = safexcel_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
+ const u8 *key, unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ /* last 4 bytes of key are the nonce! */
+ ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
+ /* exclude the nonce here */
+ len -= CTR_RFC3686_NONCE_SIZE;
+
+ return safexcel_skcipher_sm4_setkey(ctfm, key, len);
+}
+
+static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_skcipher_cra_init(tfm);
+ ctx->alg = SAFEXCEL_SM4;
+ ctx->blocksz = SM4_BLOCK_SIZE;
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
+ .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+ .algo_mask = SAFEXCEL_ALG_SM4,
+ .alg.skcipher = {
+ .setkey = safexcel_skcipher_sm4ctr_setkey,
+ .encrypt = safexcel_encrypt,
+ .decrypt = safexcel_decrypt,
+ /* Add nonce size */
+ .min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
+ .max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
+ .ivsize = CTR_RFC3686_IV_SIZE,
+ .base = {
+ .cra_name = "rfc3686(ctr(sm4))",
+ .cra_driver_name = "safexcel-ctr-sm4",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_skcipher_sm4_ctr_cra_init,
+ .cra_exit = safexcel_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
+{
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
+ if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
+ return -EINVAL;
+
+ return safexcel_queue_req(&req->base, aead_request_ctx(req),
+ SAFEXCEL_ENCRYPT);
+}
+
+static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
+{
+ struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
+ if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
+ return -EINVAL;
+
+ return safexcel_queue_req(&req->base, aead_request_ctx(req),
+ SAFEXCEL_DECRYPT);
+}
+
+static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_cra_init(tfm);
+ ctx->alg = SAFEXCEL_SM4;
+ ctx->blocksz = SM4_BLOCK_SIZE;
+ ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
+ ctx->state_sz = SHA1_DIGEST_SIZE;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_sm4_blk_encrypt,
+ .decrypt = safexcel_aead_sm4_blk_decrypt,
+ .ivsize = SM4_BLOCK_SIZE,
+ .maxauthsize = SHA1_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha1),cbc(sm4))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SM4_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
+ const u8 *key, unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ /* Keep fallback cipher synchronized */
+ return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
+ safexcel_aead_setkey(ctfm, key, len);
+}
+
+static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
+ unsigned int authsize)
+{
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ /* Keep fallback cipher synchronized */
+ return crypto_aead_setauthsize(ctx->fback, authsize);
+}
+
+static int safexcel_aead_fallback_crypt(struct aead_request *req,
+ enum safexcel_cipher_direction dir)
+{
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
+ struct crypto_tfm *tfm = crypto_aead_tfm(aead);
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct aead_request *subreq = aead_request_ctx(req);
+
+ aead_request_set_tfm(subreq, ctx->fback);
+ aead_request_set_callback(subreq, req->base.flags, req->base.complete,
+ req->base.data);
+ aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
+ req->iv);
+ aead_request_set_ad(subreq, req->assoclen);
+
+ return (dir == SAFEXCEL_ENCRYPT) ?
+ crypto_aead_encrypt(subreq) :
+ crypto_aead_decrypt(subreq);
+}
+
+static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
+{
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
+
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
+ if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
+ return -EINVAL;
+ else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
+
+ /* HW cannot do full (AAD+payload) zero length, use fallback */
+ return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
+}
+
+static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
+{
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
+ struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+
+ /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
+ if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
+ return -EINVAL;
+ else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
+ /* If input length > 0 only */
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
+
+ /* HW cannot do full (AAD+payload) zero length, use fallback */
+ return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
+}
+
+static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_fallback_cra_init(tfm);
+ ctx->alg = SAFEXCEL_SM4;
+ ctx->blocksz = SM4_BLOCK_SIZE;
+ ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
+ ctx->state_sz = SM3_DIGEST_SIZE;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
+ .alg.aead = {
+ .setkey = safexcel_aead_fallback_setkey,
+ .setauthsize = safexcel_aead_fallback_setauthsize,
+ .encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
+ .decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
+ .ivsize = SM4_BLOCK_SIZE,
+ .maxauthsize = SM3_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sm3),cbc(sm4))",
+ .cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
+ CRYPTO_ALG_NEED_FALLBACK,
+ .cra_blocksize = SM4_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
+ .cra_exit = safexcel_aead_fallback_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sm4cbc_sha1_cra_init(tfm);
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = CTR_RFC3686_IV_SIZE,
+ .maxauthsize = SHA1_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sm4cbc_sm3_cra_init(tfm);
+ ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = CTR_RFC3686_IV_SIZE,
+ .maxauthsize = SM3_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
+ .cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
+ unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ /* last 4 bytes of key are the nonce! */
+ ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
+
+ len -= CTR_RFC3686_NONCE_SIZE;
+ return safexcel_aead_gcm_setkey(ctfm, key, len);
+}
+
+static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
+ unsigned int authsize)
+{
+ return crypto_rfc4106_check_authsize(authsize);
+}
+
+static int safexcel_rfc4106_encrypt(struct aead_request *req)
+{
+ return crypto_ipsec_check_assoclen(req->assoclen) ?:
+ safexcel_aead_encrypt(req);
+}
+
+static int safexcel_rfc4106_decrypt(struct aead_request *req)
+{
+ return crypto_ipsec_check_assoclen(req->assoclen) ?:
+ safexcel_aead_decrypt(req);
+}
+
+static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ ret = safexcel_aead_gcm_cra_init(tfm);
+ ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
+ ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
+ return ret;
+}
+
+struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
+ .alg.aead = {
+ .setkey = safexcel_rfc4106_gcm_setkey,
+ .setauthsize = safexcel_rfc4106_gcm_setauthsize,
+ .encrypt = safexcel_rfc4106_encrypt,
+ .decrypt = safexcel_rfc4106_decrypt,
+ .ivsize = GCM_RFC4106_IV_SIZE,
+ .maxauthsize = GHASH_DIGEST_SIZE,
+ .base = {
+ .cra_name = "rfc4106(gcm(aes))",
+ .cra_driver_name = "safexcel-rfc4106-gcm-aes",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_rfc4106_gcm_cra_init,
+ .cra_exit = safexcel_aead_gcm_cra_exit,
+ },
+ },
+};
+
+static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
+ unsigned int authsize)
+{
+ if (authsize != GHASH_DIGEST_SIZE)
+ return -EINVAL;
+
+ return 0;
+}
+
+static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ ret = safexcel_aead_gcm_cra_init(tfm);
+ ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
+ return ret;
+}
+
+struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
+ .alg.aead = {
+ .setkey = safexcel_rfc4106_gcm_setkey,
+ .setauthsize = safexcel_rfc4543_gcm_setauthsize,
+ .encrypt = safexcel_rfc4106_encrypt,
+ .decrypt = safexcel_rfc4106_decrypt,
+ .ivsize = GCM_RFC4543_IV_SIZE,
+ .maxauthsize = GHASH_DIGEST_SIZE,
+ .base = {
+ .cra_name = "rfc4543(gcm(aes))",
+ .cra_driver_name = "safexcel-rfc4543-gcm-aes",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_rfc4543_gcm_cra_init,
+ .cra_exit = safexcel_aead_gcm_cra_exit,
+ },
+ },
+};
+
+static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
+ unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ /* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
+ *(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
+ /* last 3 bytes of key are the nonce! */
+ memcpy((u8 *)&ctx->nonce + 1, key + len -
+ EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
+ EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
+
+ len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
+ return safexcel_aead_ccm_setkey(ctfm, key, len);
+}
+
+static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
+ unsigned int authsize)
+{
+ /* Borrowed from crypto/ccm.c */
+ switch (authsize) {
+ case 8:
+ case 12:
+ case 16:
+ break;
+ default:
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
+static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
+{
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
+
+ /* Borrowed from crypto/ccm.c */
+ if (req->assoclen != 16 && req->assoclen != 20)
+ return -EINVAL;
+
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
+}
+
+static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
+{
+ struct safexcel_cipher_req *creq = aead_request_ctx(req);
+
+ /* Borrowed from crypto/ccm.c */
+ if (req->assoclen != 16 && req->assoclen != 20)
+ return -EINVAL;
+
+ return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
+}
+
+static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ ret = safexcel_aead_ccm_cra_init(tfm);
+ ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
+ ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
+ return ret;
+}
+
+struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
+ .alg.aead = {
+ .setkey = safexcel_rfc4309_ccm_setkey,
+ .setauthsize = safexcel_rfc4309_ccm_setauthsize,
+ .encrypt = safexcel_rfc4309_ccm_encrypt,
+ .decrypt = safexcel_rfc4309_ccm_decrypt,
+ .ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
+ .maxauthsize = AES_BLOCK_SIZE,
+ .base = {
+ .cra_name = "rfc4309(ccm(aes))",
+ .cra_driver_name = "safexcel-rfc4309-ccm-aes",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_rfc4309_ccm_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
};
--- a/drivers/crypto/inside-secure/safexcel.h
+++ b/drivers/crypto/inside-secure/safexcel.h
@@ -17,8 +17,11 @@
#define EIP197_HIA_VERSION_BE 0xca35
#define EIP197_HIA_VERSION_LE 0x35ca
#define EIP97_VERSION_LE 0x9e61
+#define EIP196_VERSION_LE 0x3bc4
#define EIP197_VERSION_LE 0x3ac5
#define EIP96_VERSION_LE 0x9f60
+#define EIP201_VERSION_LE 0x36c9
+#define EIP206_VERSION_LE 0x31ce
#define EIP197_REG_LO16(reg) (reg & 0xffff)
#define EIP197_REG_HI16(reg) ((reg >> 16) & 0xffff)
#define EIP197_VERSION_MASK(reg) ((reg >> 16) & 0xfff)
@@ -26,12 +29,23 @@
((reg >> 4) & 0xf0) | \
((reg >> 12) & 0xf))

+/* EIP197 HIA OPTIONS ENCODING */
+#define EIP197_HIA_OPT_HAS_PE_ARB BIT(29)
+
+/* EIP206 OPTIONS ENCODING */
+#define EIP206_OPT_ICE_TYPE(n) ((n>>8)&3)
+
+/* EIP197 OPTIONS ENCODING */
+#define EIP197_OPT_HAS_TRC BIT(31)
+
/* Static configuration */
#define EIP197_DEFAULT_RING_SIZE 400
-#define EIP197_MAX_TOKENS 18
+#define EIP197_EMB_TOKENS 4 /* Pad CD to 16 dwords */
+#define EIP197_MAX_TOKENS 16
#define EIP197_MAX_RINGS 4
#define EIP197_FETCH_DEPTH 2
#define EIP197_MAX_BATCH_SZ 64
+#define EIP197_MAX_RING_AIC 14

#define EIP197_GFP_FLAGS(base) ((base).flags & CRYPTO_TFM_REQ_MAY_SLEEP ? \
GFP_KERNEL : GFP_ATOMIC)
@@ -138,6 +152,7 @@
#define EIP197_HIA_AIC_R_ENABLED_STAT(r) (0xe010 - EIP197_HIA_AIC_R_OFF(r))
#define EIP197_HIA_AIC_R_ACK(r) (0xe010 - EIP197_HIA_AIC_R_OFF(r))
#define EIP197_HIA_AIC_R_ENABLE_CLR(r) (0xe014 - EIP197_HIA_AIC_R_OFF(r))
+#define EIP197_HIA_AIC_R_VERSION(r) (0xe01c - EIP197_HIA_AIC_R_OFF(r))
#define EIP197_HIA_AIC_G_ENABLE_CTRL 0xf808
#define EIP197_HIA_AIC_G_ENABLED_STAT 0xf810
#define EIP197_HIA_AIC_G_ACK 0xf810
@@ -157,12 +172,16 @@
#define EIP197_PE_EIP96_FUNCTION_EN(n) (0x1004 + (0x2000 * (n)))
#define EIP197_PE_EIP96_CONTEXT_CTRL(n) (0x1008 + (0x2000 * (n)))
#define EIP197_PE_EIP96_CONTEXT_STAT(n) (0x100c + (0x2000 * (n)))
+#define EIP197_PE_EIP96_TOKEN_CTRL2(n) (0x102c + (0x2000 * (n)))
#define EIP197_PE_EIP96_FUNCTION2_EN(n) (0x1030 + (0x2000 * (n)))
#define EIP197_PE_EIP96_OPTIONS(n) (0x13f8 + (0x2000 * (n)))
#define EIP197_PE_EIP96_VERSION(n) (0x13fc + (0x2000 * (n)))
#define EIP197_PE_OUT_DBUF_THRES(n) (0x1c00 + (0x2000 * (n)))
#define EIP197_PE_OUT_TBUF_THRES(n) (0x1d00 + (0x2000 * (n)))
+#define EIP197_PE_OPTIONS(n) (0x1ff8 + (0x2000 * (n)))
+#define EIP197_PE_VERSION(n) (0x1ffc + (0x2000 * (n)))
#define EIP197_MST_CTRL 0xfff4
+#define EIP197_OPTIONS 0xfff8
#define EIP197_VERSION 0xfffc

/* EIP197-specific registers, no indirection */
@@ -178,6 +197,7 @@
#define EIP197_TRC_ECCADMINSTAT 0xf0838
#define EIP197_TRC_ECCDATASTAT 0xf083c
#define EIP197_TRC_ECCDATA 0xf0840
+#define EIP197_STRC_CONFIG 0xf43f0
#define EIP197_FLUE_CACHEBASE_LO(n) (0xf6000 + (32 * (n)))
#define EIP197_FLUE_CACHEBASE_HI(n) (0xf6004 + (32 * (n)))
#define EIP197_FLUE_CONFIG(n) (0xf6010 + (32 * (n)))
@@ -188,6 +208,7 @@

/* EIP197_HIA_xDR_DESC_SIZE */
#define EIP197_xDR_DESC_MODE_64BIT BIT(31)
+#define EIP197_CDR_DESC_MODE_ADCP BIT(30)

/* EIP197_HIA_xDR_DMA_CFG */
#define EIP197_HIA_xDR_WR_RES_BUF BIT(22)
@@ -213,7 +234,6 @@
/* EIP197_HIA_xDR_PROC_COUNT */
#define EIP197_xDR_PROC_xD_PKT_OFFSET 24
#define EIP197_xDR_PROC_xD_PKT_MASK GENMASK(6, 0)
-#define EIP197_xDR_PROC_xD_COUNT(n) ((n) << 2)
#define EIP197_xDR_PROC_xD_PKT(n) ((n) << 24)
#define EIP197_xDR_PROC_CLR_COUNT BIT(31)

@@ -228,6 +248,8 @@
#define EIP197_HIA_RA_PE_CTRL_EN BIT(30)

/* EIP197_HIA_OPTIONS */
+#define EIP197_N_RINGS_OFFSET 0
+#define EIP197_N_RINGS_MASK GENMASK(3, 0)
#define EIP197_N_PES_OFFSET 4
#define EIP197_N_PES_MASK GENMASK(4, 0)
#define EIP97_N_PES_MASK GENMASK(2, 0)
@@ -237,13 +259,13 @@
#define EIP197_CFSIZE_OFFSET 9
#define EIP197_CFSIZE_ADJUST 4
#define EIP97_CFSIZE_OFFSET 8
-#define EIP197_CFSIZE_MASK GENMASK(3, 0)
-#define EIP97_CFSIZE_MASK GENMASK(4, 0)
+#define EIP197_CFSIZE_MASK GENMASK(2, 0)
+#define EIP97_CFSIZE_MASK GENMASK(3, 0)
#define EIP197_RFSIZE_OFFSET 12
#define EIP197_RFSIZE_ADJUST 4
#define EIP97_RFSIZE_OFFSET 12
-#define EIP197_RFSIZE_MASK GENMASK(3, 0)
-#define EIP97_RFSIZE_MASK GENMASK(4, 0)
+#define EIP197_RFSIZE_MASK GENMASK(2, 0)
+#define EIP97_RFSIZE_MASK GENMASK(3, 0)

/* EIP197_HIA_AIC_R_ENABLE_CTRL */
#define EIP197_CDR_IRQ(n) BIT((n) * 2)
@@ -257,9 +279,9 @@
#define EIP197_HIA_DxE_CFG_MIN_CTRL_SIZE(n) ((n) << 16)
#define EIP197_HIA_DxE_CFG_CTRL_CACHE_CTRL(n) (((n) & 0x7) << 20)
#define EIP197_HIA_DxE_CFG_MAX_CTRL_SIZE(n) ((n) << 24)
-#define EIP197_HIA_DFE_CFG_DIS_DEBUG (BIT(31) | BIT(29))
+#define EIP197_HIA_DFE_CFG_DIS_DEBUG GENMASK(31, 29)
#define EIP197_HIA_DSE_CFG_EN_SINGLE_WR BIT(29)
-#define EIP197_HIA_DSE_CFG_DIS_DEBUG BIT(31)
+#define EIP197_HIA_DSE_CFG_DIS_DEBUG GENMASK(31, 30)

/* EIP197_HIA_DFE/DSE_THR_CTRL */
#define EIP197_DxE_THR_CTRL_EN BIT(30)
@@ -327,13 +349,21 @@
#define EIP197_ADDRESS_MODE BIT(8)
#define EIP197_CONTROL_MODE BIT(9)

+/* EIP197_PE_EIP96_TOKEN_CTRL2 */
+#define EIP197_PE_EIP96_TOKEN_CTRL2_CTX_DONE BIT(3)
+
+/* EIP197_STRC_CONFIG */
+#define EIP197_STRC_CONFIG_INIT BIT(31)
+#define EIP197_STRC_CONFIG_LARGE_REC(s) (s<<8)
+#define EIP197_STRC_CONFIG_SMALL_REC(s) (s<<0)
+
/* EIP197_FLUE_CONFIG */
#define EIP197_FLUE_CONFIG_MAGIC 0xc7000004

/* Context Control */
struct safexcel_context_record {
- u32 control0;
- u32 control1;
+ __le32 control0;
+ __le32 control1;

__le32 data[40];
} __packed;
@@ -358,10 +388,14 @@ struct safexcel_context_record {
#define CONTEXT_CONTROL_CRYPTO_ALG_AES128 (0x5 << 17)
#define CONTEXT_CONTROL_CRYPTO_ALG_AES192 (0x6 << 17)
#define CONTEXT_CONTROL_CRYPTO_ALG_AES256 (0x7 << 17)
+#define CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 (0x8 << 17)
+#define CONTEXT_CONTROL_CRYPTO_ALG_SM4 (0xd << 17)
+#define CONTEXT_CONTROL_DIGEST_INITIAL (0x0 << 21)
#define CONTEXT_CONTROL_DIGEST_PRECOMPUTED (0x1 << 21)
#define CONTEXT_CONTROL_DIGEST_XCM (0x2 << 21)
#define CONTEXT_CONTROL_DIGEST_HMAC (0x3 << 21)
#define CONTEXT_CONTROL_CRYPTO_ALG_MD5 (0x0 << 23)
+#define CONTEXT_CONTROL_CRYPTO_ALG_CRC32 (0x0 << 23)
#define CONTEXT_CONTROL_CRYPTO_ALG_SHA1 (0x2 << 23)
#define CONTEXT_CONTROL_CRYPTO_ALG_SHA224 (0x4 << 23)
#define CONTEXT_CONTROL_CRYPTO_ALG_SHA256 (0x3 << 23)
@@ -371,17 +405,25 @@ struct safexcel_context_record {
#define CONTEXT_CONTROL_CRYPTO_ALG_XCBC128 (0x1 << 23)
#define CONTEXT_CONTROL_CRYPTO_ALG_XCBC192 (0x2 << 23)
#define CONTEXT_CONTROL_CRYPTO_ALG_XCBC256 (0x3 << 23)
+#define CONTEXT_CONTROL_CRYPTO_ALG_SM3 (0x7 << 23)
+#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256 (0xb << 23)
+#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224 (0xc << 23)
+#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512 (0xd << 23)
+#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384 (0xe << 23)
+#define CONTEXT_CONTROL_CRYPTO_ALG_POLY1305 (0xf << 23)
#define CONTEXT_CONTROL_INV_FR (0x5 << 24)
#define CONTEXT_CONTROL_INV_TR (0x6 << 24)

/* control1 */
#define CONTEXT_CONTROL_CRYPTO_MODE_ECB (0 << 0)
#define CONTEXT_CONTROL_CRYPTO_MODE_CBC (1 << 0)
+#define CONTEXT_CONTROL_CHACHA20_MODE_256_32 (2 << 0)
#define CONTEXT_CONTROL_CRYPTO_MODE_OFB (4 << 0)
#define CONTEXT_CONTROL_CRYPTO_MODE_CFB (5 << 0)
#define CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD (6 << 0)
#define CONTEXT_CONTROL_CRYPTO_MODE_XTS (7 << 0)
#define CONTEXT_CONTROL_CRYPTO_MODE_XCM ((6 << 0) | BIT(17))
+#define CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK (12 << 0)
#define CONTEXT_CONTROL_IV0 BIT(5)
#define CONTEXT_CONTROL_IV1 BIT(6)
#define CONTEXT_CONTROL_IV2 BIT(7)
@@ -394,6 +436,13 @@ struct safexcel_context_record {
#define EIP197_XCM_MODE_GCM 1
#define EIP197_XCM_MODE_CCM 2

+#define EIP197_AEAD_TYPE_IPSEC_ESP 2
+#define EIP197_AEAD_TYPE_IPSEC_ESP_GMAC 3
+#define EIP197_AEAD_IPSEC_IV_SIZE 8
+#define EIP197_AEAD_IPSEC_NONCE_SIZE 4
+#define EIP197_AEAD_IPSEC_COUNTER_SIZE 4
+#define EIP197_AEAD_IPSEC_CCM_NONCE_SIZE 3
+
/* The hash counter given to the engine in the context has a granularity of
* 64 bits.
*/
@@ -423,6 +472,8 @@ struct safexcel_context_record {
#define EIP197_TRC_PARAMS2_RC_SZ_SMALL(n) ((n) << 18)

/* Cache helpers */
+#define EIP197_MIN_DSIZE 1024
+#define EIP197_MIN_ASIZE 8
#define EIP197_CS_TRC_REC_WC 64
#define EIP197_CS_RC_SIZE (4 * sizeof(u32))
#define EIP197_CS_RC_NEXT(x) (x)
@@ -447,7 +498,7 @@ struct result_data_desc {
u16 application_id;
u16 rsvd1;

- u32 rsvd2;
+ u32 rsvd2[5];
} __packed;


@@ -465,16 +516,15 @@ struct safexcel_result_desc {

u32 data_lo;
u32 data_hi;
-
- struct result_data_desc result_data;
} __packed;

/*
* The EIP(1)97 only needs to fetch the descriptor part of
* the result descriptor, not the result token part!
*/
-#define EIP197_RD64_FETCH_SIZE ((sizeof(struct safexcel_result_desc) -\
- sizeof(struct result_data_desc)) /\
+#define EIP197_RD64_FETCH_SIZE (sizeof(struct safexcel_result_desc) /\
+ sizeof(u32))
+#define EIP197_RD64_RESULT_SIZE (sizeof(struct result_data_desc) /\
sizeof(u32))

struct safexcel_token {
@@ -505,6 +555,8 @@ static inline void eip197_noop_token(str
{
token->opcode = EIP197_TOKEN_OPCODE_NOOP;
token->packet_length = BIT(2);
+ token->stat = 0;
+ token->instructions = 0;
}

/* Instructions */
@@ -526,14 +578,13 @@ struct safexcel_control_data_desc {
u16 application_id;
u16 rsvd;

- u8 refresh:2;
- u32 context_lo:30;
+ u32 context_lo;
u32 context_hi;

u32 control0;
u32 control1;

- u32 token[EIP197_MAX_TOKENS];
+ u32 token[EIP197_EMB_TOKENS];
} __packed;

#define EIP197_OPTION_MAGIC_VALUE BIT(0)
@@ -543,7 +594,10 @@ struct safexcel_control_data_desc {
#define EIP197_OPTION_2_TOKEN_IV_CMD GENMASK(11, 10)
#define EIP197_OPTION_4_TOKEN_IV_CMD GENMASK(11, 9)

+#define EIP197_TYPE_BCLA 0x0
#define EIP197_TYPE_EXTENDED 0x3
+#define EIP197_CONTEXT_SMALL 0x2
+#define EIP197_CONTEXT_SIZE_MASK 0x3

/* Basic Command Descriptor format */
struct safexcel_command_desc {
|
|
@@ -551,16 +605,22 @@ struct safexcel_command_desc {
|
|
u8 rsvd0:5;
|
|
u8 last_seg:1;
|
|
u8 first_seg:1;
|
|
- u16 additional_cdata_size:8;
|
|
+ u8 additional_cdata_size:8;
|
|
|
|
u32 rsvd1;
|
|
|
|
u32 data_lo;
|
|
u32 data_hi;
|
|
|
|
+ u32 atok_lo;
|
|
+ u32 atok_hi;
|
|
+
|
|
struct safexcel_control_data_desc control_data;
|
|
} __packed;
|
|
|
|
+#define EIP197_CD64_FETCH_SIZE (sizeof(struct safexcel_command_desc) /\
|
|
+ sizeof(u32))
|
|
+
|
|
/*
|
|
* Internal structures & functions
|
|
*/
|
|
@@ -578,15 +638,20 @@ enum eip197_fw {
|
|
|
|
struct safexcel_desc_ring {
|
|
void *base;
|
|
+ void *shbase;
|
|
void *base_end;
|
|
+ void *shbase_end;
|
|
dma_addr_t base_dma;
|
|
+ dma_addr_t shbase_dma;
|
|
|
|
/* write and read pointers */
|
|
void *write;
|
|
+ void *shwrite;
|
|
void *read;
|
|
|
|
/* descriptor element offset */
|
|
- unsigned offset;
|
|
+ unsigned int offset;
|
|
+ unsigned int shoffset;
|
|
};
|
|
|
|
enum safexcel_alg_type {
|
|
@@ -601,9 +666,11 @@ struct safexcel_config {
|
|
|
|
u32 cd_size;
|
|
u32 cd_offset;
|
|
+ u32 cdsh_offset;
|
|
|
|
u32 rd_size;
|
|
u32 rd_offset;
|
|
+ u32 res_offset;
|
|
};
|
|
|
|
struct safexcel_work_data {
|
|
@@ -654,6 +721,12 @@ enum safexcel_eip_version {
|
|
/* Priority we use for advertising our algorithms */
|
|
#define SAFEXCEL_CRA_PRIORITY 300
|
|
|
|
+/* SM3 digest result for zero length message */
|
|
+#define EIP197_SM3_ZEROM_HASH "\x1A\xB2\x1D\x83\x55\xCF\xA1\x7F" \
|
|
+ "\x8E\x61\x19\x48\x31\xE8\x1A\x8F" \
|
|
+ "\x22\xBE\xC8\xC7\x28\xFE\xFB\x74" \
|
|
+ "\x7E\xD0\x35\xEB\x50\x82\xAA\x2B"
|
|
+
|
|
/* EIP algorithm presence flags */
|
|
enum safexcel_eip_algorithms {
|
|
SAFEXCEL_ALG_BC0 = BIT(5),
|
|
@@ -697,16 +770,23 @@ struct safexcel_register_offsets {
|
|
enum safexcel_flags {
|
|
EIP197_TRC_CACHE = BIT(0),
|
|
SAFEXCEL_HW_EIP197 = BIT(1),
|
|
+ EIP197_PE_ARB = BIT(2),
|
|
+ EIP197_ICE = BIT(3),
|
|
+ EIP197_SIMPLE_TRC = BIT(4),
|
|
};
|
|
|
|
struct safexcel_hwconfig {
|
|
enum safexcel_eip_algorithms algo_flags;
|
|
int hwver;
|
|
int hiaver;
|
|
+ int ppver;
|
|
int pever;
|
|
int hwdataw;
|
|
int hwcfsize;
|
|
int hwrfsize;
|
|
+ int hwnumpes;
|
|
+ int hwnumrings;
|
|
+ int hwnumraic;
|
|
};
|
|
|
|
struct safexcel_crypto_priv {
|
|
@@ -778,7 +858,7 @@ struct safexcel_inv_result {
|
|
|
|
void safexcel_dequeue(struct safexcel_crypto_priv *priv, int ring);
|
|
int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv,
|
|
- struct safexcel_result_desc *rdesc);
|
|
+ void *rdp);
|
|
void safexcel_complete(struct safexcel_crypto_priv *priv, int ring);
|
|
int safexcel_invalidate_cache(struct crypto_async_request *async,
|
|
struct safexcel_crypto_priv *priv,
|
|
@@ -797,7 +877,8 @@ struct safexcel_command_desc *safexcel_a
|
|
bool first, bool last,
|
|
dma_addr_t data, u32 len,
|
|
u32 full_data_len,
|
|
- dma_addr_t context);
|
|
+ dma_addr_t context,
|
|
+ struct safexcel_token **atoken);
|
|
struct safexcel_result_desc *safexcel_add_rdesc(struct safexcel_crypto_priv *priv,
|
|
int ring_id,
|
|
bool first, bool last,
|
|
@@ -853,5 +934,43 @@ extern struct safexcel_alg_template safe
|
|
extern struct safexcel_alg_template safexcel_alg_xts_aes;
|
|
extern struct safexcel_alg_template safexcel_alg_gcm;
|
|
extern struct safexcel_alg_template safexcel_alg_ccm;
|
|
+extern struct safexcel_alg_template safexcel_alg_crc32;
|
|
+extern struct safexcel_alg_template safexcel_alg_cbcmac;
|
|
+extern struct safexcel_alg_template safexcel_alg_xcbcmac;
|
|
+extern struct safexcel_alg_template safexcel_alg_cmac;
|
|
+extern struct safexcel_alg_template safexcel_alg_chacha20;
|
|
+extern struct safexcel_alg_template safexcel_alg_chachapoly;
|
|
+extern struct safexcel_alg_template safexcel_alg_chachapoly_esp;
|
|
+extern struct safexcel_alg_template safexcel_alg_sm3;
|
|
+extern struct safexcel_alg_template safexcel_alg_hmac_sm3;
|
|
+extern struct safexcel_alg_template safexcel_alg_ecb_sm4;
|
|
+extern struct safexcel_alg_template safexcel_alg_cbc_sm4;
|
|
+extern struct safexcel_alg_template safexcel_alg_ofb_sm4;
|
|
+extern struct safexcel_alg_template safexcel_alg_cfb_sm4;
|
|
+extern struct safexcel_alg_template safexcel_alg_ctr_sm4;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4;
|
|
+extern struct safexcel_alg_template safexcel_alg_sha3_224;
|
|
+extern struct safexcel_alg_template safexcel_alg_sha3_256;
|
|
+extern struct safexcel_alg_template safexcel_alg_sha3_384;
|
|
+extern struct safexcel_alg_template safexcel_alg_sha3_512;
|
|
+extern struct safexcel_alg_template safexcel_alg_hmac_sha3_224;
|
|
+extern struct safexcel_alg_template safexcel_alg_hmac_sha3_256;
|
|
+extern struct safexcel_alg_template safexcel_alg_hmac_sha3_384;
|
|
+extern struct safexcel_alg_template safexcel_alg_hmac_sha3_512;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des;
|
|
+extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des;
|
|
+extern struct safexcel_alg_template safexcel_alg_rfc4106_gcm;
|
|
+extern struct safexcel_alg_template safexcel_alg_rfc4543_gcm;
|
|
+extern struct safexcel_alg_template safexcel_alg_rfc4309_ccm;
|
|
|
|
#endif
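Background for the safexcel_hash.c diff that follows: the safexcel_cmac_setkey() hunk below derives the CMAC subkeys by GF(2^128) doubling of E_K(0^128), reusing the math from crypto/cmac.c. Here is a minimal standalone userspace sketch of just that doubling step. It is illustrative only and not part of the patch: the value of l is an arbitrary placeholder for E_K(0^128) (a real caller obtains it from AES), and gf128_dbl/k1/k2 are hypothetical names.

/*
 * Sketch of the GF(2^128) doubling used to derive CMAC subkeys
 * K1 = dbl(L) and K2 = dbl(K1), where L = E_K(0^128).
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Shift the 128-bit value left by one; fold the carry back with 0x87 */
static void gf128_dbl(uint64_t v[2])	/* v[0] = high half, v[1] = low half */
{
	uint8_t msb_mask = ((int64_t)v[0] >> 63) & 0x87;

	v[0] = (v[0] << 1) | (v[1] >> 63);
	v[1] = (v[1] << 1) ^ msb_mask;
}

int main(void)
{
	/* Placeholder for L = E_K(0^128); not a real cipher output */
	uint64_t l[2] = { 0x0123456789abcdefULL, 0xfedcba9876543210ULL };
	uint64_t k1[2], k2[2];

	memcpy(k1, l, sizeof(k1));
	gf128_dbl(k1);		/* K1 = dbl(L)  */
	memcpy(k2, k1, sizeof(k2));
	gf128_dbl(k2);		/* K2 = dbl(K1) */

	printf("K1 = %016llx%016llx\n",
	       (unsigned long long)k1[0], (unsigned long long)k1[1]);
	printf("K2 = %016llx%016llx\n",
	       (unsigned long long)k2[0], (unsigned long long)k2[1]);
	return 0;
}

The conditional XOR with 0x87 is the reduction modulo x^128 + x^7 + x^2 + x + 1; it corresponds one-for-one to the msb_mask/gfmask computation in the hunk below.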
--- a/drivers/crypto/inside-secure/safexcel_hash.c
|
|
+++ b/drivers/crypto/inside-secure/safexcel_hash.c
|
|
@@ -5,9 +5,13 @@
|
|
* Antoine Tenart <antoine.tenart@free-electrons.com>
|
|
*/
|
|
|
|
+#include <crypto/aes.h>
|
|
#include <crypto/hmac.h>
|
|
#include <crypto/md5.h>
|
|
#include <crypto/sha.h>
|
|
+#include <crypto/sha3.h>
|
|
+#include <crypto/skcipher.h>
|
|
+#include <crypto/sm3.h>
|
|
#include <linux/device.h>
|
|
#include <linux/dma-mapping.h>
|
|
#include <linux/dmapool.h>
|
|
@@ -19,9 +23,19 @@ struct safexcel_ahash_ctx {
|
|
struct safexcel_crypto_priv *priv;
|
|
|
|
u32 alg;
|
|
-
|
|
- u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
|
|
- u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
|
|
+ u8 key_sz;
|
|
+ bool cbcmac;
|
|
+ bool do_fallback;
|
|
+ bool fb_init_done;
|
|
+ bool fb_do_setkey;
|
|
+
|
|
+ __le32 ipad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
|
|
+ __le32 opad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
|
|
+
|
|
+ struct crypto_cipher *kaes;
|
|
+ struct crypto_ahash *fback;
|
|
+ struct crypto_shash *shpre;
|
|
+ struct shash_desc *shdesc;
|
|
};
|
|
|
|
struct safexcel_ahash_req {
|
|
@@ -31,6 +45,8 @@ struct safexcel_ahash_req {
|
|
bool needs_inv;
|
|
bool hmac_zlen;
|
|
bool len_is_le;
|
|
+ bool not_first;
|
|
+ bool xcbcmac;
|
|
|
|
int nents;
|
|
dma_addr_t result_dma;
|
|
@@ -39,7 +55,9 @@ struct safexcel_ahash_req {
|
|
|
|
u8 state_sz; /* expected state size, only set once */
|
|
u8 block_sz; /* block size, only set once */
|
|
- u32 state[SHA512_DIGEST_SIZE / sizeof(u32)] __aligned(sizeof(u32));
|
|
+ u8 digest_sz; /* output digest size, only set once */
|
|
+ __le32 state[SHA3_512_BLOCK_SIZE /
|
|
+ sizeof(__le32)] __aligned(sizeof(__le32));
|
|
|
|
u64 len;
|
|
u64 processed;
|
|
@@ -57,22 +75,36 @@ static inline u64 safexcel_queued_len(st
|
|
}
|
|
|
|
static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
|
|
- u32 input_length, u32 result_length)
|
|
+ u32 input_length, u32 result_length,
|
|
+ bool cbcmac)
|
|
{
|
|
struct safexcel_token *token =
|
|
(struct safexcel_token *)cdesc->control_data.token;
|
|
|
|
token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
|
|
token[0].packet_length = input_length;
|
|
- token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
|
|
token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
|
|
|
|
- token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
- token[1].packet_length = result_length;
|
|
- token[1].stat = EIP197_TOKEN_STAT_LAST_HASH |
|
|
+ input_length &= 15;
|
|
+ if (unlikely(cbcmac && input_length)) {
|
|
+ token[0].stat = 0;
|
|
+ token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
+ token[1].packet_length = 16 - input_length;
|
|
+ token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
|
|
+ token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
|
|
+ } else {
|
|
+ token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
|
|
+ eip197_noop_token(&token[1]);
|
|
+ }
|
|
+
|
|
+ token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
|
|
+ token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
|
|
EIP197_TOKEN_STAT_LAST_PACKET;
|
|
- token[1].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
|
|
+ token[2].packet_length = result_length;
|
|
+ token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
|
|
EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
|
|
+
|
|
+ eip197_noop_token(&token[3]);
|
|
}
|
|
|
|
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
|
|
@@ -82,29 +114,49 @@ static void safexcel_context_control(str
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
|
u64 count = 0;
|
|
|
|
- cdesc->control_data.control0 |= ctx->alg;
|
|
+ cdesc->control_data.control0 = ctx->alg;
|
|
+ cdesc->control_data.control1 = 0;
|
|
|
|
/*
|
|
* Copy the input digest if needed, and setup the context
|
|
* fields. Do this now as we need it to setup the first command
|
|
* descriptor.
|
|
*/
|
|
- if (!req->processed) {
|
|
- /* First - and possibly only - block of basic hash only */
|
|
- if (req->finish) {
|
|
+ if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
|
|
+ if (req->xcbcmac)
|
|
+ memcpy(ctx->base.ctxr->data, ctx->ipad, ctx->key_sz);
|
|
+ else
|
|
+ memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
|
|
+
|
|
+ if (!req->finish && req->xcbcmac)
|
|
+ cdesc->control_data.control0 |=
|
|
+ CONTEXT_CONTROL_DIGEST_XCM |
|
|
+ CONTEXT_CONTROL_TYPE_HASH_OUT |
|
|
+ CONTEXT_CONTROL_NO_FINISH_HASH |
|
|
+ CONTEXT_CONTROL_SIZE(req->state_sz /
|
|
+ sizeof(u32));
|
|
+ else
|
|
cdesc->control_data.control0 |=
|
|
+ CONTEXT_CONTROL_DIGEST_XCM |
|
|
+ CONTEXT_CONTROL_TYPE_HASH_OUT |
|
|
+ CONTEXT_CONTROL_SIZE(req->state_sz /
|
|
+ sizeof(u32));
|
|
+ return;
|
|
+ } else if (!req->processed) {
|
|
+ /* First - and possibly only - block of basic hash only */
|
|
+ if (req->finish)
|
|
+ cdesc->control_data.control0 |= req->digest |
|
|
CONTEXT_CONTROL_TYPE_HASH_OUT |
|
|
CONTEXT_CONTROL_RESTART_HASH |
|
|
/* ensure its not 0! */
|
|
CONTEXT_CONTROL_SIZE(1);
|
|
- } else {
|
|
- cdesc->control_data.control0 |=
|
|
+ else
|
|
+ cdesc->control_data.control0 |= req->digest |
|
|
CONTEXT_CONTROL_TYPE_HASH_OUT |
|
|
CONTEXT_CONTROL_RESTART_HASH |
|
|
CONTEXT_CONTROL_NO_FINISH_HASH |
|
|
/* ensure its not 0! */
|
|
CONTEXT_CONTROL_SIZE(1);
|
|
- }
|
|
return;
|
|
}
|
|
|
|
@@ -204,7 +256,7 @@ static int safexcel_handle_req_result(st
|
|
}
|
|
|
|
if (sreq->result_dma) {
|
|
- dma_unmap_single(priv->dev, sreq->result_dma, sreq->state_sz,
|
|
+ dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
|
|
DMA_FROM_DEVICE);
|
|
sreq->result_dma = 0;
|
|
}
|
|
@@ -223,14 +275,15 @@ static int safexcel_handle_req_result(st
|
|
memcpy(sreq->cache, sreq->state,
|
|
crypto_ahash_digestsize(ahash));
|
|
|
|
- memcpy(sreq->state, ctx->opad, sreq->state_sz);
|
|
+ memcpy(sreq->state, ctx->opad, sreq->digest_sz);
|
|
|
|
sreq->len = sreq->block_sz +
|
|
crypto_ahash_digestsize(ahash);
|
|
sreq->processed = sreq->block_sz;
|
|
sreq->hmac = 0;
|
|
|
|
- ctx->base.needs_inv = true;
|
|
+ if (priv->flags & EIP197_TRC_CACHE)
|
|
+ ctx->base.needs_inv = true;
|
|
areq->nbytes = 0;
|
|
safexcel_ahash_enqueue(areq);
|
|
|
|
@@ -238,8 +291,14 @@ static int safexcel_handle_req_result(st
|
|
return 1;
|
|
}
|
|
|
|
- memcpy(areq->result, sreq->state,
|
|
- crypto_ahash_digestsize(ahash));
|
|
+ if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
|
|
+ ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
|
|
+ /* Undo final XOR with 0xffffffff ...*/
|
|
+ *(__le32 *)areq->result = ~sreq->state[0];
|
|
+ } else {
|
|
+ memcpy(areq->result, sreq->state,
|
|
+ crypto_ahash_digestsize(ahash));
|
|
+ }
|
|
}
|
|
|
|
cache_len = safexcel_queued_len(sreq);
|
|
@@ -261,10 +320,11 @@ static int safexcel_ahash_send_req(struc
|
|
struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
|
|
struct safexcel_result_desc *rdesc;
|
|
struct scatterlist *sg;
|
|
- int i, extra = 0, n_cdesc = 0, ret = 0;
|
|
- u64 queued, len, cache_len;
|
|
+ struct safexcel_token *dmmy;
|
|
+ int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
|
|
+ u64 queued, len;
|
|
|
|
- queued = len = safexcel_queued_len(req);
|
|
+ queued = safexcel_queued_len(req);
|
|
if (queued <= HASH_CACHE_SIZE)
|
|
cache_len = queued;
|
|
else
|
|
@@ -287,15 +347,52 @@ static int safexcel_ahash_send_req(struc
|
|
areq->nbytes - extra);
|
|
|
|
queued -= extra;
|
|
- len -= extra;
|
|
|
|
if (!queued) {
|
|
*commands = 0;
|
|
*results = 0;
|
|
return 0;
|
|
}
|
|
+
|
|
+ extra = 0;
|
|
+ }
|
|
+
|
|
+ if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
|
|
+ if (unlikely(cache_len < AES_BLOCK_SIZE)) {
|
|
+ /*
|
|
+ * Cache contains less than 1 full block, complete.
|
|
+ */
|
|
+ extra = AES_BLOCK_SIZE - cache_len;
|
|
+ if (queued > cache_len) {
|
|
+ /* More data follows: borrow bytes */
|
|
+ u64 tmp = queued - cache_len;
|
|
+
|
|
+ skip = min_t(u64, tmp, extra);
|
|
+ sg_pcopy_to_buffer(areq->src,
|
|
+ sg_nents(areq->src),
|
|
+ req->cache + cache_len,
|
|
+ skip, 0);
|
|
+ }
|
|
+ extra -= skip;
|
|
+ memset(req->cache + cache_len + skip, 0, extra);
|
|
+ if (!ctx->cbcmac && extra) {
|
|
+ // 10- padding for XCBCMAC & CMAC
|
|
+ req->cache[cache_len + skip] = 0x80;
|
|
+ // HW will use K2 iso K3 - compensate!
|
|
+ for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
|
|
+ ((__be32 *)req->cache)[i] ^=
|
|
+ cpu_to_be32(le32_to_cpu(
|
|
+ ctx->ipad[i] ^ ctx->ipad[i + 4]));
|
|
+ }
|
|
+ cache_len = AES_BLOCK_SIZE;
|
|
+ queued = queued + extra;
|
|
+ }
|
|
+
|
|
+ /* XCBC continue: XOR previous result into 1st word */
|
|
+ crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
|
|
}
|
|
|
|
+ len = queued;
|
|
/* Add a command descriptor for the cached data, if any */
|
|
if (cache_len) {
|
|
req->cache_dma = dma_map_single(priv->dev, req->cache,
|
|
@@ -306,8 +403,9 @@ static int safexcel_ahash_send_req(struc
|
|
req->cache_sz = cache_len;
|
|
first_cdesc = safexcel_add_cdesc(priv, ring, 1,
|
|
(cache_len == len),
|
|
- req->cache_dma, cache_len, len,
|
|
- ctx->base.ctxr_dma);
|
|
+ req->cache_dma, cache_len,
|
|
+ len, ctx->base.ctxr_dma,
|
|
+ &dmmy);
|
|
if (IS_ERR(first_cdesc)) {
|
|
ret = PTR_ERR(first_cdesc);
|
|
goto unmap_cache;
|
|
@@ -319,10 +417,6 @@ static int safexcel_ahash_send_req(struc
|
|
goto send_command;
|
|
}
|
|
|
|
- /* Skip descriptor generation for zero-length requests */
|
|
- if (!areq->nbytes)
|
|
- goto send_command;
|
|
-
|
|
/* Now handle the current ahash request buffer(s) */
|
|
req->nents = dma_map_sg(priv->dev, areq->src,
|
|
sg_nents_for_len(areq->src,
|
|
@@ -336,26 +430,34 @@ static int safexcel_ahash_send_req(struc
|
|
for_each_sg(areq->src, sg, req->nents, i) {
|
|
int sglen = sg_dma_len(sg);
|
|
|
|
+ if (unlikely(sglen <= skip)) {
|
|
+ skip -= sglen;
|
|
+ continue;
|
|
+ }
|
|
+
|
|
/* Do not overflow the request */
|
|
- if (queued < sglen)
|
|
+ if ((queued + skip) <= sglen)
|
|
sglen = queued;
|
|
+ else
|
|
+ sglen -= skip;
|
|
|
|
cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
|
|
!(queued - sglen),
|
|
- sg_dma_address(sg),
|
|
- sglen, len, ctx->base.ctxr_dma);
|
|
+ sg_dma_address(sg) + skip, sglen,
|
|
+ len, ctx->base.ctxr_dma, &dmmy);
|
|
if (IS_ERR(cdesc)) {
|
|
ret = PTR_ERR(cdesc);
|
|
goto unmap_sg;
|
|
}
|
|
- n_cdesc++;
|
|
|
|
- if (n_cdesc == 1)
|
|
+ if (!n_cdesc)
|
|
first_cdesc = cdesc;
|
|
+ n_cdesc++;
|
|
|
|
queued -= sglen;
|
|
if (!queued)
|
|
break;
|
|
+ skip = 0;
|
|
}
|
|
|
|
send_command:
|
|
@@ -363,9 +465,9 @@ send_command:
|
|
safexcel_context_control(ctx, req, first_cdesc);
|
|
|
|
/* Add the token */
|
|
- safexcel_hash_token(first_cdesc, len, req->state_sz);
|
|
+ safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);
|
|
|
|
- req->result_dma = dma_map_single(priv->dev, req->state, req->state_sz,
|
|
+ req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
|
|
DMA_FROM_DEVICE);
|
|
if (dma_mapping_error(priv->dev, req->result_dma)) {
|
|
ret = -EINVAL;
|
|
@@ -374,7 +476,7 @@ send_command:
|
|
|
|
/* Add a result descriptor */
|
|
rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
|
|
- req->state_sz);
|
|
+ req->digest_sz);
|
|
if (IS_ERR(rdesc)) {
|
|
ret = PTR_ERR(rdesc);
|
|
goto unmap_result;
|
|
@@ -382,17 +484,20 @@ send_command:
|
|
|
|
safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);
|
|
|
|
- req->processed += len;
|
|
+ req->processed += len - extra;
|
|
|
|
*commands = n_cdesc;
|
|
*results = 1;
|
|
return 0;
|
|
|
|
unmap_result:
|
|
- dma_unmap_single(priv->dev, req->result_dma, req->state_sz,
|
|
+ dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
|
|
DMA_FROM_DEVICE);
|
|
unmap_sg:
|
|
- dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
|
|
+ if (req->nents) {
|
|
+ dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
|
|
+ req->nents = 0;
|
|
+ }
|
|
cdesc_rollback:
|
|
for (i = 0; i < n_cdesc; i++)
|
|
safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
|
|
@@ -590,16 +695,12 @@ static int safexcel_ahash_enqueue(struct
|
|
|
|
if (ctx->base.ctxr) {
|
|
if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
|
|
- req->processed &&
|
|
- (/* invalidate for basic hash continuation finish */
|
|
- (req->finish &&
|
|
- (req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED)) ||
|
|
+ /* invalidate for *any* non-XCBC continuation */
|
|
+ ((req->not_first && !req->xcbcmac) ||
|
|
/* invalidate if (i)digest changed */
|
|
memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
|
|
- /* invalidate for HMAC continuation finish */
|
|
- (req->finish && (req->processed != req->block_sz)) ||
|
|
/* invalidate for HMAC finish with odigest changed */
|
|
- (req->finish &&
|
|
+ (req->finish && req->hmac &&
|
|
memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
|
|
ctx->opad, req->state_sz))))
|
|
/*
|
|
@@ -622,6 +723,7 @@ static int safexcel_ahash_enqueue(struct
|
|
if (!ctx->base.ctxr)
|
|
return -ENOMEM;
|
|
}
|
|
+ req->not_first = true;
|
|
|
|
ring = ctx->base.ring;
|
|
|
|
@@ -691,8 +793,34 @@ static int safexcel_ahash_final(struct a
|
|
else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
|
|
memcpy(areq->result, sha512_zero_message_hash,
|
|
SHA512_DIGEST_SIZE);
|
|
+ else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
|
|
+ memcpy(areq->result,
|
|
+ EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
|
|
+ }
|
|
|
|
return 0;
|
|
+ } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
|
|
+ ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
|
|
+ req->len == sizeof(u32) && !areq->nbytes)) {
|
|
+ /* Zero length CRC32 */
|
|
+ memcpy(areq->result, ctx->ipad, sizeof(u32));
|
|
+ return 0;
|
|
+ } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
|
|
+ !areq->nbytes)) {
|
|
+ /* Zero length CBC MAC */
|
|
+ memset(areq->result, 0, AES_BLOCK_SIZE);
|
|
+ return 0;
|
|
+ } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
|
|
+ !areq->nbytes)) {
|
|
+ /* Zero length (X)CBC/CMAC */
|
|
+ int i;
|
|
+
|
|
+ for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
|
|
+ ((__be32 *)areq->result)[i] =
|
|
+ cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4]));//K3
|
|
+ areq->result[0] ^= 0x80; // 10- padding
|
|
+ crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
|
|
+ return 0;
|
|
} else if (unlikely(req->hmac &&
|
|
(req->len == req->block_sz) &&
|
|
!areq->nbytes)) {
|
|
@@ -792,6 +920,7 @@ static int safexcel_ahash_cra_init(struc
|
|
ctx->priv = tmpl->priv;
|
|
ctx->base.send = safexcel_ahash_send;
|
|
ctx->base.handle_result = safexcel_handle_result;
|
|
+ ctx->fb_do_setkey = false;
|
|
|
|
crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
|
|
sizeof(struct safexcel_ahash_req));
|
|
@@ -808,6 +937,7 @@ static int safexcel_sha1_init(struct aha
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = SHA1_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA1_DIGEST_SIZE;
|
|
req->block_sz = SHA1_BLOCK_SIZE;
|
|
|
|
return 0;
|
|
@@ -889,6 +1019,7 @@ static int safexcel_hmac_sha1_init(struc
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = SHA1_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA1_DIGEST_SIZE;
|
|
req->block_sz = SHA1_BLOCK_SIZE;
|
|
req->hmac = true;
|
|
|
|
@@ -1125,6 +1256,7 @@ static int safexcel_sha256_init(struct a
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = SHA256_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA256_DIGEST_SIZE;
|
|
req->block_sz = SHA256_BLOCK_SIZE;
|
|
|
|
return 0;
|
|
@@ -1180,6 +1312,7 @@ static int safexcel_sha224_init(struct a
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = SHA256_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA256_DIGEST_SIZE;
|
|
req->block_sz = SHA256_BLOCK_SIZE;
|
|
|
|
return 0;
|
|
@@ -1248,6 +1381,7 @@ static int safexcel_hmac_sha224_init(str
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = SHA256_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA256_DIGEST_SIZE;
|
|
req->block_sz = SHA256_BLOCK_SIZE;
|
|
req->hmac = true;
|
|
|
|
@@ -1318,6 +1452,7 @@ static int safexcel_hmac_sha256_init(str
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = SHA256_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA256_DIGEST_SIZE;
|
|
req->block_sz = SHA256_BLOCK_SIZE;
|
|
req->hmac = true;
|
|
|
|
@@ -1375,6 +1510,7 @@ static int safexcel_sha512_init(struct a
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = SHA512_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA512_DIGEST_SIZE;
|
|
req->block_sz = SHA512_BLOCK_SIZE;
|
|
|
|
return 0;
|
|
@@ -1430,6 +1566,7 @@ static int safexcel_sha384_init(struct a
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = SHA512_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA512_DIGEST_SIZE;
|
|
req->block_sz = SHA512_BLOCK_SIZE;
|
|
|
|
return 0;
|
|
@@ -1498,6 +1635,7 @@ static int safexcel_hmac_sha512_init(str
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = SHA512_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA512_DIGEST_SIZE;
|
|
req->block_sz = SHA512_BLOCK_SIZE;
|
|
req->hmac = true;
|
|
|
|
@@ -1568,6 +1706,7 @@ static int safexcel_hmac_sha384_init(str
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = SHA512_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA512_DIGEST_SIZE;
|
|
req->block_sz = SHA512_BLOCK_SIZE;
|
|
req->hmac = true;
|
|
|
|
@@ -1625,6 +1764,7 @@ static int safexcel_md5_init(struct ahas
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = MD5_DIGEST_SIZE;
|
|
+ req->digest_sz = MD5_DIGEST_SIZE;
|
|
req->block_sz = MD5_HMAC_BLOCK_SIZE;
|
|
|
|
return 0;
|
|
@@ -1686,6 +1826,7 @@ static int safexcel_hmac_md5_init(struct
|
|
ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
|
|
req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
req->state_sz = MD5_DIGEST_SIZE;
|
|
+ req->digest_sz = MD5_DIGEST_SIZE;
|
|
req->block_sz = MD5_HMAC_BLOCK_SIZE;
|
|
req->len_is_le = true; /* MD5 is little endian! ... */
|
|
req->hmac = true;
|
|
@@ -1738,5 +1879,1235 @@ struct safexcel_alg_template safexcel_al
|
|
.cra_module = THIS_MODULE,
|
|
},
|
|
},
|
|
+ },
|
|
+};
|
|
+
|
|
+static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
+ int ret = safexcel_ahash_cra_init(tfm);
|
|
+
|
|
+ /* Default 'key' is all zeroes */
|
|
+ memset(ctx->ipad, 0, sizeof(u32));
|
|
+ return ret;
|
|
+}
|
|
+
|
|
+static int safexcel_crc32_init(struct ahash_request *areq)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
|
|
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
|
|
+
|
|
+ memset(req, 0, sizeof(*req));
|
|
+
|
|
+ /* Start from loaded key */
|
|
+ req->state[0] = (__force __le32)le32_to_cpu(~ctx->ipad[0]);
|
|
+ /* Set processed to non-zero to enable invalidation detection */
|
|
+ req->len = sizeof(u32);
|
|
+ req->processed = sizeof(u32);
|
|
+
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
|
|
+ req->digest = CONTEXT_CONTROL_DIGEST_XCM;
|
|
+ req->state_sz = sizeof(u32);
|
|
+ req->digest_sz = sizeof(u32);
|
|
+ req->block_sz = sizeof(u32);
|
|
+
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
|
|
+ unsigned int keylen)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
|
|
+
|
|
+ if (keylen != sizeof(u32))
|
|
+ return -EINVAL;
|
|
+
|
|
+ memcpy(ctx->ipad, key, sizeof(u32));
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int safexcel_crc32_digest(struct ahash_request *areq)
|
|
+{
|
|
+ return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
|
|
+}
|
|
+
|
|
+struct safexcel_alg_template safexcel_alg_crc32 = {
|
|
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
|
|
+ .algo_mask = 0,
|
|
+ .alg.ahash = {
|
|
+ .init = safexcel_crc32_init,
|
|
+ .update = safexcel_ahash_update,
|
|
+ .final = safexcel_ahash_final,
|
|
+ .finup = safexcel_ahash_finup,
|
|
+ .digest = safexcel_crc32_digest,
|
|
+ .setkey = safexcel_crc32_setkey,
|
|
+ .export = safexcel_ahash_export,
|
|
+ .import = safexcel_ahash_import,
|
|
+ .halg = {
|
|
+ .digestsize = sizeof(u32),
|
|
+ .statesize = sizeof(struct safexcel_ahash_export_state),
|
|
+ .base = {
|
|
+ .cra_name = "crc32",
|
|
+ .cra_driver_name = "safexcel-crc32",
|
|
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
+ .cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
|
|
+ CRYPTO_ALG_ASYNC |
|
|
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
|
+ .cra_blocksize = 1,
|
|
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
|
|
+ .cra_init = safexcel_crc32_cra_init,
|
|
+ .cra_exit = safexcel_ahash_cra_exit,
|
|
+ .cra_module = THIS_MODULE,
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+};
|
|
+
|
|
+static int safexcel_cbcmac_init(struct ahash_request *areq)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
|
|
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
|
|
+
|
|
+ memset(req, 0, sizeof(*req));
|
|
+
|
|
+ /* Start from loaded keys */
|
|
+ memcpy(req->state, ctx->ipad, ctx->key_sz);
|
|
+ /* Set processed to non-zero to enable invalidation detection */
|
|
+ req->len = AES_BLOCK_SIZE;
|
|
+ req->processed = AES_BLOCK_SIZE;
|
|
+
|
|
+ req->digest = CONTEXT_CONTROL_DIGEST_XCM;
|
|
+ req->state_sz = ctx->key_sz;
|
|
+ req->digest_sz = AES_BLOCK_SIZE;
|
|
+ req->block_sz = AES_BLOCK_SIZE;
|
|
+ req->xcbcmac = true;
|
|
+
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
|
|
+ unsigned int len)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
|
|
+ struct crypto_aes_ctx aes;
|
|
+ int ret, i;
|
|
+
|
|
+ ret = aes_expandkey(&aes, key, len);
|
|
+ if (ret)
|
|
+ return ret;
|
|
+
|
|
+ memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
|
|
+ for (i = 0; i < len / sizeof(u32); i++)
|
|
+ ctx->ipad[i + 8] = (__force __le32)cpu_to_be32(aes.key_enc[i]);
|
|
+
|
|
+ if (len == AES_KEYSIZE_192) {
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
|
|
+ ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
|
|
+ } else if (len == AES_KEYSIZE_256) {
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
|
|
+ ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
|
|
+ } else {
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
|
|
+ ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
|
|
+ }
|
|
+ ctx->cbcmac = true;
|
|
+
|
|
+ memzero_explicit(&aes, sizeof(aes));
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int safexcel_cbcmac_digest(struct ahash_request *areq)
|
|
+{
|
|
+ return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
|
|
+}
|
|
+
|
|
+struct safexcel_alg_template safexcel_alg_cbcmac = {
|
|
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
|
|
+ .algo_mask = 0,
|
|
+ .alg.ahash = {
|
|
+ .init = safexcel_cbcmac_init,
|
|
+ .update = safexcel_ahash_update,
|
|
+ .final = safexcel_ahash_final,
|
|
+ .finup = safexcel_ahash_finup,
|
|
+ .digest = safexcel_cbcmac_digest,
|
|
+ .setkey = safexcel_cbcmac_setkey,
|
|
+ .export = safexcel_ahash_export,
|
|
+ .import = safexcel_ahash_import,
|
|
+ .halg = {
|
|
+ .digestsize = AES_BLOCK_SIZE,
|
|
+ .statesize = sizeof(struct safexcel_ahash_export_state),
|
|
+ .base = {
|
|
+ .cra_name = "cbcmac(aes)",
|
|
+ .cra_driver_name = "safexcel-cbcmac-aes",
|
|
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
+ .cra_flags = CRYPTO_ALG_ASYNC |
|
|
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
|
+ .cra_blocksize = 1,
|
|
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
|
|
+ .cra_init = safexcel_ahash_cra_init,
|
|
+ .cra_exit = safexcel_ahash_cra_exit,
|
|
+ .cra_module = THIS_MODULE,
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+};
|
|
+
|
|
+static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
|
|
+ unsigned int len)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
|
|
+ struct crypto_aes_ctx aes;
|
|
+ u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
|
|
+ int ret, i;
|
|
+
|
|
+ ret = aes_expandkey(&aes, key, len);
|
|
+ if (ret)
|
|
+ return ret;
|
|
+
|
|
+ /* precompute the XCBC key material */
|
|
+ crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
|
|
+ crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
|
|
+ CRYPTO_TFM_REQ_MASK);
|
|
+ ret = crypto_cipher_setkey(ctx->kaes, key, len);
|
|
+ if (ret)
|
|
+ return ret;
|
|
+
|
|
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
|
|
+ "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
|
|
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
|
|
+ "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
|
|
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
|
|
+ "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
|
|
+ for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
|
|
+ ctx->ipad[i] =
|
|
+ cpu_to_le32((__force u32)cpu_to_be32(key_tmp[i]));
|
|
+
|
|
+ crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
|
|
+ crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
|
|
+ CRYPTO_TFM_REQ_MASK);
|
|
+ ret = crypto_cipher_setkey(ctx->kaes,
|
|
+ (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
|
|
+ AES_MIN_KEY_SIZE);
|
|
+ if (ret)
|
|
+ return ret;
|
|
+
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
|
|
+ ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
|
|
+ ctx->cbcmac = false;
|
|
+
|
|
+ memzero_explicit(&aes, sizeof(aes));
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
+
|
|
+ safexcel_ahash_cra_init(tfm);
|
|
+ ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
|
|
+ return PTR_ERR_OR_ZERO(ctx->kaes);
|
|
+}
|
|
+
|
|
+static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
+
|
|
+ crypto_free_cipher(ctx->kaes);
|
|
+ safexcel_ahash_cra_exit(tfm);
|
|
+}
|
|
+
|
|
+struct safexcel_alg_template safexcel_alg_xcbcmac = {
|
|
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
|
|
+ .algo_mask = 0,
|
|
+ .alg.ahash = {
|
|
+ .init = safexcel_cbcmac_init,
|
|
+ .update = safexcel_ahash_update,
|
|
+ .final = safexcel_ahash_final,
|
|
+ .finup = safexcel_ahash_finup,
|
|
+ .digest = safexcel_cbcmac_digest,
|
|
+ .setkey = safexcel_xcbcmac_setkey,
|
|
+ .export = safexcel_ahash_export,
|
|
+ .import = safexcel_ahash_import,
|
|
+ .halg = {
|
|
+ .digestsize = AES_BLOCK_SIZE,
|
|
+ .statesize = sizeof(struct safexcel_ahash_export_state),
|
|
+ .base = {
|
|
+ .cra_name = "xcbc(aes)",
|
|
+ .cra_driver_name = "safexcel-xcbc-aes",
|
|
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
+ .cra_flags = CRYPTO_ALG_ASYNC |
|
|
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
|
+ .cra_blocksize = AES_BLOCK_SIZE,
|
|
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
|
|
+ .cra_init = safexcel_xcbcmac_cra_init,
|
|
+ .cra_exit = safexcel_xcbcmac_cra_exit,
|
|
+ .cra_module = THIS_MODULE,
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+};
|
|
+
|
|
+static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
|
|
+ unsigned int len)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
|
|
+ struct crypto_aes_ctx aes;
|
|
+ __be64 consts[4];
|
|
+ u64 _const[2];
|
|
+ u8 msb_mask, gfmask;
|
|
+ int ret, i;
|
|
+
|
|
+ ret = aes_expandkey(&aes, key, len);
|
|
+ if (ret)
|
|
+ return ret;
|
|
+
|
|
+ for (i = 0; i < len / sizeof(u32); i++)
|
|
+ ctx->ipad[i + 8] =
|
|
+ cpu_to_le32((__force u32)cpu_to_be32(aes.key_enc[i]));
|
|
+
|
|
+ /* precompute the CMAC key material */
|
|
+ crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
|
|
+ crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
|
|
+ CRYPTO_TFM_REQ_MASK);
|
|
+ ret = crypto_cipher_setkey(ctx->kaes, key, len);
|
|
+ if (ret)
|
|
+ return ret;
|
|
+
|
|
+ /* code below borrowed from crypto/cmac.c */
|
|
+ /* encrypt the zero block */
|
|
+ memset(consts, 0, AES_BLOCK_SIZE);
|
|
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
|
|
+
|
|
+ gfmask = 0x87;
|
|
+ _const[0] = be64_to_cpu(consts[1]);
|
|
+ _const[1] = be64_to_cpu(consts[0]);
|
|
+
|
|
+ /* gf(2^128) multiply zero-ciphertext with u and u^2 */
|
|
+ for (i = 0; i < 4; i += 2) {
|
|
+ msb_mask = ((s64)_const[1] >> 63) & gfmask;
|
|
+ _const[1] = (_const[1] << 1) | (_const[0] >> 63);
|
|
+ _const[0] = (_const[0] << 1) ^ msb_mask;
|
|
+
|
|
+ consts[i + 0] = cpu_to_be64(_const[1]);
|
|
+ consts[i + 1] = cpu_to_be64(_const[0]);
|
|
+ }
|
|
+ /* end of code borrowed from crypto/cmac.c */
|
|
+
|
|
+ for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
|
|
+ ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *)consts)[i]);
|
|
+
|
|
+ if (len == AES_KEYSIZE_192) {
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
|
|
+ ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
|
|
+ } else if (len == AES_KEYSIZE_256) {
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
|
|
+ ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
|
|
+ } else {
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
|
|
+ ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
|
|
+ }
|
|
+ ctx->cbcmac = false;
|
|
+
|
|
+ memzero_explicit(&aes, sizeof(aes));
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+struct safexcel_alg_template safexcel_alg_cmac = {
|
|
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
|
|
+ .algo_mask = 0,
|
|
+ .alg.ahash = {
|
|
+ .init = safexcel_cbcmac_init,
|
|
+ .update = safexcel_ahash_update,
|
|
+ .final = safexcel_ahash_final,
|
|
+ .finup = safexcel_ahash_finup,
|
|
+ .digest = safexcel_cbcmac_digest,
|
|
+ .setkey = safexcel_cmac_setkey,
|
|
+ .export = safexcel_ahash_export,
|
|
+ .import = safexcel_ahash_import,
|
|
+ .halg = {
|
|
+ .digestsize = AES_BLOCK_SIZE,
|
|
+ .statesize = sizeof(struct safexcel_ahash_export_state),
|
|
+ .base = {
|
|
+ .cra_name = "cmac(aes)",
|
|
+ .cra_driver_name = "safexcel-cmac-aes",
|
|
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
+ .cra_flags = CRYPTO_ALG_ASYNC |
|
|
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
|
+ .cra_blocksize = AES_BLOCK_SIZE,
|
|
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
|
|
+ .cra_init = safexcel_xcbcmac_cra_init,
|
|
+ .cra_exit = safexcel_xcbcmac_cra_exit,
|
|
+ .cra_module = THIS_MODULE,
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+};
|
|
+
|
|
+static int safexcel_sm3_init(struct ahash_request *areq)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
|
|
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
|
|
+
|
|
+ memset(req, 0, sizeof(*req));
|
|
+
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
|
|
+ req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
+ req->state_sz = SM3_DIGEST_SIZE;
|
|
+ req->digest_sz = SM3_DIGEST_SIZE;
|
|
+ req->block_sz = SM3_BLOCK_SIZE;
|
|
+
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int safexcel_sm3_digest(struct ahash_request *areq)
|
|
+{
|
|
+ int ret = safexcel_sm3_init(areq);
|
|
+
|
|
+ if (ret)
|
|
+ return ret;
|
|
+
|
|
+ return safexcel_ahash_finup(areq);
|
|
+}
|
|
+
|
|
+struct safexcel_alg_template safexcel_alg_sm3 = {
|
|
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
|
|
+ .algo_mask = SAFEXCEL_ALG_SM3,
|
|
+ .alg.ahash = {
|
|
+ .init = safexcel_sm3_init,
|
|
+ .update = safexcel_ahash_update,
|
|
+ .final = safexcel_ahash_final,
|
|
+ .finup = safexcel_ahash_finup,
|
|
+ .digest = safexcel_sm3_digest,
|
|
+ .export = safexcel_ahash_export,
|
|
+ .import = safexcel_ahash_import,
|
|
+ .halg = {
|
|
+ .digestsize = SM3_DIGEST_SIZE,
|
|
+ .statesize = sizeof(struct safexcel_ahash_export_state),
|
|
+ .base = {
|
|
+ .cra_name = "sm3",
|
|
+ .cra_driver_name = "safexcel-sm3",
|
|
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
+ .cra_flags = CRYPTO_ALG_ASYNC |
|
|
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
|
+ .cra_blocksize = SM3_BLOCK_SIZE,
|
|
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
|
|
+ .cra_init = safexcel_ahash_cra_init,
|
|
+ .cra_exit = safexcel_ahash_cra_exit,
|
|
+ .cra_module = THIS_MODULE,
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+};
|
|
+
|
|
+static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
|
|
+ unsigned int keylen)
|
|
+{
|
|
+ return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
|
|
+ SM3_DIGEST_SIZE);
|
|
+}
|
|
+
|
|
+static int safexcel_hmac_sm3_init(struct ahash_request *areq)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
|
|
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
|
|
+
|
|
+ memset(req, 0, sizeof(*req));
|
|
+
|
|
+ /* Start from ipad precompute */
|
|
+ memcpy(req->state, ctx->ipad, SM3_DIGEST_SIZE);
|
|
+ /* Already processed the key^ipad part now! */
|
|
+ req->len = SM3_BLOCK_SIZE;
|
|
+ req->processed = SM3_BLOCK_SIZE;
|
|
+
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
|
|
+ req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
|
|
+ req->state_sz = SM3_DIGEST_SIZE;
|
|
+ req->digest_sz = SM3_DIGEST_SIZE;
|
|
+ req->block_sz = SM3_BLOCK_SIZE;
|
|
+ req->hmac = true;
|
|
+
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
|
|
+{
|
|
+ int ret = safexcel_hmac_sm3_init(areq);
|
|
+
|
|
+ if (ret)
|
|
+ return ret;
|
|
+
|
|
+ return safexcel_ahash_finup(areq);
|
|
+}
|
|
+
|
|
+struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
|
|
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
|
|
+ .algo_mask = SAFEXCEL_ALG_SM3,
|
|
+ .alg.ahash = {
|
|
+ .init = safexcel_hmac_sm3_init,
|
|
+ .update = safexcel_ahash_update,
|
|
+ .final = safexcel_ahash_final,
|
|
+ .finup = safexcel_ahash_finup,
|
|
+ .digest = safexcel_hmac_sm3_digest,
|
|
+ .setkey = safexcel_hmac_sm3_setkey,
|
|
+ .export = safexcel_ahash_export,
|
|
+ .import = safexcel_ahash_import,
|
|
+ .halg = {
|
|
+ .digestsize = SM3_DIGEST_SIZE,
|
|
+ .statesize = sizeof(struct safexcel_ahash_export_state),
|
|
+ .base = {
|
|
+ .cra_name = "hmac(sm3)",
|
|
+ .cra_driver_name = "safexcel-hmac-sm3",
|
|
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
+ .cra_flags = CRYPTO_ALG_ASYNC |
|
|
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
|
|
+ .cra_blocksize = SM3_BLOCK_SIZE,
|
|
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
|
|
+ .cra_init = safexcel_ahash_cra_init,
|
|
+ .cra_exit = safexcel_ahash_cra_exit,
|
|
+ .cra_module = THIS_MODULE,
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+};
|
|
+
|
|
+static int safexcel_sha3_224_init(struct ahash_request *areq)
|
|
+{
|
|
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
|
|
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
|
|
+
|
|
+ memset(req, 0, sizeof(*req));
|
|
+
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
|
|
+ req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
|
|
+ req->state_sz = SHA3_224_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA3_224_DIGEST_SIZE;
|
|
+ req->block_sz = SHA3_224_BLOCK_SIZE;
|
|
+ ctx->do_fallback = false;
|
|
+ ctx->fb_init_done = false;
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int safexcel_sha3_fbcheck(struct ahash_request *req)
|
|
+{
|
|
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
|
|
+ struct ahash_request *subreq = ahash_request_ctx(req);
|
|
+ int ret = 0;
|
|
+
|
|
+ if (ctx->do_fallback) {
|
|
+ ahash_request_set_tfm(subreq, ctx->fback);
|
|
+ ahash_request_set_callback(subreq, req->base.flags,
|
|
+ req->base.complete, req->base.data);
|
|
+ ahash_request_set_crypt(subreq, req->src, req->result,
|
|
+ req->nbytes);
|
|
+ if (!ctx->fb_init_done) {
|
|
+ if (ctx->fb_do_setkey) {
|
|
+ /* Set fallback cipher HMAC key */
|
|
+ u8 key[SHA3_224_BLOCK_SIZE];
|
|
+
|
|
+ memcpy(key, ctx->ipad,
|
|
+ crypto_ahash_blocksize(ctx->fback) / 2);
|
|
+ memcpy(key +
|
|
+ crypto_ahash_blocksize(ctx->fback) / 2,
|
|
+ ctx->opad,
|
|
+ crypto_ahash_blocksize(ctx->fback) / 2);
|
|
+ ret = crypto_ahash_setkey(ctx->fback, key,
|
|
+ crypto_ahash_blocksize(ctx->fback));
|
|
+ memzero_explicit(key,
|
|
+ crypto_ahash_blocksize(ctx->fback));
|
|
+ ctx->fb_do_setkey = false;
|
|
+ }
|
|
+ ret = ret ?: crypto_ahash_init(subreq);
|
|
+ ctx->fb_init_done = true;
|
|
+ }
|
|
+ }
|
|
+ return ret;
|
|
+}
|
|
+
|
|
+static int safexcel_sha3_update(struct ahash_request *req)
|
|
+{
|
|
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
|
|
+ struct ahash_request *subreq = ahash_request_ctx(req);
|
|
+
|
|
+ ctx->do_fallback = true;
|
|
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
|
|
+}
|
|
+
|
|
+static int safexcel_sha3_final(struct ahash_request *req)
|
|
+{
|
|
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
|
|
+ struct ahash_request *subreq = ahash_request_ctx(req);
|
|
+
|
|
+ ctx->do_fallback = true;
|
|
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
|
|
+}
|
|
+
|
|
+static int safexcel_sha3_finup(struct ahash_request *req)
|
|
+{
|
|
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
|
|
+ struct ahash_request *subreq = ahash_request_ctx(req);
|
|
+
|
|
+ ctx->do_fallback |= !req->nbytes;
|
|
+ if (ctx->do_fallback)
|
|
+ /* Update or ex/import happened or len 0, cannot use the HW */
|
|
+ return safexcel_sha3_fbcheck(req) ?:
|
|
+ crypto_ahash_finup(subreq);
|
|
+ else
|
|
+ return safexcel_ahash_finup(req);
|
|
+}
|
|
+
|
|
+static int safexcel_sha3_digest_fallback(struct ahash_request *req)
|
|
+{
|
|
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
|
|
+ struct ahash_request *subreq = ahash_request_ctx(req);
|
|
+
|
|
+ ctx->do_fallback = true;
|
|
+ ctx->fb_init_done = false;
|
|
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
|
|
+}
|
|
+
|
|
+static int safexcel_sha3_224_digest(struct ahash_request *req)
|
|
+{
|
|
+ if (req->nbytes)
|
|
+ return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
|
|
+
|
|
+ /* HW cannot do zero length hash, use fallback instead */
|
|
+ return safexcel_sha3_digest_fallback(req);
|
|
+}
|
|
+
|
|
+static int safexcel_sha3_export(struct ahash_request *req, void *out)
|
|
+{
|
|
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
|
|
+ struct ahash_request *subreq = ahash_request_ctx(req);
|
|
+
|
|
+ ctx->do_fallback = true;
|
|
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
|
|
+}
|
|
+
|
|
+static int safexcel_sha3_import(struct ahash_request *req, const void *in)
|
|
+{
|
|
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
|
|
+ struct ahash_request *subreq = ahash_request_ctx(req);
|
|
+
|
|
+ ctx->do_fallback = true;
|
|
+ return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
|
|
+ // return safexcel_ahash_import(req, in);
|
|
+}
|
|
+
|
|
+static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
|
|
+{
|
|
+ struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
+
|
|
+ safexcel_ahash_cra_init(tfm);
|
|
+
|
|
+ /* Allocate fallback implementation */
|
|
+ ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
|
|
+ CRYPTO_ALG_ASYNC |
|
|
+ CRYPTO_ALG_NEED_FALLBACK);
|
|
+ if (IS_ERR(ctx->fback))
|
|
+ return PTR_ERR(ctx->fback);
|
|
+
|
|
+ /* Update statesize from fallback algorithm! */
|
|
+ crypto_hash_alg_common(ahash)->statesize =
|
|
+ crypto_ahash_statesize(ctx->fback);
|
|
+ crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
|
|
+ sizeof(struct ahash_request) +
|
|
+ crypto_ahash_reqsize(ctx->fback)));
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
|
|
+{
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
+
|
|
+ crypto_free_ahash(ctx->fback);
|
|
+ safexcel_ahash_cra_exit(tfm);
|
|
+}
|
|
+
|
|
+struct safexcel_alg_template safexcel_alg_sha3_224 = {
|
|
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
|
|
+ .algo_mask = SAFEXCEL_ALG_SHA3,
|
|
+ .alg.ahash = {
|
|
+ .init = safexcel_sha3_224_init,
|
|
+ .update = safexcel_sha3_update,
|
|
+ .final = safexcel_sha3_final,
|
|
+ .finup = safexcel_sha3_finup,
|
|
+ .digest = safexcel_sha3_224_digest,
|
|
+ .export = safexcel_sha3_export,
|
|
+ .import = safexcel_sha3_import,
|
|
+ .halg = {
|
|
+ .digestsize = SHA3_224_DIGEST_SIZE,
|
|
+ .statesize = sizeof(struct safexcel_ahash_export_state),
|
|
+ .base = {
|
|
+ .cra_name = "sha3-224",
|
|
+ .cra_driver_name = "safexcel-sha3-224",
|
|
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
+ .cra_flags = CRYPTO_ALG_ASYNC |
|
|
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
|
|
+ CRYPTO_ALG_NEED_FALLBACK,
|
|
+ .cra_blocksize = SHA3_224_BLOCK_SIZE,
|
|
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
|
|
+ .cra_init = safexcel_sha3_cra_init,
|
|
+ .cra_exit = safexcel_sha3_cra_exit,
|
|
+ .cra_module = THIS_MODULE,
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+};
|
|
+
|
|
+static int safexcel_sha3_256_init(struct ahash_request *areq)
|
|
+{
|
|
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
|
|
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
|
|
+
|
|
+ memset(req, 0, sizeof(*req));
|
|
+
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
|
|
+ req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
|
|
+ req->state_sz = SHA3_256_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA3_256_DIGEST_SIZE;
|
|
+ req->block_sz = SHA3_256_BLOCK_SIZE;
|
|
+ ctx->do_fallback = false;
|
|
+ ctx->fb_init_done = false;
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int safexcel_sha3_256_digest(struct ahash_request *req)
|
|
+{
|
|
+ if (req->nbytes)
|
|
+ return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
|
|
+
|
|
+ /* HW cannot do zero length hash, use fallback instead */
|
|
+ return safexcel_sha3_digest_fallback(req);
|
|
+}
|
|
+
|
|
+struct safexcel_alg_template safexcel_alg_sha3_256 = {
|
|
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
|
|
+ .algo_mask = SAFEXCEL_ALG_SHA3,
|
|
+ .alg.ahash = {
|
|
+ .init = safexcel_sha3_256_init,
|
|
+ .update = safexcel_sha3_update,
|
|
+ .final = safexcel_sha3_final,
|
|
+ .finup = safexcel_sha3_finup,
|
|
+ .digest = safexcel_sha3_256_digest,
|
|
+ .export = safexcel_sha3_export,
|
|
+ .import = safexcel_sha3_import,
|
|
+ .halg = {
|
|
+ .digestsize = SHA3_256_DIGEST_SIZE,
|
|
+ .statesize = sizeof(struct safexcel_ahash_export_state),
|
|
+ .base = {
|
|
+ .cra_name = "sha3-256",
|
|
+ .cra_driver_name = "safexcel-sha3-256",
|
|
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
+ .cra_flags = CRYPTO_ALG_ASYNC |
|
|
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
|
|
+ CRYPTO_ALG_NEED_FALLBACK,
|
|
+ .cra_blocksize = SHA3_256_BLOCK_SIZE,
|
|
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
|
|
+ .cra_init = safexcel_sha3_cra_init,
|
|
+ .cra_exit = safexcel_sha3_cra_exit,
|
|
+ .cra_module = THIS_MODULE,
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+};
|
|
+
|
|
+static int safexcel_sha3_384_init(struct ahash_request *areq)
|
|
+{
|
|
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
|
|
+ struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
|
|
+ struct safexcel_ahash_req *req = ahash_request_ctx(areq);
|
|
+
|
|
+ memset(req, 0, sizeof(*req));
|
|
+
|
|
+ ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
|
|
+ req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
|
|
+ req->state_sz = SHA3_384_DIGEST_SIZE;
|
|
+ req->digest_sz = SHA3_384_DIGEST_SIZE;
|
|
+ req->block_sz = SHA3_384_BLOCK_SIZE;
|
|
+ ctx->do_fallback = false;
|
|
+ ctx->fb_init_done = false;
|
|
+ return 0;
|
|
+}
|
|
+
|
|
+static int safexcel_sha3_384_digest(struct ahash_request *req)
|
|
+{
|
|
+ if (req->nbytes)
|
|
+ return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
|
|
+
|
|
+ /* HW cannot do zero length hash, use fallback instead */
|
|
+ return safexcel_sha3_digest_fallback(req);
|
|
+}
|
|
+
|
|
+struct safexcel_alg_template safexcel_alg_sha3_384 = {
|
|
+ .type = SAFEXCEL_ALG_TYPE_AHASH,
|
|
+ .algo_mask = SAFEXCEL_ALG_SHA3,
|
|
+ .alg.ahash = {
|
|
+ .init = safexcel_sha3_384_init,
|
|
+ .update = safexcel_sha3_update,
|
|
+ .final = safexcel_sha3_final,
|
|
+ .finup = safexcel_sha3_finup,
|
|
+ .digest = safexcel_sha3_384_digest,
|
|
+ .export = safexcel_sha3_export,
|
|
+ .import = safexcel_sha3_import,
|
|
+ .halg = {
|
|
+ .digestsize = SHA3_384_DIGEST_SIZE,
|
|
+ .statesize = sizeof(struct safexcel_ahash_export_state),
|
|
+ .base = {
|
|
+ .cra_name = "sha3-384",
|
|
+ .cra_driver_name = "safexcel-sha3-384",
|
|
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
+ .cra_flags = CRYPTO_ALG_ASYNC |
|
|
+ CRYPTO_ALG_KERN_DRIVER_ONLY |
|
|
+ CRYPTO_ALG_NEED_FALLBACK,
|
|
+ .cra_blocksize = SHA3_384_BLOCK_SIZE,
|
|
+ .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
|
|
+ .cra_init = safexcel_sha3_cra_init,
|
|
+ .cra_exit = safexcel_sha3_cra_exit,
|
|
+ .cra_module = THIS_MODULE,
|
|
+ },
|
|
+ },
|
|
+ },
|
|
+};
|
|
+
+static int safexcel_sha3_512_init(struct ahash_request *areq)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+	memset(req, 0, sizeof(*req));
+
+	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
+	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
+	req->state_sz = SHA3_512_DIGEST_SIZE;
+	req->digest_sz = SHA3_512_DIGEST_SIZE;
+	req->block_sz = SHA3_512_BLOCK_SIZE;
+	ctx->do_fallback = false;
+	ctx->fb_init_done = false;
+	return 0;
+}
+
+static int safexcel_sha3_512_digest(struct ahash_request *req)
+{
+	if (req->nbytes)
+		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
+
+	/* HW cannot do zero length hash, use fallback instead */
+	return safexcel_sha3_digest_fallback(req);
+}
+
+struct safexcel_alg_template safexcel_alg_sha3_512 = {
+	.type = SAFEXCEL_ALG_TYPE_AHASH,
+	.algo_mask = SAFEXCEL_ALG_SHA3,
+	.alg.ahash = {
+		.init = safexcel_sha3_512_init,
+		.update = safexcel_sha3_update,
+		.final = safexcel_sha3_final,
+		.finup = safexcel_sha3_finup,
+		.digest = safexcel_sha3_512_digest,
+		.export = safexcel_sha3_export,
+		.import = safexcel_sha3_import,
+		.halg = {
+			.digestsize = SHA3_512_DIGEST_SIZE,
+			.statesize = sizeof(struct safexcel_ahash_export_state),
+			.base = {
+				.cra_name = "sha3-512",
+				.cra_driver_name = "safexcel-sha3-512",
+				.cra_priority = SAFEXCEL_CRA_PRIORITY,
+				.cra_flags = CRYPTO_ALG_ASYNC |
+					     CRYPTO_ALG_KERN_DRIVER_ONLY |
+					     CRYPTO_ALG_NEED_FALLBACK,
+				.cra_blocksize = SHA3_512_BLOCK_SIZE,
+				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+				.cra_init = safexcel_sha3_cra_init,
+				.cra_exit = safexcel_sha3_cra_exit,
+				.cra_module = THIS_MODULE,
+			},
+		},
+	},
+};
+
+static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
+{
+	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
+	int ret;
+
+	ret = safexcel_sha3_cra_init(tfm);
+	if (ret)
+		return ret;
+
+	/* Allocate precalc basic digest implementation */
+	ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
+	if (IS_ERR(ctx->shpre))
+		return PTR_ERR(ctx->shpre);
+
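+	/* The shash_desc is allocated with the algorithm's context behind it */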
+	ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
+			      crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
+	if (!ctx->shdesc) {
+		crypto_free_shash(ctx->shpre);
+		return -ENOMEM;
+	}
+	ctx->shdesc->tfm = ctx->shpre;
+	return 0;
+}
+
+static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
+{
+	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
+
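+	/* Release the ahash fallback plus the precalc shash and its desc */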
+	crypto_free_ahash(ctx->fback);
+	crypto_free_shash(ctx->shpre);
+	kfree(ctx->shdesc);
+	safexcel_ahash_cra_exit(tfm);
+}
+
+static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
+				     unsigned int keylen)
+{
+	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+	int ret = 0;
+
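+	/*
+	 * The (hashed and padded) key ends up split across the ipad and
+	 * opad buffers, one half-blocksize piece in each, matching what
+	 * the HMAC infrastructure expects.
+	 */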
+	if (keylen > crypto_ahash_blocksize(tfm)) {
+		/*
+		 * If the key is larger than the blocksize, then hash it
+		 * first using our fallback hash
+		 */
+		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
+					  (u8 *)ctx->ipad);
+		keylen = crypto_shash_digestsize(ctx->shpre);
+
+		/*
+		 * If the digest is larger than half the blocksize, we need to
+		 * move the rest to opad due to the way our HMAC infra works.
+		 */
+		if (keylen > crypto_ahash_blocksize(tfm) / 2)
+			/* Buffers overlap, need to use memmove instead of memcpy! */
+			memmove(ctx->opad,
+				(u8 *)ctx->ipad +
+				crypto_ahash_blocksize(tfm) / 2,
+				keylen - crypto_ahash_blocksize(tfm) / 2);
+	} else {
+		/*
+		 * Copy the key to our ipad & opad buffers
+		 * Note that ipad and opad each contain one half of the key,
+		 * to match the existing HMAC driver infrastructure.
+		 */
+		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
+			memcpy(ctx->ipad, key, keylen);
+		} else {
+			memcpy(ctx->ipad, key,
+			       crypto_ahash_blocksize(tfm) / 2);
+			memcpy(ctx->opad,
+			       key + crypto_ahash_blocksize(tfm) / 2,
+			       keylen - crypto_ahash_blocksize(tfm) / 2);
+		}
+	}
+
+	/* Pad key with zeroes */
+	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
+		memset((u8 *)ctx->ipad + keylen, 0,
+		       crypto_ahash_blocksize(tfm) / 2 - keylen);
+		memset(ctx->opad, 0, crypto_ahash_blocksize(tfm) / 2);
+	} else {
+		memset((u8 *)ctx->opad + keylen -
+		       crypto_ahash_blocksize(tfm) / 2, 0,
+		       crypto_ahash_blocksize(tfm) - keylen);
+	}
+
+	/* If doing fallback, still need to set the new key! */
+	ctx->fb_do_setkey = true;
+	return ret;
+}
+
+static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+	memset(req, 0, sizeof(*req));
+
+	/* Copy (half of) the key */
+	memcpy(req->state, ctx->ipad, SHA3_224_BLOCK_SIZE / 2);
+	/* Start of HMAC should have len == processed == blocksize */
+	req->len = SHA3_224_BLOCK_SIZE;
+	req->processed = SHA3_224_BLOCK_SIZE;
+	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
+	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
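+	/* state only holds the key half copied above, hence half a block */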
+	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
+	req->digest_sz = SHA3_224_DIGEST_SIZE;
+	req->block_sz = SHA3_224_BLOCK_SIZE;
+	req->hmac = true;
+	ctx->do_fallback = false;
+	ctx->fb_init_done = false;
+	return 0;
+}
+
+static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
+{
+	if (req->nbytes)
+		return safexcel_hmac_sha3_224_init(req) ?:
+		       safexcel_ahash_finup(req);
+
+	/* HW cannot do zero length HMAC, use fallback instead */
+	return safexcel_sha3_digest_fallback(req);
+}
+
+static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
+{
+	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
+}
+
+struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
+	.type = SAFEXCEL_ALG_TYPE_AHASH,
+	.algo_mask = SAFEXCEL_ALG_SHA3,
+	.alg.ahash = {
+		.init = safexcel_hmac_sha3_224_init,
+		.update = safexcel_sha3_update,
+		.final = safexcel_sha3_final,
+		.finup = safexcel_sha3_finup,
+		.digest = safexcel_hmac_sha3_224_digest,
+		.setkey = safexcel_hmac_sha3_setkey,
+		.export = safexcel_sha3_export,
+		.import = safexcel_sha3_import,
+		.halg = {
+			.digestsize = SHA3_224_DIGEST_SIZE,
+			.statesize = sizeof(struct safexcel_ahash_export_state),
+			.base = {
+				.cra_name = "hmac(sha3-224)",
+				.cra_driver_name = "safexcel-hmac-sha3-224",
+				.cra_priority = SAFEXCEL_CRA_PRIORITY,
+				.cra_flags = CRYPTO_ALG_ASYNC |
+					     CRYPTO_ALG_KERN_DRIVER_ONLY |
+					     CRYPTO_ALG_NEED_FALLBACK,
+				.cra_blocksize = SHA3_224_BLOCK_SIZE,
+				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+				.cra_init = safexcel_hmac_sha3_224_cra_init,
+				.cra_exit = safexcel_hmac_sha3_cra_exit,
+				.cra_module = THIS_MODULE,
+			},
+		},
+	},
+};
+
+static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+	memset(req, 0, sizeof(*req));
+
+	/* Copy (half of) the key */
+	memcpy(req->state, ctx->ipad, SHA3_256_BLOCK_SIZE / 2);
+	/* Start of HMAC should have len == processed == blocksize */
+	req->len = SHA3_256_BLOCK_SIZE;
+	req->processed = SHA3_256_BLOCK_SIZE;
+	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
+	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
+	req->digest_sz = SHA3_256_DIGEST_SIZE;
+	req->block_sz = SHA3_256_BLOCK_SIZE;
+	req->hmac = true;
+	ctx->do_fallback = false;
+	ctx->fb_init_done = false;
+	return 0;
+}
+
+static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
+{
+	if (req->nbytes)
+		return safexcel_hmac_sha3_256_init(req) ?:
+		       safexcel_ahash_finup(req);
+
+	/* HW cannot do zero length HMAC, use fallback instead */
+	return safexcel_sha3_digest_fallback(req);
+}
+
+static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
+{
+	return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
+}
+
+struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
+	.type = SAFEXCEL_ALG_TYPE_AHASH,
+	.algo_mask = SAFEXCEL_ALG_SHA3,
+	.alg.ahash = {
+		.init = safexcel_hmac_sha3_256_init,
+		.update = safexcel_sha3_update,
+		.final = safexcel_sha3_final,
+		.finup = safexcel_sha3_finup,
+		.digest = safexcel_hmac_sha3_256_digest,
+		.setkey = safexcel_hmac_sha3_setkey,
+		.export = safexcel_sha3_export,
+		.import = safexcel_sha3_import,
+		.halg = {
+			.digestsize = SHA3_256_DIGEST_SIZE,
+			.statesize = sizeof(struct safexcel_ahash_export_state),
+			.base = {
+				.cra_name = "hmac(sha3-256)",
+				.cra_driver_name = "safexcel-hmac-sha3-256",
+				.cra_priority = SAFEXCEL_CRA_PRIORITY,
+				.cra_flags = CRYPTO_ALG_ASYNC |
+					     CRYPTO_ALG_KERN_DRIVER_ONLY |
+					     CRYPTO_ALG_NEED_FALLBACK,
+				.cra_blocksize = SHA3_256_BLOCK_SIZE,
+				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+				.cra_init = safexcel_hmac_sha3_256_cra_init,
+				.cra_exit = safexcel_hmac_sha3_cra_exit,
+				.cra_module = THIS_MODULE,
+			},
+		},
+	},
+};
+
+static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+	memset(req, 0, sizeof(*req));
+
+	/* Copy (half of) the key */
+	memcpy(req->state, ctx->ipad, SHA3_384_BLOCK_SIZE / 2);
+	/* Start of HMAC should have len == processed == blocksize */
+	req->len = SHA3_384_BLOCK_SIZE;
+	req->processed = SHA3_384_BLOCK_SIZE;
+	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
+	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
+	req->digest_sz = SHA3_384_DIGEST_SIZE;
+	req->block_sz = SHA3_384_BLOCK_SIZE;
+	req->hmac = true;
+	ctx->do_fallback = false;
+	ctx->fb_init_done = false;
+	return 0;
+}
+
+static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
+{
+	if (req->nbytes)
+		return safexcel_hmac_sha3_384_init(req) ?:
+		       safexcel_ahash_finup(req);
+
+	/* HW cannot do zero length HMAC, use fallback instead */
+	return safexcel_sha3_digest_fallback(req);
+}
+
+static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
+{
+	return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
+}
+
+struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
+	.type = SAFEXCEL_ALG_TYPE_AHASH,
+	.algo_mask = SAFEXCEL_ALG_SHA3,
+	.alg.ahash = {
+		.init = safexcel_hmac_sha3_384_init,
+		.update = safexcel_sha3_update,
+		.final = safexcel_sha3_final,
+		.finup = safexcel_sha3_finup,
+		.digest = safexcel_hmac_sha3_384_digest,
+		.setkey = safexcel_hmac_sha3_setkey,
+		.export = safexcel_sha3_export,
+		.import = safexcel_sha3_import,
+		.halg = {
+			.digestsize = SHA3_384_DIGEST_SIZE,
+			.statesize = sizeof(struct safexcel_ahash_export_state),
+			.base = {
+				.cra_name = "hmac(sha3-384)",
+				.cra_driver_name = "safexcel-hmac-sha3-384",
+				.cra_priority = SAFEXCEL_CRA_PRIORITY,
+				.cra_flags = CRYPTO_ALG_ASYNC |
+					     CRYPTO_ALG_KERN_DRIVER_ONLY |
+					     CRYPTO_ALG_NEED_FALLBACK,
+				.cra_blocksize = SHA3_384_BLOCK_SIZE,
+				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+				.cra_init = safexcel_hmac_sha3_384_cra_init,
+				.cra_exit = safexcel_hmac_sha3_cra_exit,
+				.cra_module = THIS_MODULE,
+			},
+		},
+	},
+};
+
+static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
+	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
+	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+
+	memset(req, 0, sizeof(*req));
+
+	/* Copy (half of) the key */
+	memcpy(req->state, ctx->ipad, SHA3_512_BLOCK_SIZE / 2);
+	/* Start of HMAC should have len == processed == blocksize */
+	req->len = SHA3_512_BLOCK_SIZE;
+	req->processed = SHA3_512_BLOCK_SIZE;
+	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
+	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
+	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
+	req->digest_sz = SHA3_512_DIGEST_SIZE;
+	req->block_sz = SHA3_512_BLOCK_SIZE;
+	req->hmac = true;
+	ctx->do_fallback = false;
+	ctx->fb_init_done = false;
+	return 0;
+}
+
+static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
+{
+	if (req->nbytes)
+		return safexcel_hmac_sha3_512_init(req) ?:
+		       safexcel_ahash_finup(req);
+
+	/* HW cannot do zero length HMAC, use fallback instead */
+	return safexcel_sha3_digest_fallback(req);
+}
+
+static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
+{
+	return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
+}
+
+struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
+	.type = SAFEXCEL_ALG_TYPE_AHASH,
+	.algo_mask = SAFEXCEL_ALG_SHA3,
+	.alg.ahash = {
+		.init = safexcel_hmac_sha3_512_init,
+		.update = safexcel_sha3_update,
+		.final = safexcel_sha3_final,
+		.finup = safexcel_sha3_finup,
+		.digest = safexcel_hmac_sha3_512_digest,
+		.setkey = safexcel_hmac_sha3_setkey,
+		.export = safexcel_sha3_export,
+		.import = safexcel_sha3_import,
+		.halg = {
+			.digestsize = SHA3_512_DIGEST_SIZE,
+			.statesize = sizeof(struct safexcel_ahash_export_state),
+			.base = {
+				.cra_name = "hmac(sha3-512)",
+				.cra_driver_name = "safexcel-hmac-sha3-512",
+				.cra_priority = SAFEXCEL_CRA_PRIORITY,
+				.cra_flags = CRYPTO_ALG_ASYNC |
+					     CRYPTO_ALG_KERN_DRIVER_ONLY |
+					     CRYPTO_ALG_NEED_FALLBACK,
+				.cra_blocksize = SHA3_512_BLOCK_SIZE,
+				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
+				.cra_init = safexcel_hmac_sha3_512_cra_init,
+				.cra_exit = safexcel_hmac_sha3_cra_exit,
+				.cra_module = THIS_MODULE,
+			},
+		},
 	},
 };
--- a/drivers/crypto/inside-secure/safexcel_ring.c
+++ b/drivers/crypto/inside-secure/safexcel_ring.c
@@ -14,7 +14,12 @@ int safexcel_init_ring_descriptors(struc
 				   struct safexcel_desc_ring *cdr,
 				   struct safexcel_desc_ring *rdr)
 {
-	cdr->offset = sizeof(u32) * priv->config.cd_offset;
+	int i;
+	struct safexcel_command_desc *cdesc;
+	dma_addr_t atok;
+
+	/* Actual command descriptor ring */
+	cdr->offset = priv->config.cd_offset;
 	cdr->base = dmam_alloc_coherent(priv->dev,
 					cdr->offset * EIP197_DEFAULT_RING_SIZE,
 					&cdr->base_dma, GFP_KERNEL);
@@ -24,7 +29,34 @@ int safexcel_init_ring_descriptors(struc
 	cdr->base_end = cdr->base + cdr->offset * (EIP197_DEFAULT_RING_SIZE - 1);
 	cdr->read = cdr->base;
|
-	rdr->offset = sizeof(u32) * priv->config.rd_offset;
+	/* Command descriptor shadow ring for storing additional token data */
+	cdr->shoffset = priv->config.cdsh_offset;
+	cdr->shbase = dmam_alloc_coherent(priv->dev,
+					  cdr->shoffset *
+					  EIP197_DEFAULT_RING_SIZE,
+					  &cdr->shbase_dma, GFP_KERNEL);
+	if (!cdr->shbase)
+		return -ENOMEM;
+	cdr->shwrite = cdr->shbase;
+	cdr->shbase_end = cdr->shbase + cdr->shoffset *
+					(EIP197_DEFAULT_RING_SIZE - 1);
+
+	/*
+	 * Populate command descriptors with physical pointers to shadow descs.
+	 * Note that we only need to do this once if we don't overwrite them.
+	 */
+	cdesc = cdr->base;
+	atok = cdr->shbase_dma;
+	for (i = 0; i < EIP197_DEFAULT_RING_SIZE; i++) {
+		cdesc->atok_lo = lower_32_bits(atok);
+		cdesc->atok_hi = upper_32_bits(atok);
+		cdesc = (void *)cdesc + cdr->offset;
+		atok += cdr->shoffset;
+	}
+
+	rdr->offset = priv->config.rd_offset;
+	/* Use shoffset for result token offset here */
+	rdr->shoffset = priv->config.res_offset;
 	rdr->base = dmam_alloc_coherent(priv->dev,
 					rdr->offset * EIP197_DEFAULT_RING_SIZE,
 					&rdr->base_dma, GFP_KERNEL);
@@ -42,11 +74,40 @@ inline int safexcel_select_ring(struct s
 	return (atomic_inc_return(&priv->ring_used) % priv->config.rings);
 }
|
-static void *safexcel_ring_next_wptr(struct safexcel_crypto_priv *priv,
-				     struct safexcel_desc_ring *ring)
+static void *safexcel_ring_next_cwptr(struct safexcel_crypto_priv *priv,
+				      struct safexcel_desc_ring *ring,
+				      bool first,
+				      struct safexcel_token **atoken)
 {
 	void *ptr = ring->write;
|
+	if (first)
+		*atoken = ring->shwrite;
+
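+	/* Ring is full once write would catch up with read; keep 1 slot free */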
+	if ((ring->write == ring->read - ring->offset) ||
+	    (ring->read == ring->base && ring->write == ring->base_end))
+		return ERR_PTR(-ENOMEM);
+
+	if (ring->write == ring->base_end) {
+		ring->write = ring->base;
+		ring->shwrite = ring->shbase;
+	} else {
+		ring->write += ring->offset;
+		ring->shwrite += ring->shoffset;
+	}
+
+	return ptr;
+}
+
+static void *safexcel_ring_next_rwptr(struct safexcel_crypto_priv *priv,
+				      struct safexcel_desc_ring *ring,
+				      struct result_data_desc **rtoken)
+{
+	void *ptr = ring->write;
+
+	/* Result token at relative offset shoffset */
+	*rtoken = ring->write + ring->shoffset;
+
 	if ((ring->write == ring->read - ring->offset) ||
 	    (ring->read == ring->base && ring->write == ring->base_end))
 		return ERR_PTR(-ENOMEM);
@@ -106,10 +167,13 @@ void safexcel_ring_rollback_wptr(struct
 	if (ring->write == ring->read)
 		return;
|
-	if (ring->write == ring->base)
+	if (ring->write == ring->base) {
 		ring->write = ring->base_end;
-	else
+		ring->shwrite = ring->shbase_end;
+	} else {
 		ring->write -= ring->offset;
+		ring->shwrite -= ring->shoffset;
+	}
 }
|
 struct safexcel_command_desc *safexcel_add_cdesc(struct safexcel_crypto_priv *priv,
@@ -117,26 +181,26 @@ struct safexcel_command_desc *safexcel_a
 						 bool first, bool last,
 						 dma_addr_t data, u32 data_len,
 						 u32 full_data_len,
-						 dma_addr_t context) {
+						 dma_addr_t context,
+						 struct safexcel_token **atoken)
+{
 	struct safexcel_command_desc *cdesc;
-	int i;
|
-	cdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].cdr);
+	cdesc = safexcel_ring_next_cwptr(priv, &priv->ring[ring_id].cdr,
+					 first, atoken);
 	if (IS_ERR(cdesc))
 		return cdesc;
|
-	memset(cdesc, 0, sizeof(struct safexcel_command_desc));
-
-	cdesc->first_seg = first;
-	cdesc->last_seg = last;
 	cdesc->particle_size = data_len;
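+	/* Descriptors are no longer zeroed first; write out every field */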
+	cdesc->rsvd0 = 0;
+	cdesc->last_seg = last;
+	cdesc->first_seg = first;
+	cdesc->additional_cdata_size = 0;
+	cdesc->rsvd1 = 0;
 	cdesc->data_lo = lower_32_bits(data);
 	cdesc->data_hi = upper_32_bits(data);
|
-	if (first && context) {
-		struct safexcel_token *token =
-			(struct safexcel_token *)cdesc->control_data.token;
-
+	if (first) {
 		/*
 		 * Note that the length here MUST be >0 or else the EIP(1)97
 		 * may hang. Newer EIP197 firmware actually incorporates this
@@ -146,20 +210,12 @@ struct safexcel_command_desc *safexcel_a
 		cdesc->control_data.packet_length = full_data_len ?: 1;
 		cdesc->control_data.options = EIP197_OPTION_MAGIC_VALUE |
 					      EIP197_OPTION_64BIT_CTX |
-					      EIP197_OPTION_CTX_CTRL_IN_CMD;
-		cdesc->control_data.context_lo =
-			(lower_32_bits(context) & GENMASK(31, 2)) >> 2;
+					      EIP197_OPTION_CTX_CTRL_IN_CMD |
+					      EIP197_OPTION_RC_AUTO;
+		cdesc->control_data.type = EIP197_TYPE_BCLA;
+		cdesc->control_data.context_lo = lower_32_bits(context) |
+						 EIP197_CONTEXT_SMALL;
 		cdesc->control_data.context_hi = upper_32_bits(context);
-
-		if (priv->version == EIP197B_MRVL ||
-		    priv->version == EIP197D_MRVL)
-			cdesc->control_data.options |= EIP197_OPTION_RC_AUTO;
-
-		/* TODO: large xform HMAC with SHA-384/512 uses refresh = 3 */
-		cdesc->control_data.refresh = 2;
-
-		for (i = 0; i < EIP197_MAX_TOKENS; i++)
-			eip197_noop_token(&token[i]);
 	}
|
 	return cdesc;
@@ -171,18 +227,27 @@ struct safexcel_result_desc *safexcel_ad
 						 dma_addr_t data, u32 len)
 {
 	struct safexcel_result_desc *rdesc;
+	struct result_data_desc *rtoken;
|
-	rdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].rdr);
+	rdesc = safexcel_ring_next_rwptr(priv, &priv->ring[ring_id].rdr,
+					 &rtoken);
 	if (IS_ERR(rdesc))
 		return rdesc;
|
-	memset(rdesc, 0, sizeof(struct safexcel_result_desc));
-
-	rdesc->first_seg = first;
-	rdesc->last_seg = last;
 	rdesc->particle_size = len;
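+	/* As with the command descriptor, initialize all fields explicitly */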
+	rdesc->rsvd0 = 0;
+	rdesc->descriptor_overflow = 0;
+	rdesc->buffer_overflow = 0;
+	rdesc->last_seg = last;
+	rdesc->first_seg = first;
+	rdesc->result_size = EIP197_RD64_RESULT_SIZE;
+	rdesc->rsvd1 = 0;
 	rdesc->data_lo = lower_32_bits(data);
 	rdesc->data_hi = upper_32_bits(data);
|
+	/* Clear length & error code in result token */
+	rtoken->packet_length = 0;
+	rtoken->error_code = 0;
+
 	return rdesc;
 }