@@ -451,6 +451,9 @@ aesni_mb_set_session_cipher_parameters(const IMB_MGR *mb_mgr,
uint8_t is_zuc = 0;
uint8_t is_snow3g = 0;
uint8_t is_kasumi = 0;
+#if IMB_VERSION(1, 5, 0) <= IMB_VERSION_NUM
+ uint8_t is_sm4 = 0;
+#endif
if (xform == NULL) {
sess->template_job.cipher_mode = IMB_CIPHER_NULL;
@@ -521,6 +524,20 @@ aesni_mb_set_session_cipher_parameters(const IMB_MGR *mb_mgr,
sess->iv.offset = xform->cipher.iv.offset;
sess->template_job.iv_len_in_bytes = xform->cipher.iv.length;
return 0;
+#if IMB_VERSION(1, 5, 0) <= IMB_VERSION_NUM
+ case RTE_CRYPTO_CIPHER_SM4_CBC:
+ sess->template_job.cipher_mode = IMB_CIPHER_SM4_CBC;
+ is_sm4 = 1;
+ break;
+ case RTE_CRYPTO_CIPHER_SM4_ECB:
+ sess->template_job.cipher_mode = IMB_CIPHER_SM4_ECB;
+ is_sm4 = 1;
+ break;
+ case RTE_CRYPTO_CIPHER_SM4_CTR:
+ sess->template_job.cipher_mode = IMB_CIPHER_SM4_CNTR;
+ is_sm4 = 1;
+ break;
+#endif
default:
IPSEC_MB_LOG(ERR, "Unsupported cipher mode parameter");
return -ENOTSUP;
@@ -655,6 +672,15 @@ aesni_mb_set_session_cipher_parameters(const IMB_MGR *mb_mgr,
&sess->cipher.pKeySched_kasumi_cipher);
sess->template_job.enc_keys = &sess->cipher.pKeySched_kasumi_cipher;
sess->template_job.dec_keys = &sess->cipher.pKeySched_kasumi_cipher;
+#if IMB_VERSION(1, 5, 0) <= IMB_VERSION_NUM
+ } else if (is_sm4) {
+ sess->template_job.key_len_in_bytes = IMB_KEY_128_BYTES;
+ IMB_SM4_KEYEXP(mb_mgr, xform->cipher.key.data,
+ sess->cipher.expanded_sm4_keys.encode,
+ sess->cipher.expanded_sm4_keys.decode);
+ sess->template_job.enc_keys = sess->cipher.expanded_sm4_keys.encode;
+ sess->template_job.dec_keys = sess->cipher.expanded_sm4_keys.decode;
+#endif
} else {
if (xform->cipher.key.length != 8) {
IPSEC_MB_LOG(ERR, "Invalid cipher key length");
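For reference (illustrative sketch, not part of the patch): this is roughly the application-side cipher transform that would reach the new RTE_CRYPTO_CIPHER_SM4_CBC case above and trigger the IMB_SM4_KEYEXP() expansion at session setup. The key contents and IV offset are placeholders, not values taken from this patch.

#include <rte_crypto.h>

/* Hypothetical IV location inside the crypto op; any offset the
 * application reserves after the op and sym_op structures works. */
#define IV_OFFSET (sizeof(struct rte_crypto_op) + sizeof(struct rte_crypto_sym_op))

/* Placeholder 128-bit SM4 key. */
static uint8_t sm4_cbc_key[16];

static const struct rte_crypto_sym_xform sm4_cbc_xform = {
	.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
	.next = NULL,
	.cipher = {
		.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
		.algo = RTE_CRYPTO_CIPHER_SM4_CBC,
		.key = {
			.data = sm4_cbc_key,
			.length = sizeof(sm4_cbc_key),	/* 16 bytes, per the capability */
		},
		.iv = {
			.offset = IV_OFFSET,
			.length = 16,			/* SM4 block size */
		},
	},
};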
@@ -732,6 +732,7 @@ static const struct rte_cryptodev_capabilities aesni_mb_capabilities[] = {
}, }
}, }
},
+#if IMB_VERSION(1, 5, 0) <= IMB_VERSION_NUM
{ /* SM3 */
.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
{.sym = {
@@ -774,6 +775,63 @@ static const struct rte_cryptodev_capabilities aesni_mb_capabilities[] = {
}, }
}, }
},
+ { /* SM4 CBC */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+ {.cipher = {
+ .algo = RTE_CRYPTO_CIPHER_SM4_CBC,
+ .block_size = 16,
+ .key_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ },
+ .iv_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ }
+ }, }
+ }, }
+ },
+ { /* SM4 ECB */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+ {.cipher = {
+ .algo = RTE_CRYPTO_CIPHER_SM4_ECB,
+ .block_size = 16,
+ .key_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ },
+ .iv_size = { 0 }
+ }, }
+ }, }
+ },
+ { /* SM4 CTR */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+ {.cipher = {
+ .algo = RTE_CRYPTO_CIPHER_SM4_CTR,
+ .block_size = 16,
+ .key_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ },
+ .iv_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ }
+ }, }
+ }, }
+ },
+#endif
RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
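The new entries are visible through the standard capability helpers. A small sketch (hypothetical helper; dev_id is assumed to be an already-probed aesni_mb device) checking the SM4-CBC limits advertised above (16-byte key, 16-byte IV):

#include <rte_cryptodev.h>

/* Sketch: returns 1 if dev_id advertises SM4-CBC with a 16-byte key
 * and a 16-byte IV, matching the capability entry above. */
static int
sm4_cbc_supported(uint8_t dev_id)
{
	struct rte_cryptodev_sym_capability_idx idx = {
		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
		.algo.cipher = RTE_CRYPTO_CIPHER_SM4_CBC,
	};
	const struct rte_cryptodev_symmetric_capability *cap;

	cap = rte_cryptodev_sym_capability_get(dev_id, &idx);
	if (cap == NULL)
		return 0;	/* algorithm not advertised */

	/* 0 means the requested key/IV sizes fall within the advertised ranges */
	return rte_cryptodev_sym_capability_check_cipher(cap, 16, 16) == 0;
}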
@@ -951,6 +1009,17 @@ struct __rte_cache_aligned aesni_mb_session {
/**< SNOW3G scheduled cipher key */
kasumi_key_sched_t pKeySched_kasumi_cipher;
/**< KASUMI scheduled cipher key */
+#if IMB_VERSION(1, 5, 0) <= IMB_VERSION_NUM
+ struct {
+ alignas(16) uint32_t encode[IMB_SM4_KEY_SCHEDULE_ROUNDS];
+ /**< encode key */
+ alignas(16) uint32_t decode[IMB_SM4_KEY_SCHEDULE_ROUNDS];
+ /**< decode key */
+ } expanded_sm4_keys;
+ /**< Expanded SM4 keys - Original 128 bit key is
+ * expanded into 32 round keys, each 32 bits.
+ */
+#endif
};
} cipher;