@@ -2611,7 +2611,8 @@ test_sm2_sign(void)
/* Populate op with operational details */
asym_op->sm2.op_type = RTE_CRYPTO_ASYM_OP_SIGN;
- if (rte_cryptodev_asym_xform_capability_check_hash(capa, RTE_CRYPTO_AUTH_SM3))
+ if (rte_cryptodev_asym_xform_capability_check_opcap(capa,
+ RTE_CRYPTO_ASYM_OP_SIGN, RTE_CRYPTO_SM2_PH))
asym_op->sm2.hash = RTE_CRYPTO_AUTH_SM3;
else
asym_op->sm2.hash = RTE_CRYPTO_AUTH_NULL;
@@ -2628,7 +2629,8 @@ test_sm2_sign(void)
asym_op->sm2.id.length = 0;
}
- if (capa->internal_rng != 0) {
+ if (rte_cryptodev_asym_xform_capability_check_opcap(capa,
+ RTE_CRYPTO_ASYM_OP_SIGN, RTE_CRYPTO_SM2_RNG)) {
asym_op->sm2.k.data = NULL;
asym_op->sm2.k.length = 0;
} else {
@@ -2677,7 +2679,8 @@ test_sm2_sign(void)
debug_hexdump(stdout, "s:",
asym_op->sm2.s.data, asym_op->sm2.s.length);
- if (capa->internal_rng == 0) {
+ if (!rte_cryptodev_asym_xform_capability_check_opcap(capa,
+ RTE_CRYPTO_ASYM_OP_SIGN, RTE_CRYPTO_SM2_RNG)) {
/* Verify sign (by comparison). */
if (memcmp(input_params.sign_r.data, asym_op->sm2.r.data,
asym_op->sm2.r.length) != 0) {
@@ -2802,7 +2805,8 @@ test_sm2_verify(void)
/* Populate op with operational details */
asym_op->sm2.op_type = RTE_CRYPTO_ASYM_OP_VERIFY;
- if (rte_cryptodev_asym_xform_capability_check_hash(capa, RTE_CRYPTO_AUTH_SM3))
+ if (rte_cryptodev_asym_xform_capability_check_opcap(capa,
+ RTE_CRYPTO_ASYM_OP_VERIFY, RTE_CRYPTO_SM2_PH))
asym_op->sm2.hash = RTE_CRYPTO_AUTH_SM3;
else
asym_op->sm2.hash = RTE_CRYPTO_AUTH_NULL;
@@ -2924,7 +2928,8 @@ test_sm2_enc(void)
/* Populate op with operational details */
asym_op->sm2.op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT;
- if (rte_cryptodev_asym_xform_capability_check_hash(capa, RTE_CRYPTO_AUTH_SM3))
+ if (rte_cryptodev_asym_xform_capability_check_opcap(capa,
+ RTE_CRYPTO_ASYM_OP_ENCRYPT, RTE_CRYPTO_SM2_PH))
asym_op->sm2.hash = RTE_CRYPTO_AUTH_SM3;
else
asym_op->sm2.hash = RTE_CRYPTO_AUTH_NULL;
@@ -2932,7 +2937,8 @@ test_sm2_enc(void)
asym_op->sm2.message.data = input_params.message.data;
asym_op->sm2.message.length = input_params.message.length;
- if (capa->internal_rng != 0) {
+ if (rte_cryptodev_asym_xform_capability_check_opcap(capa,
+ RTE_CRYPTO_ASYM_OP_ENCRYPT, RTE_CRYPTO_SM2_RNG)) {
asym_op->sm2.k.data = NULL;
asym_op->sm2.k.length = 0;
} else {
@@ -2978,7 +2984,8 @@ test_sm2_enc(void)
debug_hexdump(stdout, "cipher:",
asym_op->sm2.cipher.data, asym_op->sm2.cipher.length);
- if (capa->internal_rng == 0) {
+ if (!rte_cryptodev_asym_xform_capability_check_opcap(capa,
+ RTE_CRYPTO_ASYM_OP_ENCRYPT, RTE_CRYPTO_SM2_RNG)) {
if (memcmp(input_params.cipher.data, asym_op->sm2.cipher.data,
asym_op->sm2.cipher.length) != 0) {
status = TEST_FAILED;
@@ -3105,7 +3112,8 @@ test_sm2_dec(void)
/* Populate op with operational details */
asym_op->sm2.op_type = RTE_CRYPTO_ASYM_OP_DECRYPT;
- if (rte_cryptodev_asym_xform_capability_check_hash(capa, RTE_CRYPTO_AUTH_SM3))
+ if (rte_cryptodev_asym_xform_capability_check_opcap(capa,
+ RTE_CRYPTO_ASYM_OP_DECRYPT, RTE_CRYPTO_SM2_PH))
asym_op->sm2.hash = RTE_CRYPTO_AUTH_SM3;
else
asym_op->sm2.hash = RTE_CRYPTO_AUTH_NULL;
@@ -1194,7 +1194,19 @@ static const struct rte_cryptodev_capabilities caps_sm2[] = {
.xform_capa = {
.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2,
.op_types = ((1 << RTE_CRYPTO_ASYM_OP_SIGN) |
- (1 << RTE_CRYPTO_ASYM_OP_VERIFY))
+ (1 << RTE_CRYPTO_ASYM_OP_VERIFY) |
+ (1 << RTE_CRYPTO_ASYM_OP_ENCRYPT) |
+ (1 << RTE_CRYPTO_ASYM_OP_DECRYPT)),
+ .op_capa = {
+ {
+ .op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT,
+ .capa = ((1 << RTE_CRYPTO_SM2_PKE_KDF))
+ },
+ {
+ .op_type = RTE_CRYPTO_ASYM_OP_DECRYPT,
+ .capa = ((1 << RTE_CRYPTO_SM2_PKE_KDF))
+ }
+ }
}
}
}
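For context, a minimal sketch (helper name hypothetical, not part of this patch) of how the new opcap check resolves against the caps_sm2 table above: RTE_CRYPTO_SM2_PKE_KDF is advertised for encrypt and decrypt, while RTE_CRYPTO_SM2_RNG is not, so an application keeps supplying the per-message secret k itself.

#include <rte_cryptodev.h>
#include <rte_crypto_asym.h>

/* Hypothetical helper: returns 1 when the caller must provide 'k' for SM2
 * encryption because the device does not advertise an internal RNG for the
 * encrypt op (as is the case for the caps_sm2 entry above).
 */
static int
sm2_enc_needs_user_k(const struct rte_cryptodev_asymmetric_xform_capability *capa)
{
	return !rte_cryptodev_asym_xform_capability_check_opcap(capa,
			RTE_CRYPTO_ASYM_OP_ENCRYPT, RTE_CRYPTO_SM2_RNG);
}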
@@ -598,15 +598,34 @@ static const struct rte_cryptodev_capabilities openssl_pmd_capabilities[] = {
{.asym = {
.xform_capa = {
.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2,
- .hash_algos = (1 << RTE_CRYPTO_AUTH_SM3),
.op_types =
((1<<RTE_CRYPTO_ASYM_OP_SIGN) |
(1 << RTE_CRYPTO_ASYM_OP_VERIFY) |
(1 << RTE_CRYPTO_ASYM_OP_ENCRYPT) |
(1 << RTE_CRYPTO_ASYM_OP_DECRYPT)),
- {.internal_rng = 1
- }
- }
+ .op_capa = {
+ {
+ .op_type = RTE_CRYPTO_ASYM_OP_ENCRYPT,
+ .capa = ((1 << RTE_CRYPTO_SM2_RNG) |
+ (1 << RTE_CRYPTO_SM2_PKE_KDF))
+ },
+ {
+ .op_type = RTE_CRYPTO_ASYM_OP_DECRYPT,
+ .capa = ((1 << RTE_CRYPTO_SM2_RNG) |
+ (1 << RTE_CRYPTO_SM2_PKE_KDF))
+ },
+ {
+ .op_type = RTE_CRYPTO_ASYM_OP_SIGN,
+ .capa = ((1 << RTE_CRYPTO_SM2_RNG) |
+ (1 << RTE_CRYPTO_SM2_PH))
+ },
+ {
+ .op_type = RTE_CRYPTO_ASYM_OP_VERIFY,
+ .capa = ((1 << RTE_CRYPTO_SM2_RNG) |
+ (1 << RTE_CRYPTO_SM2_PH))
+ }
+ },
+ },
}
}
},
@@ -633,6 +633,18 @@ struct rte_crypto_asym_xform {
};
};
+/**
+ * SM2 operation capabilities
+ */
+enum rte_crypto_sm2_op_capa {
+ RTE_CRYPTO_SM2_RNG,
+ /**< Random number generator supported in SM2 ops. */
+ RTE_CRYPTO_SM2_PH,
+ /**< Prehash message before crypto op. */
+ RTE_CRYPTO_SM2_PKE_KDF,
+ /**< KDF support in SM2 public key encryption. */
+};
+
/**
* SM2 operation params.
*/
@@ -628,6 +628,27 @@ rte_cryptodev_asym_xform_capability_check_hash(
return ret;
}
+int
+rte_cryptodev_asym_xform_capability_check_opcap(
+ const struct rte_cryptodev_asymmetric_xform_capability *capability,
+ enum rte_crypto_asym_op_type op_type, uint8_t cap)
+{
+ int ret = 0;
+
+ if (!(capability->op_types & (1 << op_type)))
+ return ret;
+
+ for (int i = 0; i < RTE_CRYPTO_ASYM_OP_LIST_END; i++) {
+ if (capability->op_capa[i].op_type != op_type)
+ continue;
+
+ if (capability->op_capa[i].capa & (1 << cap))
+ ret = 1;
+ }
+
+ return ret;
+}
+
/* spinlock for crypto device enq callbacks */
static rte_spinlock_t rte_cryptodev_callback_lock = RTE_SPINLOCK_INITIALIZER;
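A minimal application-side sketch (function and parameter names hypothetical, error handling trimmed) showing the intended call flow: fetch the SM2 xform capability for a device and use the new helper to pick the prehash algorithm and to decide whether the PMD generates the per-message secret internally, mirroring the test changes above.

#include <errno.h>
#include <rte_cryptodev.h>
#include <rte_crypto_asym.h>

static int
example_sm2_fill_sign_op(uint8_t dev_id, struct rte_crypto_asym_op *asym_op,
		const rte_crypto_uint *user_k)
{
	const struct rte_cryptodev_asymmetric_xform_capability *capa;
	struct rte_cryptodev_asym_capability_idx idx = {
		.type = RTE_CRYPTO_ASYM_XFORM_SM2,
	};

	capa = rte_cryptodev_asym_capability_get(dev_id, &idx);
	if (capa == NULL)
		return -ENOTSUP;

	asym_op->sm2.op_type = RTE_CRYPTO_ASYM_OP_SIGN;

	/* Prehash with SM3 only if the device supports it for SIGN. */
	if (rte_cryptodev_asym_xform_capability_check_opcap(capa,
			RTE_CRYPTO_ASYM_OP_SIGN, RTE_CRYPTO_SM2_PH))
		asym_op->sm2.hash = RTE_CRYPTO_AUTH_SM3;
	else
		asym_op->sm2.hash = RTE_CRYPTO_AUTH_NULL;

	if (rte_cryptodev_asym_xform_capability_check_opcap(capa,
			RTE_CRYPTO_ASYM_OP_SIGN, RTE_CRYPTO_SM2_RNG)) {
		/* Device generates the per-message secret internally. */
		asym_op->sm2.k.data = NULL;
		asym_op->sm2.k.length = 0;
	} else {
		asym_op->sm2.k = *user_k;
	}

	return 0;
}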
@@ -157,6 +157,15 @@ struct rte_cryptodev_symmetric_capability {
};
};
+/**
+ * Asymmetric Crypto Operation Capability
+ */
+struct rte_cryptodev_asymmetric_op_capability {
+ enum rte_crypto_asym_op_type op_type;
+ uint32_t capa;
+ /**< Bitmask of capabilities supported for op_type. */
+};
+
/**
* Asymmetric Xform Crypto Capability
*/
@@ -185,6 +194,9 @@ struct rte_cryptodev_asymmetric_xform_capability {
* Value 0 means unavailable, and application should pass the required
* random value. Otherwise, PMD would internally compute the random number.
*/
+
+ struct rte_cryptodev_asymmetric_op_capability op_capa[RTE_CRYPTO_ASYM_OP_LIST_END];
+ /**< Operation specific capabilities. */
};
uint64_t hash_algos;
@@ -359,6 +371,22 @@ rte_cryptodev_asym_xform_capability_check_hash(
const struct rte_cryptodev_asymmetric_xform_capability *capability,
enum rte_crypto_auth_algorithm hash);
+/**
+ * Check whether an operation-specific capability is supported
+ *
+ * @param capability Description of the asymmetric crypto capability.
+ * @param op_type Operation type (RTE_CRYPTO_ASYM_OP_*) to look up.
+ * @param cap Op capability to check, given as its bit position (e.g. enum rte_crypto_sm2_op_capa).
+ *
+ * @return
+ * - Return 1 if the op capability is supported
+ * - Return 0 if unsupported
+ */
+int
+rte_cryptodev_asym_xform_capability_check_opcap(
+ const struct rte_cryptodev_asymmetric_xform_capability *capability,
+ enum rte_crypto_asym_op_type op_type, uint8_t cap);
+
/**
* Provide the cipher algorithm enum, given an algorithm string
*