[v2,3/5] crypto/uadk: support cipher algorithms

Message ID: 20220923024023.15849-4-zhangfei.gao@linaro.org (mailing list archive)
State: Changes Requested, archived
Delegated to: akhil goyal
Series: crypto/uadk: introduce uadk crypto driver

Checks

Context        Check    Description
ci/checkpatch  success  coding style OK

Commit Message

Zhangfei Gao Sept. 23, 2022, 2:40 a.m. UTC
  Add support for the following cipher algorithms:

* ``RTE_CRYPTO_CIPHER_AES_ECB``
* ``RTE_CRYPTO_CIPHER_AES_CBC``
* ``RTE_CRYPTO_CIPHER_AES_XTS``
* ``RTE_CRYPTO_CIPHER_DES_CBC``

Signed-off-by: Zhangfei Gao <zhangfei.gao@linaro.org>
---
 drivers/crypto/uadk/uadk_crypto_pmd.c | 254 +++++++++++++++++++++++++-
 1 file changed, 252 insertions(+), 2 deletions(-)
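
For context, not part of the patch: a minimal sketch of the cipher-only
transform this PMD accepts (UADK_CHAIN_ONLY_CIPHER), built with the standard
cryptodev API. The key buffer and IV_OFFSET below are placeholders chosen for
the example; the IV itself is expected inside the crypto op at iv.offset,
which is where the driver reads it with rte_crypto_op_ctod_offset().

  #include <rte_crypto.h>

  /* Place the IV in the op's private area, right after the sym op. */
  #define IV_OFFSET (sizeof(struct rte_crypto_op) + \
                     sizeof(struct rte_crypto_sym_op))

  static uint8_t aes_key[16];   /* 128-bit AES key (placeholder) */

  static struct rte_crypto_sym_xform cipher_xform = {
          .next = NULL,
          .type = RTE_CRYPTO_SYM_XFORM_CIPHER,
          .cipher = {
                  .op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
                  .algo = RTE_CRYPTO_CIPHER_AES_CBC,
                  .key = { .data = aes_key, .length = sizeof(aes_key) },
                  .iv = { .offset = IV_OFFSET, .length = 16 },
          },
  };

Once a session created from such a transform is attached to an op, the op is
submitted with the usual burst calls; because the driver runs
wd_do_cipher_sync() inside the enqueue path, the result is already available
when the op is dequeued. A sketch, again with placeholder dev_id/qp_id and
only the essential error handling:

  #include <rte_cryptodev.h>

  static int
  run_one_cipher_op(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op *op)
  {
          struct rte_crypto_op *done = NULL;

          if (rte_cryptodev_enqueue_burst(dev_id, qp_id, &op, 1) != 1)
                  return -1;

          /* The cipher work already ran synchronously during enqueue. */
          while (rte_cryptodev_dequeue_burst(dev_id, qp_id, &done, 1) == 0)
                  ;

          return done->status == RTE_CRYPTO_OP_STATUS_SUCCESS ? 0 : -1;
  }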
  

Patch

diff --git a/drivers/crypto/uadk/uadk_crypto_pmd.c b/drivers/crypto/uadk/uadk_crypto_pmd.c
index aad42524d6..592d747a5d 100644
--- a/drivers/crypto/uadk/uadk_crypto_pmd.c
+++ b/drivers/crypto/uadk/uadk_crypto_pmd.c
@@ -30,11 +30,26 @@  struct uadk_qp {
 } __rte_cache_aligned;
 
 enum uadk_chain_order {
+	UADK_CHAIN_ONLY_CIPHER,
 	UADK_CHAIN_NOT_SUPPORTED
 };
 
 struct uadk_crypto_session {
+	handle_t handle_cipher;
 	enum uadk_chain_order chain_order;
+
+	/* IV parameters */
+	struct {
+		uint16_t length;
+		uint16_t offset;
+	} iv;
+
+	/* Cipher Parameters */
+	struct {
+		enum rte_crypto_cipher_operation direction;
+		/* cipher operation direction */
+		struct wd_cipher_req req;
+	} cipher;
 } __rte_cache_aligned;
 
 enum uadk_crypto_version {
@@ -43,6 +58,7 @@  enum uadk_crypto_version {
 };
 
 struct uadk_crypto_priv {
+	bool env_cipher_init;
 	enum uadk_crypto_version version;
 } __rte_cache_aligned;
 
@@ -56,6 +72,86 @@  RTE_LOG_REGISTER_DEFAULT(uadk_crypto_logtype, INFO);
 		## __VA_ARGS__)
 
 static const struct rte_cryptodev_capabilities uadk_crypto_v2_capabilities[] = {
+	{	/* AES ECB */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_AES_ECB,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* AES CBC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
+				.block_size = 16,
+				.key_size = {
+					.min = 16,
+					.max = 32,
+					.increment = 8
+				},
+				.iv_size = {
+					.min = 16,
+					.max = 16,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* AES XTS */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_AES_XTS,
+				.block_size = 1,
+				.key_size = {
+					.min = 32,
+					.max = 64,
+					.increment = 32
+				},
+				.iv_size = {
+					.min = 0,
+					.max = 0,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
+	{	/* DES CBC */
+		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+		{.sym = {
+			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+			{.cipher = {
+				.algo = RTE_CRYPTO_CIPHER_DES_CBC,
+				.block_size = 8,
+				.key_size = {
+					.min = 8,
+					.max = 8,
+					.increment = 0
+				},
+				.iv_size = {
+					.min = 8,
+					.max = 8,
+					.increment = 0
+				}
+			}, }
+		}, }
+	},
 	/* End of symmetric capabilities */
 	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
 };
@@ -83,8 +179,15 @@  uadk_crypto_pmd_stop(struct rte_cryptodev *dev __rte_unused)
 
 /* Close device */
 static int
-uadk_crypto_pmd_close(struct rte_cryptodev *dev __rte_unused)
+uadk_crypto_pmd_close(struct rte_cryptodev *dev)
 {
+	struct uadk_crypto_priv *priv = dev->data->dev_private;
+
+	if (priv->env_cipher_init) {
+		wd_cipher_env_uninit();
+		priv->env_cipher_init = false;
+	}
+
 	return 0;
 }
 
@@ -238,19 +341,107 @@  uadk_crypto_sym_session_get_size(struct rte_cryptodev *dev __rte_unused)
 }
 
 static enum uadk_chain_order
-uadk_get_chain_order(const struct rte_crypto_sym_xform *xform __rte_unused)
+uadk_get_chain_order(const struct rte_crypto_sym_xform *xform)
 {
 	enum uadk_chain_order res = UADK_CHAIN_NOT_SUPPORTED;
 
+	if (xform != NULL) {
+		if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
+			if (xform->next == NULL)
+				res = UADK_CHAIN_ONLY_CIPHER;
+		}
+	}
+
 	return res;
 }
 
+static int
+uadk_set_session_cipher_parameters(struct rte_cryptodev *dev,
+				   struct uadk_crypto_session *sess,
+				   struct rte_crypto_sym_xform *xform)
+{
+	struct uadk_crypto_priv *priv = dev->data->dev_private;
+	struct rte_crypto_cipher_xform *cipher = &xform->cipher;
+	struct wd_cipher_sess_setup setup = {0};
+	struct sched_params params = {0};
+	int ret;
+
+	if (!priv->env_cipher_init) {
+		ret = wd_cipher_env_init(NULL);
+		if (ret < 0)
+			return -EINVAL;
+		priv->env_cipher_init = true;
+	}
+
+	sess->cipher.direction = cipher->op;
+	sess->iv.offset = cipher->iv.offset;
+	sess->iv.length = cipher->iv.length;
+
+	switch (cipher->algo) {
+	/* Cover supported cipher algorithms */
+	case RTE_CRYPTO_CIPHER_AES_CTR:
+		setup.alg = WD_CIPHER_AES;
+		setup.mode = WD_CIPHER_CTR;
+		sess->cipher.req.out_bytes = 64;
+		break;
+	case RTE_CRYPTO_CIPHER_AES_ECB:
+		setup.alg = WD_CIPHER_AES;
+		setup.mode = WD_CIPHER_ECB;
+		sess->cipher.req.out_bytes = 16;
+		break;
+	case RTE_CRYPTO_CIPHER_AES_CBC:
+		setup.alg = WD_CIPHER_AES;
+		setup.mode = WD_CIPHER_CBC;
+		if (cipher->key.length == 16)
+			sess->cipher.req.out_bytes = 16;
+		else
+			sess->cipher.req.out_bytes = 64;
+		break;
+	case RTE_CRYPTO_CIPHER_AES_XTS:
+		setup.alg = WD_CIPHER_AES;
+		setup.mode = WD_CIPHER_XTS;
+		if (cipher->key.length == 16)
+			sess->cipher.req.out_bytes = 32;
+		else
+			sess->cipher.req.out_bytes = 512;
+		break;
+	default:
+		ret = -ENOTSUP;
+		goto env_uninit;
+	}
+
+	params.numa_id = -1;	/* choose nearby numa node */
+	setup.sched_param = &params;
+	sess->handle_cipher = wd_cipher_alloc_sess(&setup);
+	if (!sess->handle_cipher) {
+		UADK_LOG(ERR, "uadk failed to alloc session!\n");
+		ret = -EINVAL;
+		goto env_uninit;
+	}
+
+	ret = wd_cipher_set_key(sess->handle_cipher, cipher->key.data, cipher->key.length);
+	if (ret) {
+		wd_cipher_free_sess(sess->handle_cipher);
+		UADK_LOG(ERR, "uadk failed to set key!\n");
+		ret = -EINVAL;
+		goto env_uninit;
+	}
+
+	return 0;
+
+env_uninit:
+	wd_cipher_env_uninit();
+	priv->env_cipher_init = false;
+	return ret;
+}
+
 static int
 uadk_crypto_sym_session_configure(struct rte_cryptodev *dev,
 				  struct rte_crypto_sym_xform *xform,
 				  struct rte_cryptodev_sym_session *session,
 				  struct rte_mempool *mp)
 {
+	struct rte_crypto_sym_xform *cipher_xform = NULL;
 	struct uadk_crypto_session *sess;
 	int ret;
 
@@ -263,11 +454,23 @@  uadk_crypto_sym_session_configure(struct rte_cryptodev *dev,
 
 	sess->chain_order = uadk_get_chain_order(xform);
 	switch (sess->chain_order) {
+	case UADK_CHAIN_ONLY_CIPHER:
+		cipher_xform = xform;
+		break;
 	default:
 		ret = -ENOTSUP;
 		goto err;
 	}
 
+	if (cipher_xform) {
+		ret = uadk_set_session_cipher_parameters(dev, sess, cipher_xform);
+		if (ret != 0) {
+			UADK_LOG(ERR,
+				"Invalid/unsupported cipher parameters");
+			goto err;
+		}
+	}
+
 	set_sym_session_private_data(session, dev->driver_id, sess);
 
 	return 0;
@@ -288,6 +491,11 @@  uadk_crypto_sym_session_clear(struct rte_cryptodev *dev,
 		return;
 	}
 
+	if (priv_sess->handle_cipher) {
+		wd_cipher_free_sess(priv_sess->handle_cipher);
+		priv_sess->handle_cipher = 0;
+	}
+
 	set_sym_session_private_data(sess, dev->driver_id, NULL);
 	rte_mempool_put(rte_mempool_from_obj(priv_sess), priv_sess);
 }
@@ -307,12 +515,49 @@  static struct rte_cryptodev_ops uadk_crypto_pmd_ops = {
 		.sym_session_clear	= uadk_crypto_sym_session_clear,
 };
 
+static void
+uadk_process_cipher_op(struct rte_crypto_op *op,
+		       struct uadk_crypto_session *sess,
+		       struct rte_mbuf *msrc, struct rte_mbuf *mdst)
+{
+	uint32_t off = op->sym->cipher.data.offset;
+	int ret;
+
+	if (!sess) {
+		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
+		return;
+	}
+
+	sess->cipher.req.src = rte_pktmbuf_mtod_offset(msrc, uint8_t *, off);
+	sess->cipher.req.in_bytes = op->sym->cipher.data.length;
+	sess->cipher.req.dst = rte_pktmbuf_mtod_offset(mdst, uint8_t *, off);
+	sess->cipher.req.out_buf_bytes = sess->cipher.req.in_bytes;
+	sess->cipher.req.iv_bytes = sess->iv.length;
+	sess->cipher.req.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+							sess->iv.offset);
+	if (sess->cipher.direction == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
+		sess->cipher.req.op_type = WD_CIPHER_ENCRYPTION;
+	else
+		sess->cipher.req.op_type = WD_CIPHER_DECRYPTION;
+
+	do {
+		ret = wd_do_cipher_sync(sess->handle_cipher, &sess->cipher.req);
+	} while (ret == -WD_EBUSY);
+
+	if (sess->cipher.req.out_buf_bytes > sess->cipher.req.in_bytes)
+		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
+
+	if (ret)
+		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
+}
+
 static uint16_t
 uadk_crypto_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
 			  uint16_t nb_ops)
 {
 	struct uadk_qp *qp = queue_pair;
 	struct uadk_crypto_session *sess = NULL;
+	struct rte_mbuf *msrc, *mdst;
 	struct rte_crypto_op *op;
 	uint16_t enqd = 0;
 	int i, ret;
@@ -320,6 +565,8 @@  uadk_crypto_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
 	for (i = 0; i < nb_ops; i++) {
 		op = ops[i];
 		op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
+		msrc = op->sym->m_src;
+		mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
 
 		if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
 			if (likely(op->sym->session != NULL))
@@ -330,6 +577,9 @@  uadk_crypto_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
 		}
 
 		switch (sess->chain_order) {
+		case UADK_CHAIN_ONLY_CIPHER:
+			uadk_process_cipher_op(op, sess, msrc, mdst);
+			break;
 		default:
 			op->status = RTE_CRYPTO_OP_STATUS_ERROR;
 			break;