@@ -4,12 +4,22 @@
; Refer to default.ini for the full list of available PMD features.
;
[Features]
+Symmetric crypto = Y
HW Accelerated = Y
;
; Supported crypto algorithms of the 'uadk' crypto driver.
;
[Cipher]
+AES CBC (128) = Y
+AES CBC (192) = Y
+AES CBC (256) = Y
+AES ECB (128) = Y
+AES ECB (192) = Y
+AES ECB (256) = Y
+AES XTS (128) = Y
+AES XTS (256) = Y
+DES CBC = Y
;
; Supported authentication algorithms of the 'uadk' crypto driver.
@@ -15,6 +15,12 @@ Features
UADK crypto PMD has support for:
+Cipher algorithms:
+
+* ``RTE_CRYPTO_CIPHER_AES_ECB``
+* ``RTE_CRYPTO_CIPHER_AES_CBC``
+* ``RTE_CRYPTO_CIPHER_AES_XTS``
+* ``RTE_CRYPTO_CIPHER_DES_CBC``
Test steps
-----------
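
The cipher algorithms listed above map onto the standard cryptodev symmetric transform. A minimal sketch (illustration only, not part of the patch) of a cipher-only xform for AES-CBC; ``cipher_key`` and ``IV_OFFSET`` are placeholders the application would define:

struct rte_crypto_sym_xform cipher_xform = {
	.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
	.next = NULL,
	.cipher = {
		.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
		.algo = RTE_CRYPTO_CIPHER_AES_CBC,
		.key = { .data = cipher_key, .length = 16 },
		/* IV travels in the op private area at this offset */
		.iv = { .offset = IV_OFFSET, .length = 16 },
	},
};
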
@@ -29,12 +29,35 @@ struct uadk_qp {
uint8_t temp_digest[DIGEST_LENGTH_MAX];
} __rte_cache_aligned;
+enum uadk_chain_order {
+ UADK_CHAIN_ONLY_CIPHER,
+ UADK_CHAIN_NOT_SUPPORTED
+};
+
+struct uadk_crypto_session {
+ handle_t handle_cipher;
+ enum uadk_chain_order chain_order;
+
+ /* IV parameters */
+ struct {
+ uint16_t length;
+ uint16_t offset;
+ } iv;
+
+ /* Cipher Parameters */
+ struct {
+ enum rte_crypto_cipher_operation direction;
+ struct wd_cipher_req req;
+ } cipher;
+} __rte_cache_aligned;
+
enum uadk_crypto_version {
UADK_CRYPTO_V2,
UADK_CRYPTO_V3,
};
struct uadk_crypto_priv {
+ bool env_cipher_init;
enum uadk_crypto_version version;
} __rte_cache_aligned;
@@ -48,6 +71,86 @@ RTE_LOG_REGISTER_DEFAULT(uadk_crypto_logtype, INFO);
## __VA_ARGS__)
static const struct rte_cryptodev_capabilities uadk_crypto_v2_capabilities[] = {
+ { /* AES ECB */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+ {.cipher = {
+ .algo = RTE_CRYPTO_CIPHER_AES_ECB,
+ .block_size = 16,
+ .key_size = {
+ .min = 16,
+ .max = 32,
+ .increment = 8
+ },
+ .iv_size = {
+ .min = 0,
+ .max = 0,
+ .increment = 0
+ }
+ }, }
+ }, }
+ },
+ { /* AES CBC */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+ {.cipher = {
+ .algo = RTE_CRYPTO_CIPHER_AES_CBC,
+ .block_size = 16,
+ .key_size = {
+ .min = 16,
+ .max = 32,
+ .increment = 8
+ },
+ .iv_size = {
+ .min = 16,
+ .max = 16,
+ .increment = 0
+ }
+ }, }
+ }, }
+ },
+ { /* AES XTS */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+ {.cipher = {
+ .algo = RTE_CRYPTO_CIPHER_AES_XTS,
+ .block_size = 1,
+ .key_size = {
+ .min = 32,
+ .max = 64,
+ .increment = 32
+ },
+ .iv_size = {
+ .min = 0,
+ .max = 0,
+ .increment = 0
+ }
+ }, }
+ }, }
+ },
+ { /* DES CBC */
+ .op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
+ {.sym = {
+ .xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
+ {.cipher = {
+ .algo = RTE_CRYPTO_CIPHER_DES_CBC,
+ .block_size = 8,
+ .key_size = {
+ .min = 8,
+ .max = 8,
+ .increment = 0
+ },
+ .iv_size = {
+ .min = 8,
+ .max = 8,
+ .increment = 0
+ }
+ }, }
+ }, }
+ },
/* End of capabilities */
RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
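
An application can discover the capabilities declared above at runtime. A minimal sketch (not part of the patch), assuming ``dev_id`` identifies the uadk device, checking AES-CBC with a 32-byte key and 16-byte IV:

struct rte_cryptodev_sym_capability_idx idx = {
	.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
	.algo.cipher = RTE_CRYPTO_CIPHER_AES_CBC,
};
const struct rte_cryptodev_symmetric_capability *cap;

cap = rte_cryptodev_sym_capability_get(dev_id, &idx);
if (cap == NULL ||
    rte_cryptodev_sym_capability_check_cipher(cap, 32, 16) != 0)
	printf("AES-CBC-256 not supported by device %u\n", dev_id);
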
@@ -75,8 +178,15 @@ uadk_crypto_pmd_stop(struct rte_cryptodev *dev __rte_unused)
/* Close device */
static int
-uadk_crypto_pmd_close(struct rte_cryptodev *dev __rte_unused)
+uadk_crypto_pmd_close(struct rte_cryptodev *dev)
{
+ struct uadk_crypto_priv *priv = dev->data->dev_private;
+
+ if (priv->env_cipher_init) {
+ wd_cipher_env_uninit();
+ priv->env_cipher_init = false;
+ }
+
return 0;
}
@@ -223,6 +333,172 @@ uadk_crypto_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
return -EINVAL;
}
+static unsigned int
+uadk_crypto_sym_session_get_size(struct rte_cryptodev *dev __rte_unused)
+{
+ return sizeof(struct uadk_crypto_session);
+}
+
+static enum uadk_chain_order
+uadk_get_chain_order(const struct rte_crypto_sym_xform *xform)
+{
+ enum uadk_chain_order res = UADK_CHAIN_NOT_SUPPORTED;
+
+ if (xform != NULL) {
+ if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
+ if (xform->next == NULL)
+ res = UADK_CHAIN_ONLY_CIPHER;
+ }
+ }
+
+ return res;
+}
+
+static int
+uadk_set_session_cipher_parameters(struct rte_cryptodev *dev,
+ struct uadk_crypto_session *sess,
+ struct rte_crypto_sym_xform *xform)
+{
+ struct uadk_crypto_priv *priv = dev->data->dev_private;
+ struct rte_crypto_cipher_xform *cipher = &xform->cipher;
+ struct wd_cipher_sess_setup setup = {0};
+ struct sched_params params = {0};
+ int ret;
+
+ if (!priv->env_cipher_init) {
+ ret = wd_cipher_env_init(NULL);
+ if (ret < 0)
+ return -EINVAL;
+ priv->env_cipher_init = true;
+ }
+
+ sess->cipher.direction = cipher->op;
+ sess->iv.offset = cipher->iv.offset;
+ sess->iv.length = cipher->iv.length;
+
+ switch (cipher->algo) {
+ /* Cover supported cipher algorithms */
+ case RTE_CRYPTO_CIPHER_AES_CTR:
+ setup.alg = WD_CIPHER_AES;
+ setup.mode = WD_CIPHER_CTR;
+ sess->cipher.req.out_bytes = 64;
+ break;
+ case RTE_CRYPTO_CIPHER_AES_ECB:
+ setup.alg = WD_CIPHER_AES;
+ setup.mode = WD_CIPHER_ECB;
+ sess->cipher.req.out_bytes = 16;
+ break;
+ case RTE_CRYPTO_CIPHER_AES_CBC:
+ setup.alg = WD_CIPHER_AES;
+ setup.mode = WD_CIPHER_CBC;
+ if (cipher->key.length == 16)
+ sess->cipher.req.out_bytes = 16;
+ else
+ sess->cipher.req.out_bytes = 64;
+ break;
+ case RTE_CRYPTO_CIPHER_AES_XTS:
+ setup.alg = WD_CIPHER_AES;
+ setup.mode = WD_CIPHER_XTS;
+ if (cipher->key.length == 16)
+ sess->cipher.req.out_bytes = 32;
+ else
+ sess->cipher.req.out_bytes = 512;
+ break;
+ default:
+ ret = -ENOTSUP;
+ goto env_uninit;
+ }
+
+ params.numa_id = -1; /* choose nearby numa node */
+ setup.sched_param = &params;
+ sess->handle_cipher = wd_cipher_alloc_sess(&setup);
+ if (!sess->handle_cipher) {
+ UADK_LOG(ERR, "uadk failed to alloc session!\n");
+ ret = -EINVAL;
+ goto env_uninit;
+ }
+
+ ret = wd_cipher_set_key(sess->handle_cipher, cipher->key.data, cipher->key.length);
+ if (ret) {
+ wd_cipher_free_sess(sess->handle_cipher);
+ UADK_LOG(ERR, "uadk failed to set key!\n");
+ ret = -EINVAL;
+ goto env_uninit;
+ }
+
+ return 0;
+
+env_uninit:
+ wd_cipher_env_uninit();
+ priv->env_cipher_init = false;
+ return ret;
+}
+
+static int
+uadk_crypto_sym_session_configure(struct rte_cryptodev *dev,
+ struct rte_crypto_sym_xform *xform,
+ struct rte_cryptodev_sym_session *session,
+ struct rte_mempool *mp)
+{
+ struct rte_crypto_sym_xform *cipher_xform = NULL;
+ struct uadk_crypto_session *sess;
+ int ret;
+
+ ret = rte_mempool_get(mp, (void *)&sess);
+ if (ret != 0) {
+ UADK_LOG(ERR, "Failed to get session %p private data from mempool",
+ sess);
+ return -ENOMEM;
+ }
+
+ sess->chain_order = uadk_get_chain_order(xform);
+ switch (sess->chain_order) {
+ case UADK_CHAIN_ONLY_CIPHER:
+ cipher_xform = xform;
+ break;
+ default:
+ ret = -ENOTSUP;
+ goto err;
+ }
+
+ if (cipher_xform) {
+ ret = uadk_set_session_cipher_parameters(dev, sess, cipher_xform);
+ if (ret != 0) {
+ UADK_LOG(ERR,
+ "Invalid/unsupported cipher parameters");
+ goto err;
+ }
+ }
+
+ set_sym_session_private_data(session, dev->driver_id, sess);
+
+ return 0;
+err:
+ rte_mempool_put(mp, sess);
+ return ret;
+}
+
+static void
+uadk_crypto_sym_session_clear(struct rte_cryptodev *dev,
+ struct rte_cryptodev_sym_session *sess)
+{
+ struct uadk_crypto_session *priv_sess =
+ get_sym_session_private_data(sess, dev->driver_id);
+
+ if (unlikely(priv_sess == NULL)) {
+ UADK_LOG(ERR, "Failed to get session %p private data.", priv_sess);
+ return;
+ }
+
+ if (priv_sess->handle_cipher) {
+ wd_cipher_free_sess(priv_sess->handle_cipher);
+ priv_sess->handle_cipher = 0;
+ }
+
+ set_sym_session_private_data(sess, dev->driver_id, NULL);
+ rte_mempool_put(rte_mempool_from_obj(priv_sess), priv_sess);
+}
+
static struct rte_cryptodev_ops uadk_crypto_pmd_ops = {
.dev_configure = uadk_crypto_pmd_config,
.dev_start = uadk_crypto_pmd_start,
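
For context (not part of the patch): with the pre-22.11 session API this driver targets, the configure/clear handlers above are reached through the application-facing calls roughly as follows; ``dev_id``, ``sess_mp`` and ``cipher_xform`` are assumed from application setup:

struct rte_cryptodev_sym_session *sess;

sess = rte_cryptodev_sym_session_create(sess_mp);
if (sess == NULL)
	return -ENOMEM;
/* Ends up in uadk_crypto_sym_session_configure() */
if (rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform, sess_mp) < 0)
	return -EINVAL;

/* ... use the session ... */

/* Teardown: ends up in uadk_crypto_sym_session_clear() */
rte_cryptodev_sym_session_clear(dev_id, sess);
rte_cryptodev_sym_session_free(sess);
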
@@ -233,16 +509,54 @@ static struct rte_cryptodev_ops uadk_crypto_pmd_ops = {
.dev_infos_get = uadk_crypto_pmd_info_get,
.queue_pair_setup = uadk_crypto_pmd_qp_setup,
.queue_pair_release = uadk_crypto_pmd_qp_release,
- .sym_session_get_size = NULL,
- .sym_session_configure = NULL,
- .sym_session_clear = NULL,
+ .sym_session_get_size = uadk_crypto_sym_session_get_size,
+ .sym_session_configure = uadk_crypto_sym_session_configure,
+ .sym_session_clear = uadk_crypto_sym_session_clear,
};
+static void
+uadk_process_cipher_op(struct rte_crypto_op *op,
+ struct uadk_crypto_session *sess,
+ struct rte_mbuf *msrc, struct rte_mbuf *mdst)
+{
+ uint32_t off = op->sym->cipher.data.offset;
+ int ret;
+
+ if (!sess) {
+ op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
+ return;
+ }
+
+ sess->cipher.req.src = rte_pktmbuf_mtod_offset(msrc, uint8_t *, off);
+ sess->cipher.req.in_bytes = op->sym->cipher.data.length;
+ sess->cipher.req.dst = rte_pktmbuf_mtod_offset(mdst, uint8_t *, off);
+ sess->cipher.req.out_buf_bytes = sess->cipher.req.in_bytes;
+ sess->cipher.req.iv_bytes = sess->iv.length;
+ sess->cipher.req.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
+ sess->iv.offset);
+ if (sess->cipher.direction == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
+ sess->cipher.req.op_type = WD_CIPHER_ENCRYPTION;
+ else
+ sess->cipher.req.op_type = WD_CIPHER_DECRYPTION;
+
+ do {
+ ret = wd_do_cipher_sync(sess->handle_cipher, &sess->cipher.req);
+ } while (ret == -WD_EBUSY);
+
+ if (sess->cipher.req.out_buf_bytes > sess->cipher.req.in_bytes)
+ op->status = RTE_CRYPTO_OP_STATUS_ERROR;
+
+ if (ret)
+ op->status = RTE_CRYPTO_OP_STATUS_ERROR;
+}
+
static uint16_t
uadk_crypto_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
uint16_t nb_ops)
{
struct uadk_qp *qp = queue_pair;
+ struct uadk_crypto_session *sess = NULL;
+ struct rte_mbuf *msrc, *mdst;
struct rte_crypto_op *op;
uint16_t enqd = 0;
int i, ret;
@@ -250,6 +564,25 @@ uadk_crypto_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
for (i = 0; i < nb_ops; i++) {
op = ops[i];
op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
+ msrc = op->sym->m_src;
+ mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
+
+ if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION &&
+ likely(op->sym->session != NULL)) {
+ sess = (struct uadk_crypto_session *)
+ get_sym_session_private_data(
+ op->sym->session,
+ uadk_cryptodev_driver_id);
+ } else {
+ sess = NULL;
+ }
+
+ switch (sess ? sess->chain_order : UADK_CHAIN_NOT_SUPPORTED) {
+ case UADK_CHAIN_ONLY_CIPHER:
+ uadk_process_cipher_op(op, sess, msrc, mdst);
+ break;
+ default:
+ op->status = RTE_CRYPTO_OP_STATUS_ERROR;
+ break;
+ }
if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
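
Illustration only (not part of the patch): once a session is attached, the enqueue path above is driven through the burst API; ``dev_id``, ``qp_id``, ``op``, ``mbuf`` and ``sess`` are assumed from application setup, and the IV is expected at ``iv.offset`` in the op private area (see ``sess->iv`` above):

struct rte_crypto_op *deq[1];
uint16_t nb;

rte_crypto_op_attach_sym_session(op, sess);
op->sym->m_src = mbuf;
op->sym->cipher.data.offset = 0;
op->sym->cipher.data.length = 64;	/* multiple of the cipher block size */

nb = rte_cryptodev_enqueue_burst(dev_id, qp_id, &op, 1);
if (nb == 1)
	while (rte_cryptodev_dequeue_burst(dev_id, qp_id, deq, 1) == 0)
		;	/* poll until the op completes */
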
@@ -325,7 +658,9 @@ uadk_cryptodev_probe(struct rte_vdev_device *vdev)
dev->driver_id = uadk_cryptodev_driver_id;
dev->dequeue_burst = uadk_crypto_dequeue_burst;
dev->enqueue_burst = uadk_crypto_enqueue_burst;
- dev->feature_flags = RTE_CRYPTODEV_FF_HW_ACCELERATED;
+ dev->feature_flags = RTE_CRYPTODEV_FF_HW_ACCELERATED |
+ RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
+ RTE_CRYPTODEV_FF_SYM_SESSIONLESS;
priv = dev->data->dev_private;
priv->version = version;
Signed-off-by: Zhangfei Gao <zhangfei.gao@linaro.org>
---
 doc/guides/cryptodevs/features/uadk.ini | 10 +
 doc/guides/cryptodevs/uadk.rst | 6 +
 drivers/crypto/uadk/uadk_crypto_pmd.c | 345 +++++++++++++++++++++++-
 3 files changed, 356 insertions(+), 5 deletions(-)