DPDK 21.11.0
rte_cryptodev.h
Go to the documentation of this file.
1/* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2015-2020 Intel Corporation.
3 */
4
5#ifndef _RTE_CRYPTODEV_H_
6#define _RTE_CRYPTODEV_H_
7
17#ifdef __cplusplus
18extern "C" {
19#endif
20
21#include "rte_kvargs.h"
22#include "rte_crypto.h"
23#include "rte_dev.h"
24#include <rte_common.h>
25#include <rte_config.h>
26#include <rte_rcu_qsbr.h>
27
28#include "rte_cryptodev_trace_fp.h"
29
/*
 * NOTE(review): "cyptodev" (missing 'r') looks like a typo in this exported
 * symbol name; renaming it would break the ABI, so it is only flagged here.
 * Presumably a table of crypto device names - definition not visible in this
 * extract, confirm against the library source.
 */
30extern const char **rte_cyptodev_names;
31
32/* Logging Macros */
33
/*
 * Internal logging helpers for the cryptodev library, built on RTE_LOG
 * with the CRYPTODEV log type.  ERR and DEBUG variants prefix each
 * message with the calling function name and source line; all variants
 * append a trailing newline via RTE_FMT().
 */
34#define CDEV_LOG_ERR(...) \
 35 RTE_LOG(ERR, CRYPTODEV, \
 36 RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
 37 __func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
 38
/* Plain informational message, no function/line prefix. */
 39#define CDEV_LOG_INFO(...) \
 40 RTE_LOG(INFO, CRYPTODEV, \
 41 RTE_FMT(RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
 42 RTE_FMT_TAIL(__VA_ARGS__,)))
 43
 44#define CDEV_LOG_DEBUG(...) \
 45 RTE_LOG(DEBUG, CRYPTODEV, \
 46 RTE_FMT("%s() line %u: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
 47 __func__, __LINE__, RTE_FMT_TAIL(__VA_ARGS__,)))
 48
/*
 * PMD trace helper.  NOTE: expands a bare `dev` identifier, so a variable
 * named `dev` must be in scope at every call site.
 */
 49#define CDEV_PMD_TRACE(...) \
 50 RTE_LOG(DEBUG, CRYPTODEV, \
 51 RTE_FMT("[%s] %s: " RTE_FMT_HEAD(__VA_ARGS__,) "\n", \
 52 dev, __func__, RTE_FMT_TAIL(__VA_ARGS__,)))
53
/*
 * Return a pointer of type t located o bytes past the start of crypto
 * op c (byte-offset pointer arithmetic on the op's virtual address).
 */
67#define rte_crypto_op_ctod_offset(c, t, o) \
 68 ((t)((char *)(c) + (o)))
 69
/*
 * Return the IOVA/physical address o bytes past the start of crypto
 * op c, derived from the op's phys_addr field.
 */
 81#define rte_crypto_op_ctophys_offset(c, o) \
 82 (rte_iova_t)((c)->phys_addr + (o))
83
88 uint16_t min;
89 uint16_t max;
90 uint16_t increment;
96};
97
/*
 * Bit-mask values describing supported cipher data-unit lengths
 * (presumably advertised via the cipher capability's dataunit_set
 * field declared later in this header - confirm against full docs).
 */
103#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_512_BYTES RTE_BIT32(0)
 104#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_4096_BYTES RTE_BIT32(1)
 105#define RTE_CRYPTO_CIPHER_DATA_UNIT_LEN_1_MEGABYTES RTE_BIT32(2)
106
114 union {
115 struct {
118 uint16_t block_size;
128 } auth;
130 struct {
133 uint16_t block_size;
139 uint32_t dataunit_set;
145 } cipher;
147 struct {
150 uint16_t block_size;
160 } aead;
161 };
162};
163
172 uint32_t op_types;
175 __extension__
176 union {
181 };
182};
183
190};
191
192
199 union {
204 };
205};
206
210 union {
211 enum rte_crypto_cipher_algorithm cipher;
214 } algo;
215};
216
225};
226
239 const struct rte_cryptodev_sym_capability_idx *idx);
240
251__rte_experimental
254 const struct rte_cryptodev_asym_capability_idx *idx);
255
268int
270 const struct rte_cryptodev_symmetric_capability *capability,
271 uint16_t key_size, uint16_t iv_size);
272
286int
288 const struct rte_cryptodev_symmetric_capability *capability,
289 uint16_t key_size, uint16_t digest_size, uint16_t iv_size);
290
305int
307 const struct rte_cryptodev_symmetric_capability *capability,
308 uint16_t key_size, uint16_t digest_size, uint16_t aad_size,
309 uint16_t iv_size);
310
321__rte_experimental
322int
324 const struct rte_cryptodev_asymmetric_xform_capability *capability,
325 enum rte_crypto_asym_op_type op_type);
326
337__rte_experimental
338int
340 const struct rte_cryptodev_asymmetric_xform_capability *capability,
341 uint16_t modlen);
342
354int
356 const char *algo_string);
357
369int
371 const char *algo_string);
372
384int
386 const char *algo_string);
387
399__rte_experimental
400int
402 const char *xform_string);
403
404
/*
 * Sentinel initializer terminating a capabilities array: an entry whose
 * first (op type) member is RTE_CRYPTO_OP_TYPE_UNDEFINED.
 */
406#define RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST() \
 407 { RTE_CRYPTO_OP_TYPE_UNDEFINED }
408
409
/*
 * Crypto device feature flags (64-bit mask, one bit per capability).
 * The per-flag Doxygen text was stripped from this extract; names follow
 * the RTE_CRYPTODEV_FF_* convention.  In DPDK naming, SGL = scatter-gather
 * list, LB = linear buffer, OOP = out-of-place operation - confirm
 * individual semantics against the full header documentation.
 */
418#define RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO (1ULL << 0)
 420#define RTE_CRYPTODEV_FF_ASYMMETRIC_CRYPTO (1ULL << 1)
 422#define RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING (1ULL << 2)
/* CPU instruction-set acceleration flags. */
 424#define RTE_CRYPTODEV_FF_CPU_SSE (1ULL << 3)
 426#define RTE_CRYPTODEV_FF_CPU_AVX (1ULL << 4)
 428#define RTE_CRYPTODEV_FF_CPU_AVX2 (1ULL << 5)
 430#define RTE_CRYPTODEV_FF_CPU_AESNI (1ULL << 6)
 432#define RTE_CRYPTODEV_FF_HW_ACCELERATED (1ULL << 7)
 436#define RTE_CRYPTODEV_FF_CPU_AVX512 (1ULL << 8)
/* Buffer layout support flags (in-place / out-of-place, SGL / LB). */
 438#define RTE_CRYPTODEV_FF_IN_PLACE_SGL (1ULL << 9)
 442#define RTE_CRYPTODEV_FF_OOP_SGL_IN_SGL_OUT (1ULL << 10)
 446#define RTE_CRYPTODEV_FF_OOP_SGL_IN_LB_OUT (1ULL << 11)
 451#define RTE_CRYPTODEV_FF_OOP_LB_IN_SGL_OUT (1ULL << 12)
 455#define RTE_CRYPTODEV_FF_OOP_LB_IN_LB_OUT (1ULL << 13)
 457#define RTE_CRYPTODEV_FF_CPU_NEON (1ULL << 14)
 459#define RTE_CRYPTODEV_FF_CPU_ARM_CE (1ULL << 15)
 461#define RTE_CRYPTODEV_FF_SECURITY (1ULL << 16)
 463#define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_EXP (1ULL << 17)
 465#define RTE_CRYPTODEV_FF_RSA_PRIV_OP_KEY_QT (1ULL << 18)
 467#define RTE_CRYPTODEV_FF_DIGEST_ENCRYPTED (1ULL << 19)
 469#define RTE_CRYPTODEV_FF_ASYM_SESSIONLESS (1ULL << 20)
 471#define RTE_CRYPTODEV_FF_SYM_CPU_CRYPTO (1ULL << 21)
 473#define RTE_CRYPTODEV_FF_SYM_SESSIONLESS (1ULL << 22)
 475#define RTE_CRYPTODEV_FF_NON_BYTE_ALIGNED_DATA (1ULL << 23)
 477#define RTE_CRYPTODEV_FF_SYM_RAW_DP (1ULL << 24)
 479#define RTE_CRYPTODEV_FF_CIPHER_MULTIPLE_DATA_UNITS (1ULL << 25)
 481#define RTE_CRYPTODEV_FF_CIPHER_WRAPPED_KEY (1ULL << 26)
 483#define RTE_CRYPTODEV_FF_SECURITY_INNER_CSUM (1ULL << 27)
495extern const char *
497
500 const char *driver_name;
501 uint8_t driver_id;
519 struct {
525 } sym;
526};
527
/* Device attachment state values. */
528#define RTE_CRYPTODEV_DETACHED (0)
 529#define RTE_CRYPTODEV_ATTACHED (1)
530
537
540 uint32_t nb_descriptors;
545};
546
/*
 * Per-queue-pair burst callback.  Invoked on the ops burst by the
 * enqueue/dequeue fast path (see rte_cryptodev_[en|de]queue_burst below);
 * its return value replaces nb_ops, so a callback may shrink the burst.
 * user_param is the opaque pointer supplied when the callback was added.
 */
568typedef uint16_t (*rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id,
 569 struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param);
 570
/*
 * Device event notification callback (event is an
 * rte_cryptodev_event_type); cb_arg is the opaque registration pointer.
 */
 580typedef void (*rte_cryptodev_cb_fn)(uint8_t dev_id,
 581 enum rte_cryptodev_event_type event, void *cb_arg);
582
583
595};
596
/* Maximum length of a crypto device name, including the terminator. */
597#define RTE_CRYPTODEV_NAME_MAX_LEN (64)
609extern int
610rte_cryptodev_get_dev_id(const char *name);
611
622extern const char *
624
632extern uint8_t
634
643extern uint8_t
645
657uint8_t
658rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices,
659 uint8_t nb_devices);
660/*
661 * Return the NUMA socket to which a device is connected
662 *
663 * @param dev_id
664 * The identifier of the device
665 * @return
666 * The NUMA socket id to which the device is connected or
667 * a default of zero if the socket could not be determined.
668 * -1 is returned if the dev_id value is out of range.
669 */
670extern int
671rte_cryptodev_socket_id(uint8_t dev_id);
672
678 uint64_t ff_disable;
685};
686
701extern int
702rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config);
703
719extern int
720rte_cryptodev_start(uint8_t dev_id);
721
728extern void
729rte_cryptodev_stop(uint8_t dev_id);
730
740extern int
741rte_cryptodev_close(uint8_t dev_id);
742
764extern int
765rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id,
766 const struct rte_cryptodev_qp_conf *qp_conf, int socket_id);
767
781__rte_experimental
782int
783rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id);
784
792extern uint16_t
794
795
807extern int
808rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats);
809
815extern void
817
831extern void
832rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info);
833
834
848extern int
850 enum rte_cryptodev_event_type event,
851 rte_cryptodev_cb_fn cb_fn, void *cb_arg);
852
866extern int
868 enum rte_cryptodev_event_type event,
869 rte_cryptodev_cb_fn cb_fn, void *cb_arg);
870
/* Opaque per-event callback record; definition is library-internal. */
871struct rte_cryptodev_callback;
 872
/* Tail-queue head type for lists of rte_cryptodev_callback entries. */
 874RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback);
875
885 void *arg;
887};
888
/*
 * Head of a per-queue-pair callback list whose readers are protected by
 * a QSBR RCU variable (see the reader-side online/offline calls in
 * rte_cryptodev_enqueue_burst / rte_cryptodev_dequeue_burst).
 */
893struct rte_cryptodev_cb_rcu {
 894 struct rte_cryptodev_cb *next; /* first callback in the list */
 896 struct rte_rcu_qsbr *qsbr; /* RCU state guarding list traversal */
 898};
899
/*
 * Return the device's security context pointer.  NOTE(review): the
 * definition is not visible in this extract - presumably an
 * rte_security context, and presumably NULL when unsupported; confirm.
 */
900void *
 901rte_cryptodev_get_sec_ctx(uint8_t dev_id);
902
908 uint64_t opaque_data;
910 uint16_t nb_drivers;
912 uint16_t user_data_sz;
914 __extension__ struct {
915 void *data;
916 uint16_t refcnt;
917 } sess_data[0];
919};
920
923 __extension__ void *sess_private_data[0];
925};
926
953__rte_experimental
954struct rte_mempool *
955rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts,
956 uint32_t elt_size, uint32_t cache_size, uint16_t priv_size,
957 int socket_id);
958
970
980__rte_experimental
983
996int
998
1011__rte_experimental
1012int
1014
1031int
1033 struct rte_cryptodev_sym_session *sess,
1034 struct rte_crypto_sym_xform *xforms,
1035 struct rte_mempool *mempool);
1036
1052__rte_experimental
1053int
1055 struct rte_cryptodev_asym_session *sess,
1056 struct rte_crypto_asym_xform *xforms,
1057 struct rte_mempool *mempool);
1058
1073int
1075 struct rte_cryptodev_sym_session *sess);
1076
1087__rte_experimental
1088int
1090 struct rte_cryptodev_asym_session *sess);
1091
1099unsigned int
1101
1113__rte_experimental
1114unsigned int
1116 struct rte_cryptodev_sym_session *sess);
1117
1124__rte_experimental
1125unsigned int
1127
1139unsigned int
1141
1152__rte_experimental
1153unsigned int
1155
1164unsigned int
1166
1175int rte_cryptodev_driver_id_get(const char *name);
1176
1185const char *rte_cryptodev_driver_name_get(uint8_t driver_id);
1186
1199__rte_experimental
1200int
1202 struct rte_cryptodev_sym_session *sess,
1203 void *data,
1204 uint16_t size);
1205
1216__rte_experimental
1217void *
1219 struct rte_cryptodev_sym_session *sess);
1220
1233__rte_experimental
1234uint32_t
1236 struct rte_cryptodev_sym_session *sess, union rte_crypto_sym_ofs ofs,
1237 struct rte_crypto_sym_vec *vec);
1238
1248__rte_experimental
1249int
1251
1257 struct rte_cryptodev_sym_session *crypto_sess;
1258 struct rte_crypto_sym_xform *xform;
1259 struct rte_security_session *sec_sess;
1260};
1261
1288 void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec,
1289 union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status);
1290
1313 void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec,
1314 uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs,
1315 struct rte_crypto_va_iova_ptr *iv,
1316 struct rte_crypto_va_iova_ptr *digest,
1317 struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1318 void *user_data);
1319
/*
 * Raw data-path API driver hooks.  Each typedef below is implemented by
 * a PMD and stored in struct rte_crypto_raw_dp_ctx.
 */
/* Signal that n previously staged raw operations are done. */
1331typedef int (*cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx,
 1332 uint32_t n);
 1333
/* User hook returning how many operations to dequeue. */
 1343typedef uint32_t (*rte_cryptodev_raw_get_dequeue_count_t)(void *user_data);
 1344
/* User hook invoked per dequeued operation with its index and status. */
 1353typedef void (*rte_cryptodev_raw_post_dequeue_t)(void *user_data,
 1354 uint32_t index, uint8_t is_op_success);
 1355
/*
 * Dequeue a burst of raw operations.
 * NOTE(review): the extraction appears to have dropped one parameter
 * line here (the rte_cryptodev_raw_post_dequeue_t post_dequeue argument,
 * per the full signature in the generated index) - restore from the
 * original header before compiling.
 */
 1397typedef uint32_t (*cryptodev_sym_raw_dequeue_burst_t)(void *qp,
 1398 uint8_t *drv_ctx,
 1399 rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
 1400 uint32_t max_nb_to_dequeue,
 1402 void **out_user_data, uint8_t is_user_data_array,
 1403 uint32_t *n_success, int *dequeue_status);
 1404
/* Dequeue a single raw operation; returns its user_data pointer. */
 1428typedef void * (*cryptodev_sym_raw_dequeue_t)(
 1429 void *qp, uint8_t *drv_ctx, int *dequeue_status,
 1430 enum rte_crypto_op_status *op_status);
1431
1438 void *qp_data;
1439
1446
1447 /* Driver specific context data */
1448 __extension__ uint8_t drv_ctx_data[];
1449};
1450
1474__rte_experimental
1475int
1476rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id,
1477 struct rte_crypto_raw_dp_ctx *ctx,
1478 enum rte_crypto_op_sess_type sess_type,
1479 union rte_cryptodev_session_ctx session_ctx,
1480 uint8_t is_update);
1481
1506__rte_experimental
1507uint32_t
1509 struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs,
1510 void **user_data, int *enqueue_status);
1511
1532__rte_experimental
1533static __rte_always_inline int
1535 struct rte_crypto_vec *data_vec, uint16_t n_data_vecs,
1536 union rte_crypto_sym_ofs ofs,
1537 struct rte_crypto_va_iova_ptr *iv,
1538 struct rte_crypto_va_iova_ptr *digest,
1539 struct rte_crypto_va_iova_ptr *aad_or_auth_iv,
1540 void *user_data)
1541{
1542 return (*ctx->enqueue)(ctx->qp_data, ctx->drv_ctx_data, data_vec,
1543 n_data_vecs, ofs, iv, digest, aad_or_auth_iv, user_data);
1544}
1545
1556__rte_experimental
1557int
1559 uint32_t n);
1560
1602__rte_experimental
1603uint32_t
1605 rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count,
1606 uint32_t max_nb_to_dequeue,
1608 void **out_user_data, uint8_t is_user_data_array,
1609 uint32_t *n_success, int *dequeue_status);
1610
1634__rte_experimental
1635static __rte_always_inline void *
1637 int *dequeue_status, enum rte_crypto_op_status *op_status)
1638{
1639 return (*ctx->dequeue)(ctx->qp_data, ctx->drv_ctx_data, dequeue_status,
1640 op_status);
1641}
1642
1652__rte_experimental
1653int
1655 uint32_t n);
1656
1693__rte_experimental
1694struct rte_cryptodev_cb *
1696 uint16_t qp_id,
1698 void *cb_arg);
1699
1722__rte_experimental
1724 uint16_t qp_id,
1725 struct rte_cryptodev_cb *cb);
1726
1762__rte_experimental
1763struct rte_cryptodev_cb *
1765 uint16_t qp_id,
1767 void *cb_arg);
1768
1790__rte_experimental
1792 uint16_t qp_id,
1793 struct rte_cryptodev_cb *cb);
1794
1795#include <rte_cryptodev_core.h>
/*
 * Dequeue up to nb_ops completed crypto operations from queue pair
 * qp_id of device dev_id into ops[], then (when RTE_CRYPTO_CALLBACKS is
 * enabled) run any registered dequeue callbacks on the resulting burst
 * under RCU reader protection.  Returns the number of operations placed
 * in ops[], as possibly adjusted by the callbacks.
 * NOTE(review): dev_id/qp_id are not bounds-checked on this fast path.
 */
1832static inline uint16_t
 1833rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id,
 1834 struct rte_crypto_op **ops, uint16_t nb_ops)
 1835{
 1836 const struct rte_crypto_fp_ops *fp_ops;
 1837 void *qp;
 1838
 1839 rte_cryptodev_trace_dequeue_burst(dev_id, qp_id, (void **)ops, nb_ops);
 1840
 /* Resolve the device's fast-path ops and the raw queue-pair handle. */
 1841 fp_ops = &rte_crypto_fp_ops[dev_id];
 1842 qp = fp_ops->qp.data[qp_id];
 1843
 1844 nb_ops = fp_ops->dequeue_burst(qp, ops, nb_ops);
 1845
 1846#ifdef RTE_CRYPTO_CALLBACKS
 1847 if (unlikely(fp_ops->qp.deq_cb != NULL)) {
 1848 struct rte_cryptodev_cb_rcu *list;
 1849 struct rte_cryptodev_cb *cb;
 1850
 1851 /* __ATOMIC_RELEASE memory order was used when the
 1852 * call back was inserted into the list.
 1853 * Since there is a clear dependency between loading
 1854 * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
 1855 * not required.
 1856 */
 1857 list = &fp_ops->qp.deq_cb[qp_id];
 /* Register this thread as an RCU reader while walking the list. */
 1858 rte_rcu_qsbr_thread_online(list->qsbr, 0);
 1859 cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
 1860
 /* Each callback may rewrite ops[] and shrink/replace nb_ops. */
 1861 while (cb != NULL) {
 1862 nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
 1863 cb->arg);
 1864 cb = cb->next;
 1865 };
 1866
 1867 rte_rcu_qsbr_thread_offline(list->qsbr, 0);
 1868 }
 1869#endif
 1870 return nb_ops;
 1871}
1872
/*
 * Enqueue up to nb_ops crypto operations from ops[] onto queue pair
 * qp_id of device dev_id.  When RTE_CRYPTO_CALLBACKS is enabled, any
 * registered enqueue callbacks run first (under RCU reader protection)
 * and may adjust the burst before it reaches the PMD.  Returns the
 * number of operations the PMD actually accepted.
 * NOTE(review): dev_id/qp_id are not bounds-checked on this fast path.
 */
1904static inline uint16_t
 1905rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id,
 1906 struct rte_crypto_op **ops, uint16_t nb_ops)
 1907{
 1908 const struct rte_crypto_fp_ops *fp_ops;
 1909 void *qp;
 1910
 /* Resolve the device's fast-path ops and the raw queue-pair handle. */
 1911 fp_ops = &rte_crypto_fp_ops[dev_id];
 1912 qp = fp_ops->qp.data[qp_id];
 1913#ifdef RTE_CRYPTO_CALLBACKS
 1914 if (unlikely(fp_ops->qp.enq_cb != NULL)) {
 1915 struct rte_cryptodev_cb_rcu *list;
 1916 struct rte_cryptodev_cb *cb;
 1917
 1918 /* __ATOMIC_RELEASE memory order was used when the
 1919 * call back was inserted into the list.
 1920 * Since there is a clear dependency between loading
 1921 * cb and cb->fn/cb->next, __ATOMIC_ACQUIRE memory order is
 1922 * not required.
 1923 */
 1924 list = &fp_ops->qp.enq_cb[qp_id];
 /* Register this thread as an RCU reader while walking the list. */
 1925 rte_rcu_qsbr_thread_online(list->qsbr, 0);
 1926 cb = __atomic_load_n(&list->next, __ATOMIC_RELAXED);
 1927
 /* Each callback may rewrite ops[] and shrink/replace nb_ops. */
 1928 while (cb != NULL) {
 1929 nb_ops = cb->fn(dev_id, qp_id, ops, nb_ops,
 1930 cb->arg);
 1931 cb = cb->next;
 1932 };
 1933
 1934 rte_rcu_qsbr_thread_offline(list->qsbr, 0);
 1935 }
 1936#endif
 1937
 1938 rte_cryptodev_trace_enqueue_burst(dev_id, qp_id, (void **)ops, nb_ops);
 1939 return fp_ops->enqueue_burst(qp, ops, nb_ops);
 1940}
1941
1942
1943
1944#ifdef __cplusplus
1945}
1946#endif
1947
1948#endif /* _RTE_CRYPTODEV_H_ */
#define unlikely(x)
#define RTE_STD_C11
Definition: rte_common.h:42
#define __rte_always_inline
Definition: rte_common.h:228
rte_crypto_op_sess_type
Definition: rte_crypto.h:62
rte_crypto_op_type
Definition: rte_crypto.h:29
rte_crypto_op_status
Definition: rte_crypto.h:39
rte_crypto_asym_op_type
rte_crypto_asym_xform_type
rte_crypto_auth_algorithm
rte_crypto_sym_xform_type
rte_crypto_aead_algorithm
rte_crypto_cipher_algorithm
static uint16_t rte_cryptodev_dequeue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
uint32_t(* cryptodev_sym_raw_dequeue_burst_t)(void *qp, uint8_t *drv_ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
int rte_cryptodev_close(uint8_t dev_id)
rte_cryptodev_event_type
@ RTE_CRYPTODEV_EVENT_ERROR
@ RTE_CRYPTODEV_EVENT_UNKNOWN
@ RTE_CRYPTODEV_EVENT_MAX
uint8_t rte_cryptodev_devices_get(const char *driver_name, uint8_t *devices, uint8_t nb_devices)
__rte_experimental int rte_cryptodev_get_raw_dp_ctx_size(uint8_t dev_id)
uint8_t rte_cryptodev_count(void)
__rte_experimental int rte_cryptodev_remove_enq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
__rte_experimental uint32_t rte_cryptodev_raw_dequeue_burst(struct rte_crypto_raw_dp_ctx *ctx, rte_cryptodev_raw_get_dequeue_count_t get_dequeue_count, uint32_t max_nb_to_dequeue, rte_cryptodev_raw_post_dequeue_t post_dequeue, void **out_user_data, uint8_t is_user_data_array, uint32_t *n_success, int *dequeue_status)
static uint16_t rte_cryptodev_enqueue_burst(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops)
int rte_cryptodev_start(uint8_t dev_id)
__rte_experimental int rte_cryptodev_sym_session_set_user_data(struct rte_cryptodev_sym_session *sess, void *data, uint16_t size)
const char * rte_cryptodev_driver_name_get(uint8_t driver_id)
void *(* cryptodev_sym_raw_dequeue_t)(void *qp, uint8_t *drv_ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
__rte_experimental uint32_t rte_cryptodev_sym_cpu_crypto_process(uint8_t dev_id, struct rte_cryptodev_sym_session *sess, union rte_crypto_sym_ofs ofs, struct rte_crypto_sym_vec *vec)
__rte_experimental unsigned int rte_cryptodev_asym_get_private_session_size(uint8_t dev_id)
__rte_experimental int rte_cryptodev_asym_session_free(struct rte_cryptodev_asym_session *sess)
unsigned int rte_cryptodev_is_valid_dev(uint8_t dev_id)
unsigned int rte_cryptodev_sym_get_private_session_size(uint8_t dev_id)
int rte_cryptodev_get_auth_algo_enum(enum rte_crypto_auth_algorithm *algo_enum, const char *algo_string)
void rte_cryptodev_stop(uint8_t dev_id)
int rte_cryptodev_sym_session_init(uint8_t dev_id, struct rte_cryptodev_sym_session *sess, struct rte_crypto_sym_xform *xforms, struct rte_mempool *mempool)
const char * rte_cryptodev_name_get(uint8_t dev_id)
RTE_TAILQ_HEAD(rte_cryptodev_cb_list, rte_cryptodev_callback)
int rte_cryptodev_callback_unregister(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
int rte_cryptodev_sym_capability_check_auth(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t iv_size)
__rte_experimental int rte_cryptodev_remove_deq_callback(uint8_t dev_id, uint16_t qp_id, struct rte_cryptodev_cb *cb)
int rte_cryptodev_configure(uint8_t dev_id, struct rte_cryptodev_config *config)
__rte_experimental struct rte_cryptodev_asym_session * rte_cryptodev_asym_session_create(struct rte_mempool *mempool)
int rte_cryptodev_callback_register(uint8_t dev_id, enum rte_cryptodev_event_type event, rte_cryptodev_cb_fn cb_fn, void *cb_arg)
struct rte_cryptodev_sym_session * rte_cryptodev_sym_session_create(struct rte_mempool *mempool)
__rte_experimental uint32_t rte_cryptodev_raw_enqueue_burst(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void **user_data, int *enqueue_status)
uint16_t rte_cryptodev_queue_pair_count(uint8_t dev_id)
void(* rte_cryptodev_cb_fn)(uint8_t dev_id, enum rte_cryptodev_event_type event, void *cb_arg)
uint32_t(* cryptodev_sym_raw_enqueue_burst_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_sym_vec *vec, union rte_crypto_sym_ofs ofs, void *user_data[], int *enqueue_status)
void rte_cryptodev_stats_reset(uint8_t dev_id)
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_deq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
unsigned int rte_cryptodev_sym_get_header_session_size(void)
static __rte_experimental __rte_always_inline int rte_cryptodev_raw_enqueue(struct rte_crypto_raw_dp_ctx *ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
__rte_experimental int rte_cryptodev_configure_raw_dp_ctx(uint8_t dev_id, uint16_t qp_id, struct rte_crypto_raw_dp_ctx *ctx, enum rte_crypto_op_sess_type sess_type, union rte_cryptodev_session_ctx session_ctx, uint8_t is_update)
__rte_experimental struct rte_cryptodev_cb * rte_cryptodev_add_enq_callback(uint8_t dev_id, uint16_t qp_id, rte_cryptodev_callback_fn cb_fn, void *cb_arg)
void(* rte_cryptodev_raw_post_dequeue_t)(void *user_data, uint32_t index, uint8_t is_op_success)
__rte_experimental unsigned int rte_cryptodev_asym_get_header_session_size(void)
int rte_cryptodev_driver_id_get(const char *name)
int rte_cryptodev_get_dev_id(const char *name)
int rte_cryptodev_sym_session_clear(uint8_t dev_id, struct rte_cryptodev_sym_session *sess)
int rte_cryptodev_stats_get(uint8_t dev_id, struct rte_cryptodev_stats *stats)
int rte_cryptodev_get_cipher_algo_enum(enum rte_crypto_cipher_algorithm *algo_enum, const char *algo_string)
__rte_experimental int rte_cryptodev_asym_session_clear(uint8_t dev_id, struct rte_cryptodev_asym_session *sess)
uint32_t(* rte_cryptodev_raw_get_dequeue_count_t)(void *user_data)
__rte_experimental int rte_cryptodev_asym_session_init(uint8_t dev_id, struct rte_cryptodev_asym_session *sess, struct rte_crypto_asym_xform *xforms, struct rte_mempool *mempool)
static __rte_experimental __rte_always_inline void * rte_cryptodev_raw_dequeue(struct rte_crypto_raw_dp_ctx *ctx, int *dequeue_status, enum rte_crypto_op_status *op_status)
int(* cryptodev_sym_raw_enqueue_t)(void *qp, uint8_t *drv_ctx, struct rte_crypto_vec *data_vec, uint16_t n_data_vecs, union rte_crypto_sym_ofs ofs, struct rte_crypto_va_iova_ptr *iv, struct rte_crypto_va_iova_ptr *digest, struct rte_crypto_va_iova_ptr *aad_or_auth_iv, void *user_data)
int rte_cryptodev_sym_capability_check_aead(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t digest_size, uint16_t aad_size, uint16_t iv_size)
__rte_experimental int rte_cryptodev_raw_enqueue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
const char * rte_cryptodev_get_feature_name(uint64_t flag)
__rte_experimental void * rte_cryptodev_sym_session_get_user_data(struct rte_cryptodev_sym_session *sess)
int rte_cryptodev_sym_capability_check_cipher(const struct rte_cryptodev_symmetric_capability *capability, uint16_t key_size, uint16_t iv_size)
uint8_t rte_cryptodev_device_count_by_driver(uint8_t driver_id)
__rte_experimental int rte_cryptodev_asym_xform_capability_check_optype(const struct rte_cryptodev_asymmetric_xform_capability *capability, enum rte_crypto_asym_op_type op_type)
int rte_cryptodev_get_aead_algo_enum(enum rte_crypto_aead_algorithm *algo_enum, const char *algo_string)
__rte_experimental unsigned int rte_cryptodev_sym_get_existing_header_session_size(struct rte_cryptodev_sym_session *sess)
__rte_experimental const struct rte_cryptodev_asymmetric_xform_capability * rte_cryptodev_asym_capability_get(uint8_t dev_id, const struct rte_cryptodev_asym_capability_idx *idx)
__rte_experimental int rte_cryptodev_get_qp_status(uint8_t dev_id, uint16_t queue_pair_id)
uint16_t(* rte_cryptodev_callback_fn)(uint16_t dev_id, uint16_t qp_id, struct rte_crypto_op **ops, uint16_t nb_ops, void *user_param)
__rte_experimental int rte_cryptodev_raw_dequeue_done(struct rte_crypto_raw_dp_ctx *ctx, uint32_t n)
int rte_cryptodev_queue_pair_setup(uint8_t dev_id, uint16_t queue_pair_id, const struct rte_cryptodev_qp_conf *qp_conf, int socket_id)
__rte_experimental int rte_cryptodev_asym_xform_capability_check_modlen(const struct rte_cryptodev_asymmetric_xform_capability *capability, uint16_t modlen)
int(* cryptodev_sym_raw_operation_done_t)(void *qp, uint8_t *drv_ctx, uint32_t n)
__rte_experimental struct rte_mempool * rte_cryptodev_sym_session_pool_create(const char *name, uint32_t nb_elts, uint32_t elt_size, uint32_t cache_size, uint16_t priv_size, int socket_id)
void rte_cryptodev_info_get(uint8_t dev_id, struct rte_cryptodev_info *dev_info)
__rte_experimental int rte_cryptodev_asym_get_xform_enum(enum rte_crypto_asym_xform_type *xform_enum, const char *xform_string)
const struct rte_cryptodev_symmetric_capability * rte_cryptodev_sym_capability_get(uint8_t dev_id, const struct rte_cryptodev_sym_capability_idx *idx)
int rte_cryptodev_sym_session_free(struct rte_cryptodev_sym_session *sess)
static __rte_always_inline void rte_rcu_qsbr_thread_online(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:303
static __rte_always_inline void rte_rcu_qsbr_thread_offline(struct rte_rcu_qsbr *v, unsigned int thread_id)
Definition: rte_rcu_qsbr.h:356
enum rte_crypto_asym_xform_type type
__extension__ void * sess_private_data[0]
struct rte_crypto_param_range modlen
enum rte_crypto_asym_xform_type xform_type
enum rte_crypto_op_type op
struct rte_cryptodev_symmetric_capability sym
struct rte_cryptodev_asymmetric_capability asym
struct rte_cryptodev_cb * next
rte_cryptodev_callback_fn fn
unsigned max_nb_queue_pairs
struct rte_device * device
uint16_t min_mbuf_headroom_req
const struct rte_cryptodev_capabilities * capabilities
uint16_t min_mbuf_tailroom_req
const char * driver_name
unsigned max_nb_sessions
struct rte_mempool * mp_session_private
struct rte_mempool * mp_session
uint64_t enqueue_err_count
uint64_t dequeue_err_count
__extension__ struct rte_cryptodev_sym_session::@141 sess_data[0]
enum rte_crypto_auth_algorithm algo
struct rte_cryptodev_symmetric_capability::@130::@132 auth
enum rte_crypto_cipher_algorithm algo
enum rte_crypto_aead_algorithm algo
struct rte_crypto_param_range iv_size
struct rte_cryptodev_symmetric_capability::@130::@133 cipher
struct rte_crypto_param_range digest_size
struct rte_crypto_param_range aad_size
struct rte_crypto_param_range key_size
enum rte_crypto_sym_xform_type xform_type
char name[RTE_MEMPOOL_NAMESIZE]
Definition: rte_mempool.h:213
uint32_t cache_size
Definition: rte_mempool.h:224
uint32_t elt_size
Definition: rte_mempool.h:227