author    Damjan Marion <damarion@cisco.com>  2019-03-29 13:47:54 +0100
committer Damjan Marion <damarion@cisco.com>  2019-04-07 11:19:35 +0200
commit    060bfb987a277624e5644de2fcbee1196c2c76e8 (patch)
tree      2ca6ccf57c09c5e016f9613b0e0e75f8e49475eb /src/vnet
parent    dc43bcd8abef2cee4eebdc94d9a82c0194ba00fb (diff)
crypto: add support for AEAD and AES-GCM
Change-Id: Iff6f81a49b9cff5522fbb4914d47472423eac5db
Signed-off-by: Damjan Marion <damarion@cisco.com>
Diffstat (limited to 'src/vnet')
-rw-r--r--  src/vnet/crypto/cli.c          59
-rw-r--r--  src/vnet/crypto/crypto.c      118
-rw-r--r--  src/vnet/crypto/crypto.h      116
-rw-r--r--  src/vnet/crypto/format.c       39
-rw-r--r--  src/vnet/ipsec/esp.h            8
-rw-r--r--  src/vnet/ipsec/esp_decrypt.c   11
-rw-r--r--  src/vnet/ipsec/esp_encrypt.c   13
-rw-r--r--  src/vnet/ipsec/ipsec.c         30
-rw-r--r--  src/vnet/ipsec/ipsec.h          6
-rw-r--r--  src/vnet/ipsec/ipsec_sa.c       6
-rw-r--r--  src/vnet/ipsec/ipsec_sa.h       6
-rw-r--r--  src/vnet/lisp-cp/control.c     10
12 files changed, 272 insertions, 150 deletions
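
The core of the change is vnet_crypto_op_t, which now carries dedicated AEAD and digest fields (aad/aad_len, tag/tag_len, digest/digest_len) instead of overloading dst and hmac_trunc_len. A minimal sketch of how a caller might fill out an AES-256-GCM encrypt op against the new layout; the key/iv/aad/payload pointers and sizes are hypothetical placeholders, not taken from the patch:

  vnet_crypto_op_t _op, *op = &_op;

  vnet_crypto_op_init (op, VNET_CRYPTO_OP_AES_256_GCM_ENC);
  op->key = key;                    /* 32-byte key for AES-256 */
  op->key_len = 32;
  op->iv = iv;                      /* 12-byte nonce is typical for GCM */
  op->iv_len = 12;
  op->aad = aad;                    /* additional authenticated data */
  op->aad_len = aad_len;
  op->src = op->dst = payload;      /* in-place encryption */
  op->len = payload_len;
  op->tag = payload + payload_len;  /* authentication tag written here */
  op->tag_len = 16;

  vnet_crypto_process_ops (vm, op, 1);
  /* on success the active engine sets op->status to
     VNET_CRYPTO_OP_STATUS_COMPLETED */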
diff --git a/src/vnet/crypto/cli.c b/src/vnet/crypto/cli.c
index 792cc4bf243..4b0e093b02e 100644
--- a/src/vnet/crypto/cli.c
+++ b/src/vnet/crypto/cli.c
@@ -53,33 +53,58 @@ VLIB_CLI_COMMAND (show_crypto_engines_command, static) =
.function = show_crypto_engines_command_fn,
};
-static clib_error_t *
-show_crypto_handlers_command_fn (vlib_main_t * vm,
- unformat_input_t * input, vlib_cli_command_t * cmd)
+static u8 *
+format_vnet_crypto_handlers (u8 * s, va_list * args)
{
+ vnet_crypto_alg_t alg = va_arg (*args, vnet_crypto_alg_t);
vnet_crypto_main_t *cm = &crypto_main;
- unformat_input_t _line_input, *line_input = &_line_input;
- u8 *s = 0;
-
- if (unformat_user (input, unformat_line_input, line_input))
- unformat_free (line_input);
+ vnet_crypto_alg_data_t *d = vec_elt_at_index (cm->algs, alg);
+ u32 indent = format_get_indent (s);
+ int i, first = 1;
- vlib_cli_output (vm, "%-40s%-20s%s", "Name", "Active", "Candidates");
- for (int i = 1; i < VNET_CRYPTO_N_OP_TYPES; i++)
+ for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
{
- vnet_crypto_op_type_data_t *otd = cm->opt_data + i;
+ vnet_crypto_op_data_t *od;
vnet_crypto_engine_t *e;
+ vnet_crypto_op_id_t id = d->op_by_type[i];
+
+ if (id == 0)
+ continue;
+
+ od = vec_elt_at_index (cm->opt_data, id);
+ if (first == 0)
+ s = format (s, "\n%U", format_white_space, indent);
+ s = format (s, "%-20U%-20U", format_vnet_crypto_op_type, od->type,
+ format_vnet_crypto_engine, od->active_engine_index,s);
- vec_reset_length (s);
vec_foreach (e, cm->engines)
{
- if (e->ops_handlers[i] != 0)
+ if (e->ops_handlers[id] != 0)
s = format (s, "%U ", format_vnet_crypto_engine, e - cm->engines);
}
- vlib_cli_output (vm, "%-40U%-20U%v", format_vnet_crypto_op, i,
- format_vnet_crypto_engine, otd->active_engine_index,s);
+ first = 0;
}
- vec_free (s);
+ return s;
+}
+
+
+static clib_error_t *
+show_crypto_handlers_command_fn (vlib_main_t * vm,
+ unformat_input_t * input, vlib_cli_command_t * cmd)
+{
+ unformat_input_t _line_input, *line_input = &_line_input;
+ int i;
+
+ if (unformat_user (input, unformat_line_input, line_input))
+ unformat_free (line_input);
+
+ vlib_cli_output (vm, "%-20s%-20s%-20s%s", "Algo", "Type", "Active",
+ "Candidates");
+
+ for (i = 0; i < VNET_CRYPTO_N_ALGS; i++)
+ vlib_cli_output (vm, "%-20U%U", format_vnet_crypto_alg, i,
+ format_vnet_crypto_handlers, i);
+
return 0;
}
@@ -135,7 +160,7 @@ set_crypto_handler_command_fn (vlib_main_t * vm,
u8 *value;
/* *INDENT-OFF* */
- hash_foreach_mem (key, value, cm->ops_handler_index_by_name,
+ hash_foreach_mem (key, value, cm->alg_index_by_name,
({
(void) value;
rc += vnet_crypto_set_handler (key, engine);
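
Because handlers are now keyed by algorithm rather than by per-operation name, vnet_crypto_set_handler takes the algorithm string and rewires every op id that belongs to it (the encrypt/decrypt pair, the AEAD pair, or the single HMAC op). A hedged usage sketch; the engine name "openssl" is only an example of a registered engine:

  /* move both aes-128-gcm op ids (aead-encrypt, aead-decrypt)
     to the named engine; returns -1 if either name is unknown */
  if (vnet_crypto_set_handler ("aes-128-gcm", "openssl") < 0)
    clib_warning ("unknown algorithm or engine");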
diff --git a/src/vnet/crypto/crypto.c b/src/vnet/crypto/crypto.c
index 9d0ad8b13ca..58b13638924 100644
--- a/src/vnet/crypto/crypto.c
+++ b/src/vnet/crypto/crypto.c
@@ -22,7 +22,7 @@ vnet_crypto_main_t crypto_main;
static_always_inline u32
vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
vnet_crypto_main_t * cm,
- vnet_crypto_op_type_t opt,
+ vnet_crypto_op_id_t opt,
vnet_crypto_op_t * ops[], u32 n_ops)
{
if (n_ops == 0)
@@ -48,7 +48,7 @@ vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops)
vnet_crypto_main_t *cm = &crypto_main;
const int op_q_size = VLIB_FRAME_SIZE;
vnet_crypto_op_t *op_queue[op_q_size];
- vnet_crypto_op_type_t opt, current_op_type = ~0;
+ vnet_crypto_op_id_t opt, current_op_type = ~0;
u32 n_op_queue = 0;
u32 rv = 0, i;
@@ -92,41 +92,49 @@ vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
}
int
-vnet_crypto_set_handler (char *ops_handler_name, char *engine)
+vnet_crypto_set_handler (char *alg_name, char *engine)
{
uword *p;
vnet_crypto_main_t *cm = &crypto_main;
- vnet_crypto_op_type_t ot;
- vnet_crypto_op_type_data_t *otd;
+ vnet_crypto_alg_data_t *ad;
vnet_crypto_engine_t *ce;
+ int i;
- p = hash_get_mem (cm->ops_handler_index_by_name, ops_handler_name);
+ p = hash_get_mem (cm->alg_index_by_name, alg_name);
if (!p)
return -1;
- ot = p[0];
- otd = cm->opt_data + ot;
+ ad = vec_elt_at_index (cm->algs, p[0]);
p = hash_get_mem (cm->engine_index_by_name, engine);
if (!p)
return -1;
- ce = cm->engines + p[0];
- otd->active_engine_index = p[0];
- cm->ops_handlers[ot] = ce->ops_handlers[ot];
+ ce = vec_elt_at_index (cm->engines, p[0]);
+
+ for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
+ {
+ vnet_crypto_op_data_t *od;
+ vnet_crypto_op_id_t id = ad->op_by_type[i];
+ if (id == 0)
+ continue;
+ od = vec_elt_at_index (cm->opt_data, id);
+ od->active_engine_index = p[0];
+ cm->ops_handlers[id] = ce->ops_handlers[id];
+ }
return 0;
}
vlib_error_t *
vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
- vnet_crypto_op_type_t opt,
+ vnet_crypto_op_id_t opt,
vnet_crypto_ops_handler_t * fn)
{
vnet_crypto_main_t *cm = &crypto_main;
vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
- vnet_crypto_op_type_data_t *otd = cm->opt_data + opt;
- vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_TYPES - 1,
+ vnet_crypto_op_data_t *otd = cm->opt_data + opt;
+ vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
CLIB_CACHE_LINE_BYTES);
e->ops_handlers[opt] = fn;
@@ -146,47 +154,73 @@ vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
return 0;
}
+static void
+vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
+ vnet_crypto_op_id_t did, char *name, u8 is_aead)
+{
+ vnet_crypto_op_type_t eopt, dopt;
+ vnet_crypto_main_t *cm = &crypto_main;
+ cm->algs[alg].name = name;
+ cm->opt_data[eid].alg = cm->opt_data[did].alg = alg;
+ cm->opt_data[eid].active_engine_index = ~0;
+ cm->opt_data[did].active_engine_index = ~0;
+ if (is_aead)
+ {
+ eopt = VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT;
+ dopt = VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT;
+ }
+ else
+ {
+ eopt = VNET_CRYPTO_OP_TYPE_ENCRYPT;
+ dopt = VNET_CRYPTO_OP_TYPE_DECRYPT;
+ }
+ cm->opt_data[eid].type = eopt;
+ cm->opt_data[did].type = dopt;
+ cm->algs[alg].op_by_type[eopt] = eid;
+ cm->algs[alg].op_by_type[dopt] = did;
+ hash_set_mem (cm->alg_index_by_name, name, alg);
+}
+
+static void
+vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
+ vnet_crypto_op_id_t id, char *name)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+ cm->algs[alg].name = name;
+ cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id;
+ cm->opt_data[id].alg = alg;
+ cm->opt_data[id].active_engine_index = ~0;
+ cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC;
+ hash_set_mem (cm->alg_index_by_name, name, alg);
+}
+
clib_error_t *
vnet_crypto_init (vlib_main_t * vm)
{
vnet_crypto_main_t *cm = &crypto_main;
vlib_thread_main_t *tm = vlib_get_thread_main ();
-#define CRYPTO_ENC_STR "encrypt"
-#define CRYPTO_DEC_STR "decrypt"
-#define CRYPTO_HMAC_STR "hmac"
-
cm->engine_index_by_name = hash_create_string ( /* size */ 0,
sizeof (uword));
- cm->ops_handler_index_by_name = hash_create_string (0, sizeof (uword));
-
+ cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
-
#define _(n, s) \
- cm->algs[VNET_CRYPTO_ALG_##n].name = s; \
- cm->opt_data[VNET_CRYPTO_OP_##n##_ENC].alg = VNET_CRYPTO_ALG_##n; \
- cm->opt_data[VNET_CRYPTO_OP_##n##_DEC].alg = VNET_CRYPTO_ALG_##n; \
- cm->opt_data[VNET_CRYPTO_OP_##n##_ENC].desc = CRYPTO_ENC_STR; \
- cm->opt_data[VNET_CRYPTO_OP_##n##_DEC].desc = CRYPTO_DEC_STR; \
- cm->opt_data[VNET_CRYPTO_OP_##n##_ENC].active_engine_index = ~0; \
- cm->opt_data[VNET_CRYPTO_OP_##n##_DEC].active_engine_index = ~0; \
- hash_set_mem (cm->ops_handler_index_by_name, CRYPTO_ENC_STR "-" s, \
- VNET_CRYPTO_OP_##n##_ENC); \
- hash_set_mem (cm->ops_handler_index_by_name, CRYPTO_DEC_STR "-" s, \
- VNET_CRYPTO_OP_##n##_DEC);
- foreach_crypto_alg;
+ vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
+ VNET_CRYPTO_OP_##n##_ENC, \
+ VNET_CRYPTO_OP_##n##_DEC, s, 0);
+ foreach_crypto_cipher_alg;
#undef _
-
#define _(n, s) \
- cm->algs[VNET_CRYPTO_ALG_##n].name = s; \
- cm->opt_data[VNET_CRYPTO_OP_##n##_HMAC].alg = VNET_CRYPTO_ALG_##n; \
- cm->opt_data[VNET_CRYPTO_OP_##n##_HMAC].desc = CRYPTO_HMAC_STR; \
- cm->opt_data[VNET_CRYPTO_OP_##n##_HMAC].active_engine_index = ~0; \
- hash_set_mem (cm->ops_handler_index_by_name, CRYPTO_HMAC_STR "-" s, \
- VNET_CRYPTO_OP_##n##_HMAC);
- foreach_hmac_alg;
+ vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
+ VNET_CRYPTO_OP_##n##_ENC, \
+ VNET_CRYPTO_OP_##n##_DEC, s, 1);
+ foreach_crypto_aead_alg;
+#undef _
+#define _(n, s) \
+ vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \
+ VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
+ foreach_crypto_hmac_alg;
#undef _
-
return 0;
}
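
For reference, under the new init helpers a single foreach_crypto_aead_alg entry such as _(AES_128_GCM, "aes-128-gcm") expands to roughly:

  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_AES_128_GCM,
                                VNET_CRYPTO_OP_AES_128_GCM_ENC,
                                VNET_CRYPTO_OP_AES_128_GCM_DEC,
                                "aes-128-gcm", /* is_aead */ 1);

which records both op ids in algs[alg].op_by_type[] under the AEAD_ENCRYPT/AEAD_DECRYPT types and hashes the name into alg_index_by_name, replacing the old per-op "encrypt-"/"decrypt-" string keys.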
diff --git a/src/vnet/crypto/crypto.h b/src/vnet/crypto/crypto.h
index 2bd40dc9c75..5d03d52756e 100644
--- a/src/vnet/crypto/crypto.h
+++ b/src/vnet/crypto/crypto.h
@@ -20,14 +20,19 @@
#include <vlib/vlib.h>
-#define foreach_crypto_alg \
- _(DES_CBC, "des-cbc") \
- _(3DES_CBC, "3des-cbc") \
+#define foreach_crypto_cipher_alg \
+ _(DES_CBC, "des-cbc") \
+ _(3DES_CBC, "3des-cbc") \
_(AES_128_CBC, "aes-128-cbc") \
_(AES_192_CBC, "aes-192-cbc") \
_(AES_256_CBC, "aes-256-cbc")
-#define foreach_hmac_alg \
+#define foreach_crypto_aead_alg \
+ _(AES_128_GCM, "aes-128-gcm") \
+ _(AES_192_GCM, "aes-192-gcm") \
+ _(AES_256_GCM, "aes-256-gcm")
+
+#define foreach_crypto_hmac_alg \
_(MD5, "md5") \
_(SHA1, "sha-1") \
_(SHA224, "sha-224") \
@@ -35,14 +40,46 @@
_(SHA384, "sha-384") \
_(SHA512, "sha-512")
+
+#define foreach_crypto_op_type \
+ _(ENCRYPT, "encrypt") \
+ _(DECRYPT, "decrypt") \
+ _(AEAD_ENCRYPT, "aead-encrypt") \
+ _(AEAD_DECRYPT, "aead-decrypt") \
+ _(HMAC, "hmac")
+
+typedef enum
+{
+#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
+ foreach_crypto_op_type
+#undef _
+ VNET_CRYPTO_OP_N_TYPES,
+} vnet_crypto_op_type_t;
+
+#define foreach_crypto_op_status \
+ _(PENDING, "pending") \
+ _(COMPLETED, "completed") \
+ _(FAIL_NO_HANDLER, "no-handler") \
+ _(FAIL_BAD_HMAC, "bad-hmac") \
+ _(FAIL_DECRYPT, "decrypt-fail")
+
+typedef enum
+{
+#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
+ foreach_crypto_op_status
+#undef _
+ VNET_CRYPTO_OP_N_STATUS,
+} vnet_crypto_op_status_t;
+
/* *INDENT-OFF* */
typedef enum
{
#define _(n, s) VNET_CRYPTO_ALG_##n,
- foreach_crypto_alg
+ foreach_crypto_cipher_alg
+ foreach_crypto_aead_alg
#undef _
-#define _(n, s) VNET_CRYPTO_ALG_##n,
- foreach_hmac_alg
+#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
+ foreach_crypto_hmac_alg
#undef _
VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;
@@ -51,68 +88,54 @@ typedef enum
{
VNET_CRYPTO_OP_NONE = 0,
#define _(n, s) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
- foreach_crypto_alg
+ foreach_crypto_cipher_alg
+ foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
- foreach_hmac_alg
+ foreach_crypto_hmac_alg
#undef _
- VNET_CRYPTO_N_OP_TYPES,
-} vnet_crypto_op_type_t;
+ VNET_CRYPTO_N_OP_IDS,
+} vnet_crypto_op_id_t;
/* *INDENT-ON* */
typedef struct
{
char *name;
+ vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;
-typedef enum
-{
- VNET_CRYPTO_OP_STATUS_PENDING,
- VNET_CRYPTO_OP_STATUS_COMPLETED,
- VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER,
- VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC,
-} vnet_crypto_op_status_t;
-
typedef struct
{
CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
- vnet_crypto_op_type_t op:8;
+ vnet_crypto_op_id_t op:16;
vnet_crypto_op_status_t status:8;
- u8 key_len, hmac_trunc_len;
- u16 flags;
+ u8 flags;
#define VNET_CRYPTO_OP_FLAG_INIT_IV (1 << 0)
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 1)
u32 len;
+ u16 aad_len;
+ u8 key_len, iv_len, digest_len, tag_len;
u8 *key;
u8 *iv;
u8 *src;
u8 *dst;
+ u8 *aad;
+ u8 *tag;
+ u8 *digest;
uword user_data;
} vnet_crypto_op_t;
typedef struct
{
+ vnet_crypto_op_type_t type;
vnet_crypto_alg_t alg;
- const char *desc;
u32 active_engine_index;
-} vnet_crypto_op_type_data_t;
-
-typedef struct
-{
- CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
- u32 head;
- u32 tail;
- u32 size;
- vnet_crypto_alg_t alg:8;
- vnet_crypto_op_type_t op:8;
- vnet_crypto_op_t *jobs[0];
-} vnet_crypto_queue_t;
+} vnet_crypto_op_data_t;
typedef struct
{
CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
clib_bitmap_t *act_queues;
- vnet_crypto_queue_t *queues[VNET_CRYPTO_N_OP_TYPES];
} vnet_crypto_thread_t;
typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
@@ -123,7 +146,7 @@ u32 vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
vlib_error_t *vnet_crypto_register_ops_handler (vlib_main_t * vm,
u32 provider_index,
- vnet_crypto_op_type_t opt,
+ vnet_crypto_op_id_t opt,
vnet_crypto_ops_handler_t *
f);
@@ -132,7 +155,7 @@ typedef struct
char *name;
char *desc;
int priority;
- vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_TYPES];
+ vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
} vnet_crypto_engine_t;
typedef struct
@@ -140,10 +163,10 @@ typedef struct
vnet_crypto_alg_data_t *algs;
vnet_crypto_thread_t *threads;
vnet_crypto_ops_handler_t **ops_handlers;
- vnet_crypto_op_type_data_t opt_data[VNET_CRYPTO_N_OP_TYPES];
+ vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
vnet_crypto_engine_t *engines;
uword *engine_index_by_name;
- uword *ops_handler_index_by_name;
+ uword *alg_index_by_name;
} vnet_crypto_main_t;
extern vnet_crypto_main_t crypto_main;
@@ -160,10 +183,11 @@ int vnet_crypto_set_handler (char *ops_handler_name, char *engine);
format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
-
+format_function_t format_vnet_crypto_op_type;
+format_function_t format_vnet_crypto_op_status;
static_always_inline void
-vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_type_t type)
+vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
{
if (CLIB_DEBUG > 0)
clib_memset (op, 0xfe, sizeof (*op));
@@ -171,6 +195,14 @@ vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_type_t type)
op->flags = 0;
}
+static_always_inline vnet_crypto_op_type_t
+vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_op_data_t *od = vec_elt_at_index (cm->opt_data, id);
+ return od->type;
+}
+
#endif /* included_vnet_crypto_crypto_h */
/*
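
The header now separates three notions: the algorithm (vnet_crypto_alg_t), the operation type (encrypt, decrypt, aead-encrypt, aead-decrypt, hmac) and the concrete op id that engines register handlers for. A sketch of how an engine could plug in an AES-GCM handler under the new ids; the engine name, priority, description argument and handler body are assumptions for illustration:

  static u32
  my_aes_128_gcm_enc (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops)
  {
    /* process the AEAD-encrypt ops, set each op->status, return
       the number completed successfully */
    return n_ops;
  }

  u32 eidx = vnet_crypto_register_engine (vm, "my-engine", 100,
                                          "example engine");
  vnet_crypto_register_ops_handler (vm, eidx,
                                    VNET_CRYPTO_OP_AES_128_GCM_ENC,
                                    my_aes_128_gcm_enc);

  /* the op type is always recoverable from the id */
  ASSERT (vnet_crypto_get_op_type (VNET_CRYPTO_OP_AES_128_GCM_ENC)
          == VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT);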
diff --git a/src/vnet/crypto/format.c b/src/vnet/crypto/format.c
index c2786ee5afd..df811fe4f2f 100644
--- a/src/vnet/crypto/format.c
+++ b/src/vnet/crypto/format.c
@@ -30,10 +30,43 @@ u8 *
format_vnet_crypto_op (u8 * s, va_list * args)
{
vnet_crypto_main_t *cm = &crypto_main;
- vnet_crypto_op_type_t op = va_arg (*args, int); // vnet_crypto_op_type_t);
- vnet_crypto_op_type_data_t *otd = cm->opt_data + op;
+ vnet_crypto_op_id_t op = va_arg (*args, int); // vnet_crypto_op_id_t);
+ vnet_crypto_op_data_t *otd = cm->opt_data + op;
- return format (s, "%s-%U", otd->desc, format_vnet_crypto_alg, otd->alg);
+ return format (s, "%U-%U", format_vnet_crypto_op_type, otd->type,
+ format_vnet_crypto_alg, otd->alg);
+}
+
+u8 *
+format_vnet_crypto_op_type (u8 * s, va_list * args)
+{
+ vnet_crypto_op_type_t opt = va_arg (*args, vnet_crypto_op_type_t);
+ char *strings[] = {
+#define _(n, s) [VNET_CRYPTO_OP_TYPE_##n] = s,
+ foreach_crypto_op_type
+#undef _
+ };
+
+ if (opt >= VNET_CRYPTO_OP_N_TYPES)
+ return format (s, "unknown");
+
+ return format (s, "%s", strings[opt]);
+}
+
+u8 *
+format_vnet_crypto_op_status (u8 * s, va_list * args)
+{
+ vnet_crypto_op_status_t st = va_arg (*args, vnet_crypto_op_status_t);
+ char *strings[] = {
+#define _(n, s) [VNET_CRYPTO_OP_STATUS_##n] = s,
+ foreach_crypto_op_status
+#undef _
+ };
+
+ if (st >= VNET_CRYPTO_OP_N_STATUS)
+ return format (s, "unknown");
+
+ return format (s, "%s", strings[st]);
}
u8 *
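
With the desc string gone from the op data, format_vnet_crypto_op composes the op-type and algorithm formatters, so an op id renders as the two names joined by a dash; a small hedged example:

  u8 *s = format (0, "%U", format_vnet_crypto_op,
                  VNET_CRYPTO_OP_AES_128_GCM_ENC);
  /* s now reads along the lines of "aead-encrypt-aes-128-gcm" */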
diff --git a/src/vnet/ipsec/esp.h b/src/vnet/ipsec/esp.h
index b6942fadf97..4b67eb2134b 100644
--- a/src/vnet/ipsec/esp.h
+++ b/src/vnet/ipsec/esp.h
@@ -94,16 +94,16 @@ hmac_calc (vlib_main_t * vm, ipsec_sa_t * sa, u8 * data, int data_len,
{
vnet_crypto_op_t _op, *op = &_op;
- if (PREDICT_FALSE (sa->integ_op_type == 0))
+ if (PREDICT_FALSE (sa->integ_op_id == 0))
return 0;
- vnet_crypto_op_init (op, sa->integ_op_type);
+ vnet_crypto_op_init (op, sa->integ_op_id);
op->key = sa->integ_key.data;
op->key_len = sa->integ_key.len;
op->src = data;
op->len = data_len;
- op->dst = signature;
- op->hmac_trunc_len = sa->integ_icv_size;
+ op->digest = signature;
+ op->digest_len = sa->integ_icv_size;
if (ipsec_sa_is_set_USE_ESN (sa))
{
diff --git a/src/vnet/ipsec/esp_decrypt.c b/src/vnet/ipsec/esp_decrypt.c
index 7737d186865..9b24e5aaeaa 100644
--- a/src/vnet/ipsec/esp_decrypt.c
+++ b/src/vnet/ipsec/esp_decrypt.c
@@ -202,14 +202,14 @@ esp_decrypt_inline (vlib_main_t * vm,
vnet_crypto_op_t *op;
vec_add2_aligned (ptd->integ_ops, op, 1, CLIB_CACHE_LINE_BYTES);
- vnet_crypto_op_init (op, sa0->integ_op_type);
+ vnet_crypto_op_init (op, sa0->integ_op_id);
op->key = sa0->integ_key.data;
op->key_len = sa0->integ_key.len;
op->src = payload;
- op->hmac_trunc_len = cpd.icv_sz;
op->flags = VNET_CRYPTO_OP_FLAG_HMAC_CHECK;
op->user_data = b - bufs;
- op->dst = payload + len;
+ op->digest = payload + len;
+ op->digest_len = cpd.icv_sz;
op->len = len;
if (PREDICT_TRUE (sa0->flags & IPSEC_SA_FLAG_USE_ESN))
{
@@ -226,11 +226,11 @@ esp_decrypt_inline (vlib_main_t * vm,
payload += esp_sz;
len -= esp_sz;
- if (sa0->crypto_enc_op_type != VNET_CRYPTO_OP_NONE)
+ if (sa0->crypto_enc_op_id != VNET_CRYPTO_OP_NONE)
{
vnet_crypto_op_t *op;
vec_add2_aligned (ptd->crypto_ops, op, 1, CLIB_CACHE_LINE_BYTES);
- vnet_crypto_op_init (op, sa0->crypto_dec_op_type);
+ vnet_crypto_op_init (op, sa0->crypto_dec_op_id);
op->key = sa0->crypto_key.data;
op->iv = payload;
op->src = op->dst = payload += cpd.iv_sz;
@@ -271,7 +271,6 @@ esp_decrypt_inline (vlib_main_t * vm,
op++;
}
}
-
if ((n = vec_len (ptd->crypto_ops)))
{
vnet_crypto_op_t *op = ptd->crypto_ops;
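
On the decrypt side the integrity op keeps VNET_CRYPTO_OP_FLAG_HMAC_CHECK and now points op->digest at the received ICV, so the active engine is expected to compare its computed digest against that buffer and report a mismatch through the op status. A hedged sketch of checking the result of such an op:

  vnet_crypto_process_ops (vm, op, 1);
  if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
    /* ICV verification failed; the packet should be dropped */ ;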
diff --git a/src/vnet/ipsec/esp_encrypt.c b/src/vnet/ipsec/esp_encrypt.c
index 29e27d4488c..bb1effda68b 100644
--- a/src/vnet/ipsec/esp_encrypt.c
+++ b/src/vnet/ipsec/esp_encrypt.c
@@ -425,11 +425,11 @@ esp_encrypt_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
esp->spi = spi;
esp->seq = clib_net_to_host_u32 (sa0->seq);
- if (sa0->crypto_enc_op_type)
+ if (sa0->crypto_enc_op_id)
{
vnet_crypto_op_t *op;
vec_add2_aligned (ptd->crypto_ops, op, 1, CLIB_CACHE_LINE_BYTES);
- vnet_crypto_op_init (op, sa0->crypto_enc_op_type);
+ vnet_crypto_op_init (op, sa0->crypto_enc_op_id);
op->iv = payload - iv_sz;
op->src = op->dst = payload;
op->key = sa0->crypto_key.data;
@@ -438,16 +438,16 @@ esp_encrypt_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
op->user_data = b - bufs;
}
- if (sa0->integ_op_type)
+ if (sa0->integ_op_id)
{
vnet_crypto_op_t *op;
vec_add2_aligned (ptd->integ_ops, op, 1, CLIB_CACHE_LINE_BYTES);
- vnet_crypto_op_init (op, sa0->integ_op_type);
+ vnet_crypto_op_init (op, sa0->integ_op_id);
op->src = payload - iv_sz - sizeof (esp_header_t);
- op->dst = payload + payload_len - icv_sz;
+ op->digest = payload + payload_len - icv_sz;
op->key = sa0->integ_key.data;
op->key_len = sa0->integ_key.len;
- op->hmac_trunc_len = icv_sz;
+ op->digest_len = icv_sz;
op->len = payload_len - icv_sz + iv_sz + sizeof (esp_header_t);
op->user_data = b - bufs;
if (ipsec_sa_is_set_USE_ESN (sa0))
@@ -484,7 +484,6 @@ esp_encrypt_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
current_sa_index, current_sa_packets,
current_sa_bytes);
-
esp_process_ops (vm, node, ptd->crypto_ops, bufs, nexts);
esp_process_ops (vm, node, ptd->integ_ops, bufs, nexts);
diff --git a/src/vnet/ipsec/ipsec.c b/src/vnet/ipsec/ipsec.c
index 9719d3a2d09..dc2f4cdbb60 100644
--- a/src/vnet/ipsec/ipsec.c
+++ b/src/vnet/ipsec/ipsec.c
@@ -269,51 +269,51 @@ ipsec_init (vlib_main_t * vm)
vec_validate (im->crypto_algs, IPSEC_CRYPTO_N_ALG - 1);
a = im->crypto_algs + IPSEC_CRYPTO_ALG_DES_CBC;
- a->enc_op_type = VNET_CRYPTO_OP_DES_CBC_ENC;
- a->dec_op_type = VNET_CRYPTO_OP_DES_CBC_DEC;
+ a->enc_op_id = VNET_CRYPTO_OP_DES_CBC_ENC;
+ a->dec_op_id = VNET_CRYPTO_OP_DES_CBC_DEC;
a->iv_size = a->block_size = 8;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_3DES_CBC;
- a->enc_op_type = VNET_CRYPTO_OP_3DES_CBC_ENC;
- a->dec_op_type = VNET_CRYPTO_OP_3DES_CBC_DEC;
+ a->enc_op_id = VNET_CRYPTO_OP_3DES_CBC_ENC;
+ a->dec_op_id = VNET_CRYPTO_OP_3DES_CBC_DEC;
a->iv_size = a->block_size = 8;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_CBC_128;
- a->enc_op_type = VNET_CRYPTO_OP_AES_128_CBC_ENC;
- a->dec_op_type = VNET_CRYPTO_OP_AES_128_CBC_DEC;
+ a->enc_op_id = VNET_CRYPTO_OP_AES_128_CBC_ENC;
+ a->dec_op_id = VNET_CRYPTO_OP_AES_128_CBC_DEC;
a->iv_size = a->block_size = 16;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_CBC_192;
- a->enc_op_type = VNET_CRYPTO_OP_AES_192_CBC_ENC;
- a->dec_op_type = VNET_CRYPTO_OP_AES_192_CBC_DEC;
+ a->enc_op_id = VNET_CRYPTO_OP_AES_192_CBC_ENC;
+ a->dec_op_id = VNET_CRYPTO_OP_AES_192_CBC_DEC;
a->iv_size = a->block_size = 16;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_CBC_256;
- a->enc_op_type = VNET_CRYPTO_OP_AES_256_CBC_ENC;
- a->dec_op_type = VNET_CRYPTO_OP_AES_256_CBC_DEC;
+ a->enc_op_id = VNET_CRYPTO_OP_AES_256_CBC_ENC;
+ a->dec_op_id = VNET_CRYPTO_OP_AES_256_CBC_DEC;
a->iv_size = a->block_size = 16;
vec_validate (im->integ_algs, IPSEC_INTEG_N_ALG - 1);
ipsec_main_integ_alg_t *i;
i = &im->integ_algs[IPSEC_INTEG_ALG_SHA1_96];
- i->op_type = VNET_CRYPTO_OP_SHA1_HMAC;
+ i->op_id = VNET_CRYPTO_OP_SHA1_HMAC;
i->icv_size = 12;
i = &im->integ_algs[IPSEC_INTEG_ALG_SHA_256_96];
- i->op_type = VNET_CRYPTO_OP_SHA1_HMAC;
+ i->op_id = VNET_CRYPTO_OP_SHA1_HMAC;
i->icv_size = 12;
i = &im->integ_algs[IPSEC_INTEG_ALG_SHA_256_128];
- i->op_type = VNET_CRYPTO_OP_SHA256_HMAC;
+ i->op_id = VNET_CRYPTO_OP_SHA256_HMAC;
i->icv_size = 16;
i = &im->integ_algs[IPSEC_INTEG_ALG_SHA_384_192];
- i->op_type = VNET_CRYPTO_OP_SHA384_HMAC;
+ i->op_id = VNET_CRYPTO_OP_SHA384_HMAC;
i->icv_size = 24;
i = &im->integ_algs[IPSEC_INTEG_ALG_SHA_512_256];
- i->op_type = VNET_CRYPTO_OP_SHA512_HMAC;
+ i->op_id = VNET_CRYPTO_OP_SHA512_HMAC;
i->icv_size = 32;
vec_validate_aligned (im->ptd, vlib_num_workers (), CLIB_CACHE_LINE_BYTES);
diff --git a/src/vnet/ipsec/ipsec.h b/src/vnet/ipsec/ipsec.h
index 821b7ed3107..b6332d672fb 100644
--- a/src/vnet/ipsec/ipsec.h
+++ b/src/vnet/ipsec/ipsec.h
@@ -66,15 +66,15 @@ typedef struct
typedef struct
{
- vnet_crypto_op_type_t enc_op_type;
- vnet_crypto_op_type_t dec_op_type;
+ vnet_crypto_op_id_t enc_op_id;
+ vnet_crypto_op_id_t dec_op_id;
u8 iv_size;
u8 block_size;
} ipsec_main_crypto_alg_t;
typedef struct
{
- vnet_crypto_op_type_t op_type;
+ vnet_crypto_op_id_t op_id;
u8 icv_size;
} ipsec_main_integ_alg_t;
diff --git a/src/vnet/ipsec/ipsec_sa.c b/src/vnet/ipsec/ipsec_sa.c
index 4d20566686d..af37b2e49cc 100644
--- a/src/vnet/ipsec/ipsec_sa.c
+++ b/src/vnet/ipsec/ipsec_sa.c
@@ -98,8 +98,8 @@ ipsec_sa_set_crypto_alg (ipsec_sa_t * sa, ipsec_crypto_alg_t crypto_alg)
sa->crypto_alg = crypto_alg;
sa->crypto_iv_size = im->crypto_algs[crypto_alg].iv_size;
sa->crypto_block_size = im->crypto_algs[crypto_alg].block_size;
- sa->crypto_enc_op_type = im->crypto_algs[crypto_alg].enc_op_type;
- sa->crypto_dec_op_type = im->crypto_algs[crypto_alg].dec_op_type;
+ sa->crypto_enc_op_id = im->crypto_algs[crypto_alg].enc_op_id;
+ sa->crypto_dec_op_id = im->crypto_algs[crypto_alg].dec_op_id;
ASSERT (sa->crypto_iv_size <= ESP_MAX_IV_SIZE);
ASSERT (sa->crypto_block_size <= ESP_MAX_BLOCK_SIZE);
}
@@ -110,7 +110,7 @@ ipsec_sa_set_integ_alg (ipsec_sa_t * sa, ipsec_integ_alg_t integ_alg)
ipsec_main_t *im = &ipsec_main;
sa->integ_alg = integ_alg;
sa->integ_icv_size = im->integ_algs[integ_alg].icv_size;
- sa->integ_op_type = im->integ_algs[integ_alg].op_type;
+ sa->integ_op_id = im->integ_algs[integ_alg].op_id;
ASSERT (sa->integ_icv_size <= ESP_MAX_ICV_SIZE);
}
diff --git a/src/vnet/ipsec/ipsec_sa.h b/src/vnet/ipsec/ipsec_sa.h
index 12700ccaa39..72a592984f6 100644
--- a/src/vnet/ipsec/ipsec_sa.h
+++ b/src/vnet/ipsec/ipsec_sa.h
@@ -119,9 +119,9 @@ typedef struct
u32 last_seq_hi;
u64 replay_window;
- vnet_crypto_op_type_t crypto_enc_op_type;
- vnet_crypto_op_type_t crypto_dec_op_type;
- vnet_crypto_op_type_t integ_op_type;
+ vnet_crypto_op_id_t crypto_enc_op_id;
+ vnet_crypto_op_id_t crypto_dec_op_id;
+ vnet_crypto_op_id_t integ_op_id;
dpo_id_t dpo[IPSEC_N_PROTOCOLS];
diff --git a/src/vnet/lisp-cp/control.c b/src/vnet/lisp-cp/control.c
index bce44288d08..340217c661e 100644
--- a/src/vnet/lisp-cp/control.c
+++ b/src/vnet/lisp-cp/control.c
@@ -2725,7 +2725,7 @@ build_map_register_record_list (lisp_cp_main_t * lcm)
return recs;
}
-static vnet_crypto_op_type_t
+static vnet_crypto_op_id_t
lisp_key_type_to_crypto_op (lisp_key_type_t key_id)
{
switch (key_id)
@@ -2755,9 +2755,9 @@ update_map_register_auth_data (map_register_hdr_t * map_reg_hdr,
op->key = key;
op->key_len = vec_len (key);
op->len = msg_len;
- op->dst = MREG_DATA (map_reg_hdr);
+ op->digest = MREG_DATA (map_reg_hdr);
op->src = (u8 *) map_reg_hdr;
- op->hmac_trunc_len = 0;
+ op->digest_len = 0;
op->iv = 0;
vnet_crypto_process_ops (lcm->vlib_main, op, 1);
@@ -3946,9 +3946,9 @@ is_auth_data_valid (map_notify_hdr_t * h, u32 msg_len,
op->key = key;
op->key_len = vec_len (key);
op->len = msg_len;
- op->dst = out;
+ op->digest = out;
op->src = (u8 *) h;
- op->hmac_trunc_len = 0;
+ op->digest_len = 0;
op->iv = 0;
vnet_crypto_process_ops (lcm->vlib_main, op, 1);