Diffstat (limited to 'src/tools')
-rw-r--r--  src/tools/appimage/CMakeLists.txt     |   2
-rw-r--r--  src/tools/g2/clib.c                   |   1
-rw-r--r--  src/tools/g2/cpel.c                   |   1
-rw-r--r--  src/tools/g2/events.c                 |   1
-rw-r--r--  src/tools/g2/pointsel.c               |   2
-rw-r--r--  src/tools/g2/view1.c                  |  23
-rw-r--r--  src/tools/perftool/c2cpel.c           |   1
-rw-r--r--  src/tools/perftool/cpel_util.c        |   1
-rw-r--r--  src/tools/perftool/cpelatency.c       |   1
-rw-r--r--  src/tools/perftool/cpeldump.c         |   1
-rw-r--r--  src/tools/perftool/cpelinreg.c        |   1
-rw-r--r--  src/tools/perftool/cpelstate.c        |   1
-rw-r--r--  src/tools/perftool/delsvec.c          | 496
-rw-r--r--  src/tools/vppapigen/CMakeLists.txt    |  18
-rw-r--r--  src/tools/vppapigen/VPPAPI.rst        | 193
-rwxr-xr-x  src/tools/vppapigen/generate_json.py  |   9
-rwxr-xr-x  src/tools/vppapigen/vppapigen.py      |  36
-rwxr-xr-x  src/tools/vppapigen/vppapigen_c.py    | 210   (mode changed from -rw-r--r--)
-rw-r--r--  src/tools/vppapigen/vppapigen_json.py |  32
19 files changed, 684 insertions, 346 deletions
diff --git a/src/tools/appimage/CMakeLists.txt b/src/tools/appimage/CMakeLists.txt
index 1b83656dbf8..26ef77d1c91 100644
--- a/src/tools/appimage/CMakeLists.txt
+++ b/src/tools/appimage/CMakeLists.txt
@@ -18,7 +18,7 @@ if(VPP_BUILD_APPIMAGE)
WORLD_READ WORLD_EXECUTE)
install(FILES vpp.desktop DESTINATION .)
install(FILES vpp.png DESTINATION .)
- install(FILES vpp.svg DESTINATION share/icons/hicolor/scalable/vpp.svg)
+ install(FILES vpp.svg DESTINATION ${CMAKE_INSTALL_DATADIR}/icons/hicolor/scalable/vpp.svg)
install(CODE "EXECUTE_PROCESS(COMMAND ln -s . ./usr
WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX})")
install(CODE "EXECUTE_PROCESS(
diff --git a/src/tools/g2/clib.c b/src/tools/g2/clib.c
index 3cfc2637673..bb1f2026a43 100644
--- a/src/tools/g2/clib.c
+++ b/src/tools/g2/clib.c
@@ -21,7 +21,6 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
-#include <sys/fcntl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <ctype.h>
diff --git a/src/tools/g2/cpel.c b/src/tools/g2/cpel.c
index 8bcc91e674e..0d1873431b7 100644
--- a/src/tools/g2/cpel.c
+++ b/src/tools/g2/cpel.c
@@ -21,7 +21,6 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
-#include <sys/fcntl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <ctype.h>
diff --git a/src/tools/g2/events.c b/src/tools/g2/events.c
index 09054b71324..ef85c208b3c 100644
--- a/src/tools/g2/events.c
+++ b/src/tools/g2/events.c
@@ -17,7 +17,6 @@
#include <stdlib.h>
#include <unistd.h>
#include <sys/stat.h>
-#include <sys/fcntl.h>
#include <sys/mman.h>
#include <arpa/inet.h>
#include <stdio.h>
diff --git a/src/tools/g2/pointsel.c b/src/tools/g2/pointsel.c
index 59822377219..fae93365e3d 100644
--- a/src/tools/g2/pointsel.c
+++ b/src/tools/g2/pointsel.c
@@ -169,7 +169,7 @@ static void down_button(void)
static void button_click_callback(GtkButton *item, gpointer data)
{
int i;
- enum button_click click = (enum button_click)data;
+ enum button_click click = (enum button_click) (long int) data;
switch (click) {
case ALL_BUTTON:
diff --git a/src/tools/g2/view1.c b/src/tools/g2/view1.c
index 3902c0a2dc1..7a6ae714e3f 100644
--- a/src/tools/g2/view1.c
+++ b/src/tools/g2/view1.c
@@ -2329,21 +2329,22 @@ out:
static void view1_button_click_callback(GtkButton *item, gpointer data)
{
- enum view1_button_click click = (enum view1_button_click) data;
- event_t *ep;
- ulonglong event_incdec;
- ulonglong current_width;
- ulonglong zoom_delta;
+ enum view1_button_click click = (enum view1_button_click) (long int) data;
+ event_t *ep;
+ ulonglong event_incdec;
+ ulonglong current_width;
+ ulonglong zoom_delta;
- current_width = s_v1->maxvistime - s_v1->minvistime;
- event_incdec = (current_width) / 3;
+ current_width = s_v1->maxvistime - s_v1->minvistime;
+ event_incdec = (current_width) / 3;
- if (event_incdec == 0LL)
- event_incdec = 1;
+ if (event_incdec == 0LL)
+ event_incdec = 1;
- zoom_delta = (s_v1->maxvistime - s_v1->minvistime) / 6;
+ zoom_delta = (s_v1->maxvistime - s_v1->minvistime) / 6;
- switch(click) {
+ switch (click)
+ {
case TOP_BUTTON:
/* First PID to top of window */
s_v1->first_pid_index = 0;
diff --git a/src/tools/perftool/c2cpel.c b/src/tools/perftool/c2cpel.c
index 72049054ae1..b02c506bd59 100644
--- a/src/tools/perftool/c2cpel.c
+++ b/src/tools/perftool/c2cpel.c
@@ -20,7 +20,6 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
-#include <sys/fcntl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <ctype.h>
diff --git a/src/tools/perftool/cpel_util.c b/src/tools/perftool/cpel_util.c
index 4dc1eaf3256..9667f080919 100644
--- a/src/tools/perftool/cpel_util.c
+++ b/src/tools/perftool/cpel_util.c
@@ -20,7 +20,6 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
-#include <sys/fcntl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <ctype.h>
diff --git a/src/tools/perftool/cpelatency.c b/src/tools/perftool/cpelatency.c
index 7b87d606cda..6a3d4f79b8a 100644
--- a/src/tools/perftool/cpelatency.c
+++ b/src/tools/perftool/cpelatency.c
@@ -21,7 +21,6 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
-#include <sys/fcntl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <ctype.h>
diff --git a/src/tools/perftool/cpeldump.c b/src/tools/perftool/cpeldump.c
index be0a70df24e..1ccfd6a91df 100644
--- a/src/tools/perftool/cpeldump.c
+++ b/src/tools/perftool/cpeldump.c
@@ -21,7 +21,6 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
-#include <sys/fcntl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <ctype.h>
diff --git a/src/tools/perftool/cpelinreg.c b/src/tools/perftool/cpelinreg.c
index 44399904237..007e727d1bf 100644
--- a/src/tools/perftool/cpelinreg.c
+++ b/src/tools/perftool/cpelinreg.c
@@ -27,7 +27,6 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
-#include <sys/fcntl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <ctype.h>
diff --git a/src/tools/perftool/cpelstate.c b/src/tools/perftool/cpelstate.c
index 3fd9ccb9c79..78d9c9752fd 100644
--- a/src/tools/perftool/cpelstate.c
+++ b/src/tools/perftool/cpelstate.c
@@ -21,7 +21,6 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
-#include <sys/fcntl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <ctype.h>
diff --git a/src/tools/perftool/delsvec.c b/src/tools/perftool/delsvec.c
index 724935d331e..d49ba98b94d 100644
--- a/src/tools/perftool/delsvec.c
+++ b/src/tools/perftool/delsvec.c
@@ -1,4 +1,4 @@
-/*
+/*
*------------------------------------------------------------------
* Copyright (c) 2006-2016 Cisco and/or its affiliates.
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,13 +24,13 @@
/*
* #define UNIT_TESTS 1
- * #define MATCH_TRACE 1
+ * #define MATCH_TRACE 1
*/
/*
* delsvec
* break up an input string into a vector of [null-terminated] u8 *'s
- *
+ *
* Each supplied delimiter character results in a string in the output
* vector, unless the delimiters occur back-to-back. When matched,
* a whitespace character in the delimiter consumes an arbitrary
@@ -46,270 +46,316 @@
static u8 **string_cache;
static u8 **svec_cache;
-void delsvec_recycle_this_string (u8 *s)
+void
+delsvec_recycle_this_string (u8 *s)
{
- if (s) {
- _vec_len (s) = 0;
- vec_add1(string_cache, s);
+ if (s)
+ {
+ vec_set_len (s, 0);
+ vec_add1 (string_cache, s);
}
}
-void delsvec_recycle_this_svec (u8 **svec)
+void
+delsvec_recycle_this_svec (u8 **svec)
{
- if (svec) {
- if (svec_cache) {
- vec_free (svec_cache);
- }
- _vec_len (svec) = 0;
- svec_cache = svec;
+ if (svec)
+ {
+ if (svec_cache)
+ {
+ vec_free (svec_cache);
+ }
+ vec_set_len (svec, 0);
+ svec_cache = svec;
}
}
-int pvl (char *a)
+int
+pvl (char *a)
{
- return vec_len(a);
+ return vec_len (a);
}
-u8 **delsvec(void *input_arg, char *fmt)
+u8 **
+delsvec (void *input_arg, char *fmt)
{
- u8 **rv = 0;
- int input_index=0;
- u8 *this;
- int dirflag=0;
- int i;
- u8 *input = input_arg;
+ u8 **rv = 0;
+ int input_index = 0;
+ u8 *this;
+ int dirflag = 0;
+ int i;
+ u8 *input = input_arg;
- if (svec_cache) {
- rv = svec_cache;
- svec_cache = 0;
+ if (svec_cache)
+ {
+ rv = svec_cache;
+ svec_cache = 0;
}
- while (fmt) {
- dirflag=0;
- if (vec_len (string_cache) > 0) {
- this = string_cache [vec_len(string_cache)-1];
- _vec_len (string_cache) = vec_len (string_cache) - 1;
- } else
- this = 0;
- /*
- * '*' means one of two things: match the rest of the input,
- * or match as many characters as possible
- */
- if (fmt[0] == '*') {
- fmt++;
- dirflag=1;
- /*
- * no more format: eat rest of string...
- */
- if (!fmt[0]) {
- for (;input[input_index]; input_index++)
- vec_add1(this, input[input_index]);
- if (vec_len(this)) {
- vec_add1(this, 0);
+ while (fmt)
+ {
+ dirflag = 0;
+ if (vec_len (string_cache) > 0)
+ {
+ this = string_cache[vec_len (string_cache) - 1];
+ vec_set_len (string_cache, vec_len (string_cache) - 1);
+ }
+ else
+ this = 0;
+ /*
+ * '*' means one of two things: match the rest of the input,
+ * or match as many characters as possible
+ */
+ if (fmt[0] == '*')
+ {
+ fmt++;
+ dirflag = 1;
+ /*
+ * no more format: eat rest of string...
+ */
+ if (!fmt[0])
+ {
+ for (; input[input_index]; input_index++)
+ vec_add1 (this, input[input_index]);
+ if (vec_len (this))
+ {
+ vec_add1 (this, 0);
#ifdef MATCH_TRACE
- printf("final star-match adds: '%s'\n", this);
+ printf ("final star-match adds: '%s'\n", this);
#endif
- vec_add1(rv, this);
- } else {
- vec_add1(string_cache, this);
- }
+ vec_add1 (rv, this);
+ }
+ else
+ {
+ vec_add1 (string_cache, this);
+ }
- return(rv);
- }
- }
- /*
- * Left-to-right scan, adding chars until next delimiter char
- * appears.
- */
- if (!dirflag) {
- while (input[input_index]) {
- if (input[input_index] == fmt[0]) {
- /* If we just (exact) matched a whitespace delimiter */
- if (fmt[0] == ' '){
- /* scan forward eating whitespace */
- while (input[input_index] == ' ' ||
- input[input_index] == '\t' ||
- input[input_index] == '\n')
- input_index++;
- input_index--;
- }
- goto found;
- }
- /* If we're looking for whitespace */
- if (fmt[0] == ' ') {
- /* and we have whitespace */
- if (input[input_index] == ' ' ||
- input[input_index] == '\t' ||
- input[input_index] == '\n') {
- /* scan forward eating whitespace */
- while (input[input_index] == ' ' ||
- input[input_index] == '\t' ||
- input[input_index] == '\n') {
- input_index++;
- }
- input_index--;
- goto found;
- }
- }
- /* Not a delimiter, save it */
- vec_add1(this, input[input_index]);
- input_index++;
- }
- /*
- * Fell off the wagon, clean up and bail out
- */
- bail:
+ return (rv);
+ }
+ }
+ /*
+ * Left-to-right scan, adding chars until next delimiter char
+ * appears.
+ */
+ if (!dirflag)
+ {
+ while (input[input_index])
+ {
+ if (input[input_index] == fmt[0])
+ {
+ /* If we just (exact) matched a whitespace delimiter */
+ if (fmt[0] == ' ')
+ {
+ /* scan forward eating whitespace */
+ while (input[input_index] == ' ' ||
+ input[input_index] == '\t' ||
+ input[input_index] == '\n')
+ input_index++;
+ input_index--;
+ }
+ goto found;
+ }
+ /* If we're looking for whitespace */
+ if (fmt[0] == ' ')
+ {
+ /* and we have whitespace */
+ if (input[input_index] == ' ' ||
+ input[input_index] == '\t' || input[input_index] == '\n')
+ {
+ /* scan forward eating whitespace */
+ while (input[input_index] == ' ' ||
+ input[input_index] == '\t' ||
+ input[input_index] == '\n')
+ {
+ input_index++;
+ }
+ input_index--;
+ goto found;
+ }
+ }
+ /* Not a delimiter, save it */
+ vec_add1 (this, input[input_index]);
+ input_index++;
+ }
+ /*
+ * Fell off the wagon, clean up and bail out
+ */
+ bail:
#ifdef MATCH_TRACE
- printf("failed, fmt[0] = '%c', input[%d]='%s'\n",
- fmt[0], input_index, &input[input_index]);
+ printf ("failed, fmt[0] = '%c', input[%d]='%s'\n", fmt[0],
+ input_index, &input[input_index]);
#endif
- delsvec_recycle_this_string(this);
- for (i = 0; i < vec_len(rv); i++)
- delsvec_recycle_this_string(rv[i]);
- delsvec_recycle_this_svec(rv);
- return(0);
-
- found:
- /*
- * Delimiter matched
- */
- input_index++;
- fmt++;
- /*
- * If we actually accumulated non-delimiter characters,
- * add them to the result vector
- */
- if (vec_len(this)) {
- vec_add1(this, 0);
+ delsvec_recycle_this_string (this);
+ for (i = 0; i < vec_len (rv); i++)
+ delsvec_recycle_this_string (rv[i]);
+ delsvec_recycle_this_svec (rv);
+ return (0);
+
+ found:
+ /*
+ * Delimiter matched
+ */
+ input_index++;
+ fmt++;
+ /*
+ * If we actually accumulated non-delimiter characters,
+ * add them to the result vector
+ */
+ if (vec_len (this))
+ {
+ vec_add1 (this, 0);
#ifdef MATCH_TRACE
- printf("match: add '%s'\n", this);
+ printf ("match: add '%s'\n", this);
#endif
- vec_add1(rv, this);
- } else {
- vec_add1(string_cache, this);
- }
- } else {
- /*
- * right-to-left scan, '*' not at
- * the end of the delimiter string
- */
- i = input_index;
- while (input[++i])
- ; /* scan forward */
- i--;
- while (i > input_index) {
- if (input[i] == fmt[0])
- goto found2;
-
- if (fmt[0] == ' ' || fmt[0] == '\t' ||
- fmt[0] == '\n') {
- if (input[i] == ' ' ||
- input[i] == '\t' ||
- input[i] == '\n')
- goto found2;
- }
- i--;
- }
- goto bail;
+ vec_add1 (rv, this);
+ }
+ else
+ {
+ vec_add1 (string_cache, this);
+ }
+ }
+ else
+ {
+ /*
+ * right-to-left scan, '*' not at
+ * the end of the delimiter string
+ */
+ i = input_index;
+ while (input[++i])
+ ; /* scan forward */
+ i--;
+ while (i > input_index)
+ {
+ if (input[i] == fmt[0])
+ goto found2;
- found2:
- for (; input_index < i; input_index++) {
- vec_add1(this, input[input_index]);
- }
- input_index++;
- fmt++;
- vec_add1(this, 0);
+ if (fmt[0] == ' ' || fmt[0] == '\t' || fmt[0] == '\n')
+ {
+ if (input[i] == ' ' || input[i] == '\t' || input[i] == '\n')
+ goto found2;
+ }
+ i--;
+ }
+ goto bail;
+
+ found2:
+ for (; input_index < i; input_index++)
+ {
+ vec_add1 (this, input[input_index]);
+ }
+ input_index++;
+ fmt++;
+ vec_add1 (this, 0);
#ifdef MATCH_TRACE
- printf("inner '*' match: add '%s'\n", this);
+ printf ("inner '*' match: add '%s'\n", this);
#endif
- vec_add1(rv, this);
- }
+ vec_add1 (rv, this);
+ }
}
- return (rv);
+ return (rv);
}
#ifdef UNIT_TESTS
-typedef struct utest_ {
- char *string;
- char *fmt;
+typedef struct utest_
+{
+ char *string;
+ char *fmt;
} utest_t;
utest_t tests[] = {
#ifdef NOTDEF
- {"Dec 7 08:56",
- " :*"},
- {"Dec 17 08:56",
- " :*"},
- {"Dec 7 08:56:41.239 install/inst_repl 0/9/CPU0 t1 [40989] File List:Successfully blobbified file list. Took 1 milliseconds",
- " ::. / // [] *"},
- {"RP/0/9/CPU0:Dec 7 08:55:28.550 : sam_server[291]: SAM backs up digest list to memory file",
- "///: ::. : []: *"},
- /* Expected to fail */
- {"Dec 7 08:56:41.239 install/inst_repl 0/9/CPU0 t1 [40989] File List:Successfully blobbified file list. Took 1 milliseconds",
- "///: ::. : : *"},
- /* Expected to fail */
- {"RP/0/9/CPU0:Dec 7 08:55:28.550 : sam_server[291]: SAM backs up digest list to memory file",
- " ::. / // [] *"},
- {"THIS that and + theother", "*+ *"},
- {"Dec 12 15:33:07.103 ifmgr/errors 0/RP0/CPU0 3# t2 Failed to open IM connection: No such file or directory", " ::. / // *"},
- {"Dec 16 21:43:47.328 ifmgr/bulk 0/3/CPU0 t8 Bulk DPC async download complete. Partitions 1, node_count 1, total_out 0, out_offset 0, out_expected 0: No error"," ::. / // *"},
- {"t:0x53034bd6 CPU:00 PROCESS :PROCCREATE_NAME",
- ": : :*"},
- {" pid:1", " *"},
- {"t:0x53034cbb CPU:00 THREAD :THCREATE pid:1 tid:1",
- ": : : pid: tid:*"},
- {"t:0x5303f950 CPU:00 COMM :REC_PULSE scoid:0x40000003 pid:364659",
- ": : : *"},
- {"/hfr-base-3.3.85/lib/libttyconnection.dll 0xfc000000 0x0000306c 0xfc027000 0x000001c8 1",
- " *"},
- {"Feb 28 02:38:26.123 seqtrace 0/1/CPU0 t8 :msg_receive:ifmgr/t8:IMC_MSG_MTU_UPDATE:ppp_ma/t1",
- " ::. // ::::*"},
+ { "Dec 7 08:56", " :*" },
+ { "Dec 17 08:56", " :*" },
+ { "Dec 7 08:56:41.239 install/inst_repl 0/9/CPU0 t1 [40989] File "
+ "List:Successfully blobbified file list. Took 1 milliseconds",
+ " ::. / // [] *" },
+ { "RP/0/9/CPU0:Dec 7 08:55:28.550 : sam_server[291]: SAM backs up digest "
+ "list to memory file",
+ "///: ::. : []: *" },
+ /* Expected to fail */
+ { "Dec 7 08:56:41.239 install/inst_repl 0/9/CPU0 t1 [40989] File "
+ "List:Successfully blobbified file list. Took 1 milliseconds",
+ "///: ::. : : *" },
+ /* Expected to fail */
+ { "RP/0/9/CPU0:Dec 7 08:55:28.550 : sam_server[291]: SAM backs up digest "
+ "list to memory file",
+ " ::. / // [] *" },
+ { "THIS that and + theother", "*+ *" },
+ { "Dec 12 15:33:07.103 ifmgr/errors 0/RP0/CPU0 3# t2 Failed to open IM "
+ "connection: No such file or directory",
+ " ::. / // *" },
+ { "Dec 16 21:43:47.328 ifmgr/bulk 0/3/CPU0 t8 Bulk DPC async download "
+ "complete. Partitions 1, node_count 1, total_out 0, out_offset 0, "
+ "out_expected 0: No error",
+ " ::. / // *" },
+ { "t:0x53034bd6 CPU:00 PROCESS :PROCCREATE_NAME", ": : :*" },
+ { " pid:1", " *" },
+ { "t:0x53034cbb CPU:00 THREAD :THCREATE pid:1 tid:1",
+ ": : : pid: tid:*" },
+ { "t:0x5303f950 CPU:00 COMM :REC_PULSE scoid:0x40000003 pid:364659",
+ ": : : *" },
+ { "/hfr-base-3.3.85/lib/libttyconnection.dll 0xfc000000 0x0000306c "
+ "0xfc027000 0x000001c8 1",
+ " *" },
+ { "Feb 28 02:38:26.123 seqtrace 0/1/CPU0 t8 "
+ ":msg_receive:ifmgr/t8:IMC_MSG_MTU_UPDATE:ppp_ma/t1",
+ " ::. // ::::*" },
- {"Feb 28 02:38:26.123 seqtrace 0/1/CPU0 t8 :msg_send_event:call:ifmgr/t8:124/0:cdp/t1",
- " ::. // :msg_send_event::::*"},
+ { "Feb 28 02:38:26.123 seqtrace 0/1/CPU0 t8 "
+ ":msg_send_event:call:ifmgr/t8:124/0:cdp/t1",
+ " ::. // :msg_send_event::::*" },
- {"Feb 28 02:38:26.125 seqtrace 0/1/CPU0 t1 :msg_receive_event:cdp/t1:124/0",
- " ::. // :msg_receive_event::*"}
- {"t:0x645dd86d CPU:00 USREVENT:EVENT:100, d0:0x00000002 d1:0x00000000",
- ": : USREVENT:EVENT:, d0: *"}
- {"t:0x5303f950 CPU:00 COMM :REC_PULSE scoid:0x40000003 pid:364659",
- ": : : *"},
- {"t:0x2ccf9f5a CPU:00 INT_ENTR:0x80000000 (-2147483648) IP:0x002d8b18",
- ": : INT_ENTR: IP:*"}
- {"t:0xd473951c CPU:00 KER_EXIT:SCHED_GET/88 ret_val:2 sched_priority:10",
- ": : KER_EXIT:SCHED_GET : sched_priority:*"}
- {"t:0x00000123 CPU:01 SYSTEM :FUNC_ENTER thisfn:0x40e62048 call_site:0x00000000",
- ": : SYSTEM :FUNC_ thisfn: *"},
- {"t:0x5af8de95 CPU:00 INT_HANDLER_ENTR:0x0000004d (77) PID:8200 IP:0x00000000 AREA:0x0bf9b290", ": : INT_HANDLER_*"},
+ { "Feb 28 02:38:26.125 seqtrace 0/1/CPU0 t1 "
+ ":msg_receive_event:cdp/t1:124/0",
+ " ::. // :msg_receive_event::*" } {
+ "t:0x645dd86d CPU:00 USREVENT:EVENT:100, d0:0x00000002 d1:0x00000000",
+ ": : USREVENT:EVENT:, d0: *" } {
+ "t:0x5303f950 CPU:00 COMM :REC_PULSE scoid:0x40000003 pid:364659",
+ ": : : *" },
+ { "t:0x2ccf9f5a CPU:00 INT_ENTR:0x80000000 (-2147483648) "
+ "IP:0x002d8b18",
+ ": : INT_ENTR: IP:*" } {
+ "t:0xd473951c CPU:00 KER_EXIT:SCHED_GET/88 ret_val:2 sched_priority:10",
+ ": : KER_EXIT:SCHED_GET : sched_priority:*" } {
+ "t:0x00000123 CPU:01 SYSTEM :FUNC_ENTER thisfn:0x40e62048 "
+ "call_site:0x00000000",
+ ": : SYSTEM :FUNC_ thisfn: *" },
+ { "t:0x5af8de95 CPU:00 INT_HANDLER_ENTR:0x0000004d (77) PID:8200 "
+ "IP:0x00000000 AREA:0x0bf9b290",
+ ": : INT_HANDLER_*" },
#endif
- {"t:0x6d1ff92f CPU:00 CONTROL: BUFFER sequence = 1053, num_events = 714",
- ": : CONTROL*"},
- {"t:0x6d1ff92f CPU:00 CONTROL :TIME msb:0x0000003c lsb(offset):0x6d1ff921",
- ": : CONTROL*"},
+ { "t:0x6d1ff92f CPU:00 CONTROL: BUFFER sequence = 1053, num_events = 714",
+ ": : CONTROL*" },
+ { "t:0x6d1ff92f CPU:00 CONTROL :TIME msb:0x0000003c lsb(offset):0x6d1ff921",
+ ": : CONTROL*" },
};
-int main (int argc, char **argv)
+int
+main (int argc, char **argv)
{
- int i, j;
- u8 **svec;
+ int i, j;
+ u8 **svec;
- for (j = 0; j < ARRAY_LEN(tests); j++) {
- printf ("input string: '%s'\n", tests[j].string);
- printf ("delimiter arg: '%s'\n", tests[j].fmt);
- printf ("parse trace:\n");
- svec = delsvec(tests[j].string, tests[j].fmt);
- if (!svec) {
- printf("index %d failed\n", j);
- continue;
- }
- printf("%d substring vectors\n", vec_len(svec));
- for (i = 0; i < vec_len(svec); i++) {
- printf("[%d]: '%s'\n", i, svec[i]);
- }
- printf ("-------------------\n");
+ for (j = 0; j < ARRAY_LEN (tests); j++)
+ {
+ printf ("input string: '%s'\n", tests[j].string);
+ printf ("delimiter arg: '%s'\n", tests[j].fmt);
+ printf ("parse trace:\n");
+ svec = delsvec (tests[j].string, tests[j].fmt);
+ if (!svec)
+ {
+ printf ("index %d failed\n", j);
+ continue;
+ }
+ printf ("%d substring vectors\n", vec_len (svec));
+ for (i = 0; i < vec_len (svec); i++)
+ {
+ printf ("[%d]: '%s'\n", i, svec[i]);
+ }
+ printf ("-------------------\n");
}
- exit(0);
+ exit (0);
}
#endif
diff --git a/src/tools/vppapigen/CMakeLists.txt b/src/tools/vppapigen/CMakeLists.txt
index bfabc3a670c..97a6d35f9b5 100644
--- a/src/tools/vppapigen/CMakeLists.txt
+++ b/src/tools/vppapigen/CMakeLists.txt
@@ -11,6 +11,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+find_package(
+ Python3
+ REQUIRED
+ COMPONENTS Interpreter
+)
+
+execute_process(
+ COMMAND ${Python3_EXECUTABLE} -c "import ply"
+ RESULT_VARIABLE _rv
+ OUTPUT_QUIET
+)
+
+if (NOT ${_rv} EQUAL 0)
+ message( FATAL_ERROR "The \"ply\" Python3 package is not installed.")
+endif()
+
install(
FILES vppapigen.py
RENAME vppapigen
@@ -27,7 +43,7 @@ install(
vppapigen_json.py
generate_json.py
DESTINATION
- share/vpp
+ ${CMAKE_INSTALL_DATADIR}/vpp
COMPONENT
vpp-dev
)
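
The probe added above fails the configure step early when the Python "ply"
package required by vppapigen is missing. For reference only (this is not part
of the change itself), the same check can be expressed directly in Python:

    # Illustrative stand-alone equivalent of the CMake "import ply" probe:
    # exit status 0 if ply can be imported, 1 otherwise.
    import importlib.util
    import sys

    sys.exit(0 if importlib.util.find_spec("ply") else 1)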
diff --git a/src/tools/vppapigen/VPPAPI.rst b/src/tools/vppapigen/VPPAPI.rst
index 5b172a8c758..e8144803a87 100644
--- a/src/tools/vppapigen/VPPAPI.rst
+++ b/src/tools/vppapigen/VPPAPI.rst
@@ -402,3 +402,196 @@ Future considerations
- Embed JSON definitions into the API server, so dynamic languages
can download them directly without going via the filesystem and JSON
files.
+
+API Change Process
+------------------
+
+Purpose
+~~~~~~~
+
+To minimize disruption for the consumers of the VPP API, while permitting
+innovation within VPP itself.
+
+Historically, API changes in the VPP master branch were allowed at any point
+in time outside of a small window between the API freeze milestone and the RC1
+milestone. API changes on the throttle branches were not permitted at all.
+This model proved workable; however, all the production use cases ended up on
+throttle branches, with a lot of forklift activity when it was time to upgrade
+to the next branch.
+
+This formally structured API change process harmonizes the behavior across all
+the VPP branches, and allows more flexibility for the consumer, while permitting
+the innovation in the VPP itself.
+
+The Core Promise
+~~~~~~~~~~~~~~~~
+
+"If a user is running a VPP version N and does not use any deprecated APIs, they
+should be able to simply upgrade the VPP to version N+1 and there should be no
+API breakage".
+
+In-Progress, Production and Deprecated APIs
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This proposal adds a classification of stability of an API call:
+
+- "In-Progress": APIs in the process of the development, experimentation, and
+ limited testing.
+
+- "Production": tested as part of the "make test", considered stable for general
+ usage.
+
+- "Deprecated": used as a flag on Production APIs which are slated to be
+ deprecated in the future release.
+
+The "In-Progress" APIs or the APIs with the semantic version of 0.x.y are not
+subject to any stability checks, thus the developers are free to introduce them,
+modify their signatures, and as well remove them completely at will. The users
+should not use the in-progress APIs without the interactions with its
+maintainers, nor base the production code on those APIs. The goal of
+"in-progress" APIs to allow rapid iteration and modifications to ensure the API
+signature and function is stabilized. These API calls may be used for testing or
+experimentation and prototyping.
+
+When the maintainer is satisfied with the quality of the APIs, and has ensured
+that they are tested as part of the "make test" runs, they can transition
+their status to "Production".
+
+The "Production" APIs can *NOT* be changed in any way that modifies their
+representation on the wire and the signature (thus CRC). The only change that
+they may incur is to be marked as "Deprecated". These are the APIs that the
+downstream users can use for production purposes. They exist to fulfill a core
+promise of this process: The "Deprecated" APIs are the "Production" APIs that
+are about to be deleted. To ensure the above core promise is maintained, if the
+API call was marked as deprecated at any point between RC1 of release N and RC1
+of release N+1, it MUST NOT be deleted until the RC1 milestone of the
+release N+2. The deprecated API SHOULD specify a replacement API - which MUST
+be a Production API, so as not to decrease the level of stability.
+
+
+The time interval between the commit that marks an API as deprecated and the
+commit that deletes that API MUST be at least equal to the time between two
+subsequent releases (currently 4 months).
+
+
+Doing so gives a good heads-up to those who are relying on the
+"one free upgrade" property, so that they can proactively catch and test the
+transition away from the deprecated APIs on the master branch.
+
+
+Marking an API as deprecated just one day before the RC1 branch pull and then
+deleting that API one day after does *technically* satisfy the "one free
+upgrade" promise, but is rather hostile to the users who are proactively
+tracking it.
+
+Semantic API Versioning
+~~~~~~~~~~~~~~~~~~~~~~~
+
+VPP APIs use semantic versioning according to semver.org, with the compatibility
+logic being applied at the moment the messages are marked as deprecated.
+
+To discuss: e.g. if message_2 is introduced and it deprecates message_1, then
+that same commit should increase the major version of the API.
+
+The 0.x.x API versions, by virtue of being in-progress, are exempt from this
+treatment.
+
+Tooling
+~~~~~~~
+
+See https://gerrit.fd.io/r/c/vpp/+/26881:
+
+crcchecker.py is a tool to enforce the policy, with a few other bonus uses:
+
+extras/scripts/crcchecker.py --check-patchset  # returns -1 if backwards incompatible
+extras/scripts/crcchecker.py --dump-manifest
+extras/scripts/crcchecker.py --git-revision v20.01 <files>
+extras/scripts/crcchecker.py -- diff <oldfile> <newfile>
+
+Notice that you can use this tool to get the list of API changes since a given past release.
+
+The policy:
+
+.. highlight:: none
+
+.. code-block::
+
+ 1. Production APIs should never change.
+ The definition of a "production API" is if the major version in
+ the API file is > 0 that is not marked as "in-progress".
+ 2. APIs that are experimental / not released are not checked.
+ An API message can be individually marked as in progress,
+ by adding the following in the API definition:
+ option in_progress;
+ 3. An API can be deprecated in three-to-six steps (the steps
+ with letters can be combined or split, depending on situation):
+ Step 1a: A new "in-progress" API new_api_2 is added that
+ is deemed to be a replacement.
+ Step 1b: The existing API is marked as "replaced_by" this new API:
+ option replaced_by="new_api_2";
+ Step 2a: The new_api_2 is marked as production by deleting its in-progress status,
+ provided that this API does have sufficient test coverage to deem it well tested.
+ Step 2b: the existing API is marked as "deprecated":
+ option deprecated="optional short message to humans reading it";
+ Step 3: the deprecated API is deleted.
+
+There is a time constraint: the minimum interval between steps 2 and 3 must be
+at least 4 months. The proposal is to perform step 2 around a couple of weeks
+before the F0 milestone for a release, triggered by the release manager (and
+in the future by automated means).
+
+Use Cases
+~~~~~~~~~
+
+Adding A New Field To A Production API
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The simplest way to add a new field to a Production API message *foo_message* is
+to create a new In-Progress message *foo_message_v2*, and add the field to that
+one. Typically it will be an extension - so the API message handlers are
+trivially chained. If there are changes/adjustments that are needed, this new
+message can be freely altered without bothering the users of the Production API.
+
+When the maintainer is happy with the quality of the implementation, and
+foo_message_v2 is tested in "make test" to the same extent as foo_message,
+they can make two commits: one removing the in-progress status for
+foo_message_v2, and a second one deprecating foo_message and pointing to
+foo_message_v2 as the replacement. Technically, after the next throttle pull
+they can delete foo_message - the deprecation and the replacement will already
+be in the corresponding branch.
+
+Rapid Experimentation For A New Feature
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Add a message that is in-progress, and keep iterating with this message. This
+message is not subject to the change control process.
+
+An In-progress API Accidentally Marked As "production"
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This is expected to mainly apply during the initial 20.05->20.09 period; the
+proposal is to have it active for 4 weeks, from Jun 17 till July 15th, with
+the following process.
+
+If a developer finds that a given API or set of APIs is not ready for
+production due to a lack of tests and/or concerns about general API stability,
+then they:
+
+- Create a new gerrit change with *just* the marking of the API as
+ in_progress, subject being: "api: <feature> api message downgrade" and
+ a comment identifying which APIs are being downgraded and why.
+
+- Add ayourtch@gmail.com or the current Release Manager as a reviewer --
+ for help in guiding the process and to ensure that the gerrit change is not
+ forgotten.
+
+- Send an email to the vpp-dev mailing list with the subject being the same as
+  the one-liner commit message, a reference to the gerrit change, and the
+  reasoning.
+
+- Wait for the timeout period of two weeks for the feedback.
+
+- If no feedback is received, assume community agreement and commit the
+  change to the master branch.
+
+It needs to be highlighted that this process is an *exception* - normally the
+transition is always in_progress => production => deprecated.
+
+API Change Examples
+~~~~~~~~~~~~~~~~~~~
+
+https://gerrit.fd.io/r/q/+is:merged+message:%2522%255Eapi:.*%2524%2522
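
The policy above hinges on per-message CRCs, which vppapigen records in the
generated JSON representation of each .api file. As a rough illustration of
the idea only (this is not the actual extras/scripts/crcchecker.py), and
assuming the usual .api.json layout in which every "messages" entry is a list
that begins with the message name and ends with a dict carrying its "crc", a
minimal compatibility check could look like this:

    #!/usr/bin/env python3
    # Illustrative sketch, not the real crcchecker.py: compare per-message
    # CRCs between two generated .api.json files and report breaking changes.
    import json
    import sys


    def message_crcs(path):
        """Map message name -> CRC for one generated .api.json file."""
        with open(path, encoding="utf-8") as f:
            api = json.load(f)
        return {m[0]: m[-1]["crc"] for m in api.get("messages", [])}


    def main(old_file, new_file):
        old, new = message_crcs(old_file), message_crcs(new_file)
        rv = 0
        for name, crc in sorted(old.items()):
            if name not in new:
                print(f"{name}: message removed")
                rv = 1
            elif new[name] != crc:
                print(f"{name}: CRC changed {crc} -> {new[name]}")
                rv = 1
        return rv


    if __name__ == "__main__":
        sys.exit(main(sys.argv[1], sys.argv[2]))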
diff --git a/src/tools/vppapigen/generate_json.py b/src/tools/vppapigen/generate_json.py
index 610f84f5533..dc5cf9c1bbf 100755
--- a/src/tools/vppapigen/generate_json.py
+++ b/src/tools/vppapigen/generate_json.py
@@ -110,6 +110,15 @@ def main():
],
f.name,
),
+ "outputdir": "%s/%s/"
+ % (
+ output_path,
+ output_dir_map[
+ f.as_posix().split("/")[
+ src_dir_depth + BASE_DIR.count("/") - 1
+ ]
+ ],
+ ),
"input_file": f.as_posix(),
"includedir": [src_dir.as_posix()],
"output_module": "JSON",
diff --git a/src/tools/vppapigen/vppapigen.py b/src/tools/vppapigen/vppapigen.py
index 9abc5362d80..2b0ce9999d7 100755
--- a/src/tools/vppapigen/vppapigen.py
+++ b/src/tools/vppapigen/vppapigen.py
@@ -89,7 +89,7 @@ class VPPAPILexer:
"description": "DESCRIPTION",
}
- tokens = ["STRING_LITERAL", "ID", "NUM"] + list(reserved.values())
+ tokens = ["STRING_LITERAL", "COMMENT", "ID", "NUM"] + list(reserved.values())
t_ignore_LINE_COMMENT = "//.*"
@@ -125,9 +125,10 @@ class VPPAPILexer:
return t
# C or C++ comment (ignore)
- def t_comment(self, t):
+ def t_COMMENT(self, t):
r"(/\*(.|\n)*?\*/)|(//.*)"
t.lexer.lineno += t.value.count("\n")
+ return t
# Error handling rule
def t_error(self, t):
@@ -301,7 +302,7 @@ class Union(Processable):
class Define(Processable):
type = "Define"
- def __init__(self, name, flags, block):
+ def __init__(self, name, flags, block, comment=None):
self.name = name
self.flags = flags
self.block = block
@@ -311,6 +312,7 @@ class Define(Processable):
self.autoreply = False
self.autoendian = 0
self.options = {}
+ self.comment = comment
for f in flags:
if f == "dont_trace":
self.dont_trace = True
@@ -560,6 +562,7 @@ class VPPAPIParser:
self.logger = logger
self.fields = []
self.revision = revision
+ self.last_comment = None
def _parse_error(self, msg, coord):
raise ParseError("%s: %s" % (coord, msg))
@@ -600,6 +603,7 @@ class VPPAPIParser:
| union
| service
| paths
+ | comment
| counters"""
p[0] = p[1]
@@ -747,7 +751,8 @@ class VPPAPIParser:
def p_define(self, p):
"""define : DEFINE ID '{' block_statements_opt '}' ';'"""
self.fields = []
- p[0] = Define(p[2], [], p[4])
+ p[0] = Define(p[2], [], p[4], self.last_comment)
+ self.last_comment = None
def p_define_flist(self, p):
"""define : flist DEFINE ID '{' block_statements_opt '}' ';'"""
@@ -758,7 +763,8 @@ class VPPAPIParser:
self._token_coord(p, 1),
)
else:
- p[0] = Define(p[3], p[1], p[5])
+ p[0] = Define(p[3], p[1], p[5], self.last_comment)
+ self.last_comment = None
def p_flist(self, p):
"""flist : flag
@@ -865,6 +871,11 @@ class VPPAPIParser:
"""
p[0] = p[1]
+ def p_comment(self, p):
+ """comment : COMMENT"""
+ self.last_comment = p[1]
+ p[0] = []
+
def p_declaration(self, p):
"""declaration : type_specifier variable_name ';'
| type_specifier variable_name '[' field_options ']' ';'
@@ -955,26 +966,29 @@ class VPPAPIParser:
# Error rule for syntax errors
def p_error(self, p):
if p:
+ if p.type == "COMMENT":
+ self.parser.errok()
+ return
self._parse_error("before: %s" % p.value, self._coord(lineno=p.lineno))
else:
self._parse_error("At end of input", self.filename)
+ def build(self, **kwargs):
+ self.parser = yacc.yacc(module=self, **kwargs)
+
class VPPAPI:
def __init__(self, debug=False, filename="", logger=None, revision=None):
self.lexer = lex.lex(module=VPPAPILexer(filename), debug=debug)
- self.parser = yacc.yacc(
- module=VPPAPIParser(filename, logger, revision=revision),
- write_tables=False,
- debug=debug,
- )
+ self.parser = VPPAPIParser(filename, logger, revision=revision)
+ self.parser.build(write_tables=False, debug=debug)
self.logger = logger
self.revision = revision
self.filename = filename
def parse_string(self, code, debug=0, lineno=1):
self.lexer.lineno = lineno
- return self.parser.parse(code, lexer=self.lexer, debug=debug)
+ return self.parser.parser.parse(code, lexer=self.lexer, debug=debug)
def parse_fd(self, fd, debug=0):
data = fd.read()
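
With the lexer now returning COMMENT tokens and the parser keeping the most
recent one in last_comment, a block comment seen before a define is attached
to the resulting Define object; vppapigen_json.py (further below) then emits
it as a "comment" key. A sketch of what one resulting "messages" entry might
look like, with purely hypothetical names, fields and CRC:

    # Hypothetical shape of a single "messages" entry in the generated JSON:
    # the leading block comment is carried through to the trailing dict that
    # also holds the message CRC and options.
    example_message = [
        "my_example_request",          # message name (hypothetical)
        ["u32", "client_index"],
        ["u32", "context"],
        {
            "crc": "0x12345678",       # hypothetical CRC
            "options": {},
            "comment": "/* My example request message */",
        },
    ]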
diff --git a/src/tools/vppapigen/vppapigen_c.py b/src/tools/vppapigen/vppapigen_c.py
index a065653e391..c2e1e7da7b7 100644..100755
--- a/src/tools/vppapigen/vppapigen_c.py
+++ b/src/tools/vppapigen/vppapigen_c.py
@@ -123,7 +123,6 @@ class ToJSON:
)
)
else:
-
write(
' cJSON_AddStringToObject(o, "{n}", (char *)a->{n});\n'.format(
n=o.fieldname
@@ -366,7 +365,7 @@ class FromJSON:
write(" char *p = cJSON_GetStringValue(item);\n")
write(" size_t plen = strlen(p);\n")
write(
- " {msgvar} = cJSON_realloc({msgvar}, {msgsize} + plen, {msgsize});\n".format(
+ " {msgvar} = cJSON_realloc({msgvar}, {msgsize} + plen);\n".format(
msgvar=msgvar, msgsize=msgsize
)
)
@@ -435,7 +434,7 @@ class FromJSON:
cJSON *array = cJSON_GetObjectItem(o, "{n}");
int size = cJSON_GetArraySize(array);
{lfield} = size;
- {realloc} = cJSON_realloc({realloc}, {msgsize} + sizeof({t}) * size, {msgsize});
+ {realloc} = cJSON_realloc({realloc}, {msgsize} + sizeof({t}) * size);
{t} *d = (void *){realloc} + {msgsize};
{msgsize} += sizeof({t}) * size;
for (i = 0; i < size; i++) {{
@@ -462,12 +461,12 @@ class FromJSON:
write(
" {realloc} = cJSON_realloc({realloc}, {msgsize} + "
- "vec_len(s), {msgsize});\n".format(
+ "vec_len(s));\n".format(
msgvar=msgvar, msgsize=msgsize, realloc=realloc
)
)
write(
- " memcpy((void *){realloc} + {msgsize}, s, "
+ " clib_memcpy((void *){realloc} + {msgsize}, s, "
"vec_len(s));\n".format(realloc=realloc, msgsize=msgsize)
)
write(" {msgsize} += vec_len(s);\n".format(msgsize=msgsize))
@@ -751,6 +750,17 @@ TOP_BOILERPLATE = """\
#endif
#define VL_API_PACKED(x) x __attribute__ ((packed))
+
+/*
+ * Note: VL_API_MAX_ARRAY_SIZE is set to an arbitrarily large limit.
+ *
+ * However, any message with a ~2 billion element array is likely to break the
+ * api handling long before this limit causes array element endian issues.
+ *
+ * Applications should be written to create reasonable api messages.
+ */
+#define VL_API_MAX_ARRAY_SIZE 0x7fffffff
+
"""
BOTTOM_BOILERPLATE = """\
@@ -1028,9 +1038,9 @@ def printfun(objs, stream, modulename):
"""
signature = """\
-static inline void *vl_api_{name}_t_print{suffix} (vl_api_{name}_t *a, void *handle)
+static inline u8 *vl_api_{name}_t_format (u8 *s, va_list *args)
{{
- u8 *s = 0;
+ __attribute__((unused)) vl_api_{name}_t *a = va_arg (*args, vl_api_{name}_t *);
u32 indent __attribute__((unused)) = 2;
int i __attribute__((unused));
"""
@@ -1041,27 +1051,14 @@ static inline void *vl_api_{name}_t_print{suffix} (vl_api_{name}_t *a, void *han
pp = Printfun(stream)
for t in objs:
if t.manual_print:
- write("/***** manual: vl_api_%s_t_print *****/\n\n" % t.name)
+ write("/***** manual: vl_api_%s_t_format *****/\n\n" % t.name)
continue
write(signature.format(name=t.name, suffix=""))
write(" /* Message definition: vl_api_{}_t: */\n".format(t.name))
write(' s = format(s, "vl_api_%s_t:");\n' % t.name)
for o in t.block:
pp.print_obj(o, stream)
- write(" vec_add1(s, 0);\n")
- write(" vl_print (handle, (char *)s);\n")
- write(" vec_free (s);\n")
- write(" return handle;\n")
- write("}\n\n")
-
- write(signature.format(name=t.name, suffix="_json"))
- write(" cJSON * o = vl_api_{}_t_tojson(a);\n".format(t.name))
- write(" (void)s;\n")
- write(" char *out = cJSON_Print(o);\n")
- write(" vl_print(handle, out);\n")
- write(" cJSON_Delete(o);\n")
- write(" cJSON_free(out);\n")
- write(" return handle;\n")
+ write(" return s;\n")
write("}\n\n")
write("\n#endif")
@@ -1103,7 +1100,7 @@ static inline u8 *format_vl_api_{name}_t (u8 *s, va_list * args)
continue
if t.manual_print:
- write("/***** manual: vl_api_%s_t_print *****/\n\n" % t.name)
+ write("/***** manual: vl_api_%s_t_format *****/\n\n" % t.name)
continue
if t.__class__.__name__ == "Using":
@@ -1146,9 +1143,15 @@ ENDIAN_STRINGS = {
}
+def get_endian_string(o, fieldtype):
+ """Return proper endian string conversion function"""
+ return ENDIAN_STRINGS[fieldtype]
+
+
def endianfun_array(o):
"""Generate endian functions for arrays"""
forloop = """\
+ ASSERT((u32){length} <= (u32)VL_API_MAX_ARRAY_SIZE);
for (i = 0; i < {length}; i++) {{
a->{name}[i] = {format}(a->{name}[i]);
}}
@@ -1156,7 +1159,7 @@ def endianfun_array(o):
forloop_format = """\
for (i = 0; i < {length}; i++) {{
- {type}_endian(&a->{name}[i]);
+ {type}_endian(&a->{name}[i], to_net);
}}
"""
@@ -1165,9 +1168,20 @@ def endianfun_array(o):
output += " /* a->{n} = a->{n} (no-op) */\n".format(n=o.fieldname)
else:
lfield = "a->" + o.lengthfield if o.lengthfield else o.length
+ if o.lengthfield:
+ output += (
+ f" u32 count = to_net ? clib_host_to_net_u32(a->{o.lengthfield}) : "
+ f"a->{o.lengthfield};\n"
+ )
+ lfield = "count"
+ else:
+ lfield = o.length
+
if o.fieldtype in ENDIAN_STRINGS:
output += forloop.format(
- length=lfield, format=ENDIAN_STRINGS[o.fieldtype], name=o.fieldname
+ length=lfield,
+ format=get_endian_string(o, o.fieldtype),
+ name=o.fieldname,
)
else:
output += forloop_format.format(
@@ -1194,10 +1208,10 @@ def endianfun_obj(o):
return output
if o.fieldtype in ENDIAN_STRINGS:
output += " a->{name} = {format}(a->{name});\n".format(
- name=o.fieldname, format=ENDIAN_STRINGS[o.fieldtype]
+ name=o.fieldname, format=get_endian_string(o, o.fieldtype)
)
elif o.fieldtype.startswith("vl_api_"):
- output += " {type}_endian(&a->{name});\n".format(
+ output += " {type}_endian(&a->{name}, to_net);\n".format(
type=o.fieldtype, name=o.fieldname
)
else:
@@ -1216,17 +1230,20 @@ def endianfun(objs, modulename):
#define included_{module}_endianfun
#undef clib_net_to_host_uword
+#undef clib_host_to_net_uword
#ifdef LP64
#define clib_net_to_host_uword clib_net_to_host_u64
+#define clib_host_to_net_uword clib_host_to_net_u64
#else
#define clib_net_to_host_uword clib_net_to_host_u32
+#define clib_host_to_net_uword clib_host_to_net_u32
#endif
"""
output = output.format(module=modulename)
signature = """\
-static inline void vl_api_{name}_t_endian (vl_api_{name}_t *a)
+static inline void vl_api_{name}_t_endian (vl_api_{name}_t *a, bool to_net)
{{
int i __attribute__((unused));
"""
@@ -1235,7 +1252,7 @@ static inline void vl_api_{name}_t_endian (vl_api_{name}_t *a)
if t.__class__.__name__ == "Enum" or t.__class__.__name__ == "EnumFlag":
output += signature.format(name=t.name)
if t.enumtype in ENDIAN_STRINGS:
- output += " *a = {}(*a);\n".format(ENDIAN_STRINGS[t.enumtype])
+ output += " *a = {}(*a);\n".format(get_endian_string(t, t.enumtype))
else:
output += " /* a->{name} = a->{name} (no-op) */\n".format(
name=t.name
@@ -1255,7 +1272,9 @@ static inline void vl_api_{name}_t_endian (vl_api_{name}_t *a)
name=t.name
)
elif t.alias["type"] in FORMAT_STRINGS:
- output += " *a = {}(*a);\n".format(ENDIAN_STRINGS[t.alias["type"]])
+ output += " *a = {}(*a);\n".format(
+ get_endian_string(t, t.alias["type"])
+ )
else:
output += " /* Not Implemented yet {} */".format(t.name)
output += "}\n\n"
@@ -1330,7 +1349,7 @@ static inline uword vl_api_{name}_t_calc_size (vl_api_{name}_t *a)
)
lf = m[0]
if lf.fieldtype in ENDIAN_STRINGS:
- output += f" + {ENDIAN_STRINGS[lf.fieldtype]}(a->{b.lengthfield}) * sizeof(a->{b.fieldname}[0])"
+ output += f" + {get_endian_string(b, lf.fieldtype)}(a->{b.lengthfield}) * sizeof(a->{b.fieldname}[0])"
elif lf.fieldtype == "u8":
output += (
f" + a->{b.lengthfield} * sizeof(a->{b.fieldname}[0])"
@@ -1525,22 +1544,24 @@ def generate_c_boilerplate(services, defines, counters, file_crc, module, stream
#undef vl_calsizefun
/* instantiate all the print functions we know about */
-#define vl_print(handle, ...) vlib_cli_output (handle, __VA_ARGS__)
#define vl_printfun
#include "{module}.api.h"
#undef vl_printfun
+#include "{module}.api_json.h"
"""
write(hdr.format(module=module))
- write("static u16\n")
- write("setup_message_id_table (void) {\n")
- write(" api_main_t *am = my_api_main;\n")
- write(" vl_msg_api_msg_config_t c;\n")
- write(
- ' u16 msg_id_base = vl_msg_api_get_msg_ids ("{}_{crc:08x}", '
- "VL_MSG_{m}_LAST);\n".format(module, crc=file_crc, m=module.upper())
- )
+ if len(defines) > 0:
+ write("static u16\n")
+ write("setup_message_id_table (void) {\n")
+ write(" api_main_t *am = my_api_main;\n")
+ write(" vl_msg_api_msg_config_t c;\n")
+ write(
+ ' u16 msg_id_base = vl_msg_api_get_msg_ids ("{}_{crc:08x}", '
+ "VL_MSG_{m}_LAST);\n".format(module, crc=file_crc, m=module.upper())
+ )
+ write(f" vec_add1(am->json_api_repr, (u8 *)json_api_repr_{module});\n")
for d in defines:
write(
@@ -1556,12 +1577,10 @@ def generate_c_boilerplate(services, defines, counters, file_crc, module, stream
" {{.id = VL_API_{ID} + msg_id_base,\n"
' .name = "{n}",\n'
" .handler = vl_api_{n}_t_handler,\n"
- " .cleanup = vl_noop_handler,\n"
" .endian = vl_api_{n}_t_endian,\n"
- " .print = vl_api_{n}_t_print,\n"
+ " .format_fn = vl_api_{n}_t_format,\n"
" .traced = 1,\n"
" .replay = 1,\n"
- " .print_json = vl_api_{n}_t_print_json,\n"
" .tojson = vl_api_{n}_t_tojson,\n"
" .fromjson = vl_api_{n}_t_fromjson,\n"
" .calc_size = vl_api_{n}_t_calc_size,\n"
@@ -1577,12 +1596,10 @@ def generate_c_boilerplate(services, defines, counters, file_crc, module, stream
"{{.id = VL_API_{ID} + msg_id_base,\n"
' .name = "{n}",\n'
" .handler = 0,\n"
- " .cleanup = vl_noop_handler,\n"
" .endian = vl_api_{n}_t_endian,\n"
- " .print = vl_api_{n}_t_print,\n"
+ " .format_fn = vl_api_{n}_t_format,\n"
" .traced = 1,\n"
" .replay = 1,\n"
- " .print_json = vl_api_{n}_t_print_json,\n"
" .tojson = vl_api_{n}_t_tojson,\n"
" .fromjson = vl_api_{n}_t_fromjson,\n"
" .calc_size = vl_api_{n}_t_calc_size,\n"
@@ -1594,8 +1611,33 @@ def generate_c_boilerplate(services, defines, counters, file_crc, module, stream
except KeyError:
pass
- write(" return msg_id_base;\n")
- write("}\n")
+ try:
+ if s.stream:
+ d = define_hash[s.stream_message]
+ write(
+ " c = (vl_msg_api_msg_config_t) "
+ "{{.id = VL_API_{ID} + msg_id_base,\n"
+ ' .name = "{n}",\n'
+ " .handler = 0,\n"
+ " .endian = vl_api_{n}_t_endian,\n"
+ " .format_fn = vl_api_{n}_t_format,\n"
+ " .traced = 1,\n"
+ " .replay = 1,\n"
+ " .tojson = vl_api_{n}_t_tojson,\n"
+ " .fromjson = vl_api_{n}_t_fromjson,\n"
+ " .calc_size = vl_api_{n}_t_calc_size,\n"
+ " .is_autoendian = {auto}}};\n".format(
+ n=s.stream_message,
+ ID=s.stream_message.upper(),
+ auto=d.autoendian,
+ )
+ )
+ write(" vl_msg_api_config (&c);\n")
+ except KeyError:
+ pass
+ if len(defines) > 0:
+ write(" return msg_id_base;\n")
+ write("}\n")
severity = {
"error": "VL_COUNTER_SEVERITY_ERROR",
@@ -1631,7 +1673,6 @@ def generate_c_test_boilerplate(services, defines, file_crc, module, plugin, str
#undef vl_calsizefun
/* instantiate all the print functions we know about */
-#define vl_print(handle, ...) vlib_cli_output (handle, __VA_ARGS__)
#define vl_printfun
#include "{module}.api.h"
#undef vl_printfun
@@ -1678,27 +1719,28 @@ def generate_c_test_boilerplate(services, defines, file_crc, module, plugin, str
continue
write("static void\n")
write("vl_api_{n}_t_handler (vl_api_{n}_t * mp) {{\n".format(n=e))
- write(' vl_print(0, "{n} event called:");\n'.format(n=e))
- write(" vl_api_{n}_t_print(mp, 0);\n".format(n=e))
+ write(' vlib_cli_output(0, "{n} event called:");\n'.format(n=e))
+ write(
+ ' vlib_cli_output(0, "%U", vl_api_{n}_t_format, mp);\n'.format(n=e)
+ )
write("}\n")
write("static void\n")
write("setup_message_id_table (vat_main_t * vam, u16 msg_id_base) {\n")
for s in services:
write(
- " vl_msg_api_set_handlers(VL_API_{ID} + msg_id_base, "
- ' "{n}",\n'
- " vl_api_{n}_t_handler, "
- " vl_noop_handler,\n"
- " vl_api_{n}_t_endian, "
- " vl_api_{n}_t_print,\n"
- " sizeof(vl_api_{n}_t), 1,\n"
- " vl_api_{n}_t_print_json,\n"
- " vl_api_{n}_t_tojson,\n"
- " vl_api_{n}_t_fromjson,\n"
- " vl_api_{n}_t_calc_size);\n".format(
- n=s.reply, ID=s.reply.upper()
- )
+ " vl_msg_api_config (&(vl_msg_api_msg_config_t){{\n"
+ " .id = VL_API_{ID} + msg_id_base,\n"
+ ' .name = "{n}",\n'
+ " .handler = vl_api_{n}_t_handler,\n"
+ " .endian = vl_api_{n}_t_endian,\n"
+ " .format_fn = vl_api_{n}_t_format,\n"
+ " .size = sizeof(vl_api_{n}_t),\n"
+ " .traced = 1,\n"
+ " .tojson = vl_api_{n}_t_tojson,\n"
+ " .fromjson = vl_api_{n}_t_fromjson,\n"
+ " .calc_size = vl_api_{n}_t_calc_size,\n"
+ " }});".format(n=s.reply, ID=s.reply.upper())
)
write(
' hash_set_mem (vam->function_by_name, "{n}", api_{n});\n'.format(
@@ -1717,19 +1759,18 @@ def generate_c_test_boilerplate(services, defines, file_crc, module, plugin, str
# Events
for e in s.events:
write(
- " vl_msg_api_set_handlers(VL_API_{ID} + msg_id_base, "
- ' "{n}",\n'
- " vl_api_{n}_t_handler, "
- " vl_noop_handler,\n"
- " vl_api_{n}_t_endian, "
- " vl_api_{n}_t_print,\n"
- " sizeof(vl_api_{n}_t), 1,\n"
- " vl_api_{n}_t_print_json,\n"
- " vl_api_{n}_t_tojson,\n"
- " vl_api_{n}_t_fromjson,\n"
- " vl_api_{n}_t_calc_size);\n".format(
- n=e, ID=e.upper()
- )
+ " vl_msg_api_config (&(vl_msg_api_msg_config_t){{\n"
+ " .id = VL_API_{ID} + msg_id_base,\n"
+ ' .name = "{n}",\n'
+ " .handler = vl_api_{n}_t_handler,\n"
+ " .endian = vl_api_{n}_t_endian,\n"
+ " .format_fn = vl_api_{n}_t_format,\n"
+ " .size = sizeof(vl_api_{n}_t),\n"
+ " .traced = 1,\n"
+ " .tojson = vl_api_{n}_t_tojson,\n"
+ " .fromjson = vl_api_{n}_t_fromjson,\n"
+ " .calc_size = vl_api_{n}_t_calc_size,\n"
+ " }});".format(n=e, ID=e.upper())
)
write("}\n")
@@ -1785,7 +1826,7 @@ api_{n} (cJSON *o)
}}
mp->_vl_msg_id = vac_get_msg_index(VL_API_{N}_CRC);
- vl_api_{n}_t_endian(mp);
+ vl_api_{n}_t_endian(mp, 1);
vac_write((char *)mp, len);
cJSON_free(mp);
@@ -1800,7 +1841,7 @@ api_{n} (cJSON *o)
return 0;
}}
vl_api_{r}_t *rmp = (vl_api_{r}_t *)p;
- vl_api_{r}_t_endian(rmp);
+ vl_api_{r}_t_endian(rmp, 0);
return vl_api_{r}_t_tojson(rmp);
}}
@@ -1818,7 +1859,7 @@ api_{n} (cJSON *o)
return 0;
}}
mp->_vl_msg_id = msg_id;
- vl_api_{n}_t_endian(mp);
+ vl_api_{n}_t_endian(mp, 1);
vac_write((char *)mp, len);
cJSON_free(mp);
@@ -1852,7 +1893,7 @@ api_{n} (cJSON *o)
return 0;
}}
vl_api_{r}_t *rmp = (vl_api_{r}_t *)p;
- vl_api_{r}_t_endian(rmp);
+ vl_api_{r}_t_endian(rmp, 0);
cJSON_AddItemToArray(reply, vl_api_{r}_t_tojson(rmp));
}}
}}
@@ -1874,7 +1915,7 @@ api_{n} (cJSON *o)
}}
mp->_vl_msg_id = msg_id;
- vl_api_{n}_t_endian(mp);
+ vl_api_{n}_t_endian(mp, 1);
vac_write((char *)mp, len);
cJSON_free(mp);
@@ -1895,14 +1936,14 @@ api_{n} (cJSON *o)
u16 msg_id = ntohs(*((u16 *)p));
if (msg_id == reply_msg_id) {{
vl_api_{r}_t *rmp = (vl_api_{r}_t *)p;
- vl_api_{r}_t_endian(rmp);
+ vl_api_{r}_t_endian(rmp, 0);
cJSON_AddItemToArray(reply, vl_api_{r}_t_tojson(rmp));
break;
}}
if (msg_id == details_msg_id) {{
vl_api_{d}_t *rmp = (vl_api_{d}_t *)p;
- vl_api_{d}_t_endian(rmp);
+ vl_api_{d}_t_endian(rmp, 0);
cJSON_AddItemToArray(reply, vl_api_{d}_t_tojson(rmp));
}}
}}
@@ -1968,7 +2009,6 @@ def generate_c_test2_boilerplate(services, defines, module, stream):
#include "{module}.api.h"
#undef vl_calsizefun
-#define vl_print(handle, ...) vlib_cli_output (handle, __VA_ARGS__)
#define vl_printfun
#include "{module}.api.h"
#undef vl_printfun
diff --git a/src/tools/vppapigen/vppapigen_json.py b/src/tools/vppapigen/vppapigen_json.py
index 91334468503..7239d1ea732 100644
--- a/src/tools/vppapigen/vppapigen_json.py
+++ b/src/tools/vppapigen/vppapigen_json.py
@@ -1,5 +1,7 @@
# JSON generation
import json
+import sys
+import os
process_imports = True
@@ -77,6 +79,8 @@ def walk_defs(s, is_message=False):
c = {}
c["crc"] = "{0:#0{1}x}".format(t.crc, 10)
c["options"] = t.options
+ if t.comment:
+ c["comment"] = t.comment
d.append(c)
r.append(d)
@@ -86,7 +90,26 @@ def walk_defs(s, is_message=False):
#
# Plugin entry point
#
-def run(output_dir, filename, s):
+
+
+def contents_to_c_string(contents):
+ # Escape backslashes and double quotes
+ contents = contents.replace("\\", "\\\\").replace('"', '\\"')
+ # Replace newlines with \n
+ contents = contents.replace("\n", "\\n")
+ return '"' + contents + '"'
+
+
+def run(output_dir, apifilename, s):
+ if not output_dir:
+ sys.stderr.write("Missing --outputdir argument")
+ return None
+
+ basename = os.path.basename(apifilename)
+ filename_json_repr = os.path.join(output_dir + "/" + basename + "_json.h")
+ filename, _ = os.path.splitext(basename)
+ modulename = filename.replace(".", "_")
+
j = {}
j["types"] = walk_defs([o for o in s["types"] if o.__class__.__name__ == "Typedef"])
@@ -104,4 +127,9 @@ def run(output_dir, filename, s):
j["vl_api_version"] = hex(s["file_crc"])
j["imports"] = walk_imports(i for i in s["Import"])
j["counters"], j["paths"] = walk_counters(s["Counters"], s["Paths"])
- return json.dumps(j, indent=4, separators=(",", ": "))
+ r = json.dumps(j, indent=4, separators=(",", ": "))
+ c_string = contents_to_c_string(r)
+ with open(filename_json_repr, "w", encoding="UTF-8") as f:
+ print(f"const char *json_api_repr_{modulename} = {c_string};", file=f)
+ # return json.dumps(j, indent=4, separators=(",", ": "))
+ return r
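
Besides returning the JSON text, run() now also writes it into a small C
header (<basename>_json.h) as a single string constant named
json_api_repr_<module>, which the generated C boilerplate includes and appends
to am->json_api_repr. A quick stand-alone check of the escaping helper, using
a hypothetical JSON fragment:

    # contents_to_c_string() repeated here so the snippet runs stand-alone;
    # it escapes backslashes, double quotes and newlines so the JSON text can
    # be embedded in a C string literal.
    def contents_to_c_string(contents):
        contents = contents.replace("\\", "\\\\").replace('"', '\\"')
        contents = contents.replace("\n", "\\n")
        return '"' + contents + '"'


    json_text = '{\n    "vl_api_version": "0x12345678"\n}'  # hypothetical
    print(f"const char *json_api_repr_example = {contents_to_c_string(json_text)};")
    # const char *json_api_repr_example = "{\n    \"vl_api_version\": \"0x12345678\"\n}";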