# tests/resources/libraries/robot/runtest.robot
*** Settings ***
Library    OperatingSystem
Library    Process
Library    String

*** Variables ***

*** Keywords ***

Infra ${VALUE}
    [Documentation]    Pass ${VALUE} straight through to the infrastructure script config.sh.
    Run Process    ${EXECDIR}/config.sh    ${VALUE}
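
# Usage sketch for the embedded-argument keyword above; the action names
# "setup" and "teardown" are assumptions about what config.sh accepts,
# not something this file confirms:
#
#    Infra setup
#    Infra teardown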

Run Test
    [Documentation]     Start the ${TESTID} test on the ${TEST_SETUP} topology through
    ...                 config.sh and check the min/max/avg values it prints on stdout
    ...                 against the expected ones. RTC tests must match exactly,
    ...                 throughput tests must reach at least the expected values, and
    ...                 latency tests must not exceed them.
    [Arguments]         ${TEST_SETUP}=${NONE}
    ...                 ${TESTID}=${NONE}
    ...                 ${EXPECTED_MIN}=${NONE}
    ...                 ${EXPECTED_MAX}=${NONE}
    ...                 ${EXPECTED_AVG}=${NONE}
    ${result_test} =    Run Process    ${EXECDIR}/config.sh    start    ${TEST_SETUP}    ${TESTID}
    ...                 stdout=${TEMPDIR}/stdout.txt    stderr=${TEMPDIR}/stderr.txt
    Log Many            stdout: ${result_test.stdout}    stderr: ${result_test.stderr}
    # config.sh is expected to print min, max and avg as whitespace-separated values.
    @{min_max_avg} =    Split String    ${result_test.stdout.strip()}
    Log To Console      Min Max Average Array: @{min_max_avg}
    IF    '${TESTID}' == 'rtc'
        # RTC: the measured bitrate must match the expected values exactly.
        Should Be True    ${min_max_avg}[0] == ${EXPECTED_MIN}    msg=Min does not match (${min_max_avg}[0] != ${EXPECTED_MIN})
        Should Be True    ${min_max_avg}[1] == ${EXPECTED_MAX}    msg=Max does not match (${min_max_avg}[1] != ${EXPECTED_MAX})
        Should Be True    ${min_max_avg}[2] == ${EXPECTED_AVG}    msg=Avg does not match (${min_max_avg}[2] != ${EXPECTED_AVG})
    ELSE IF    '${TESTID}' in ['requin', 'requin-new', 'cbr', 'cbr-new']
        # Throughput: the measured values must be at least the expected ones.
        Should Be True    ${min_max_avg}[0] >= ${EXPECTED_MIN}    msg=Min does not match (${min_max_avg}[0] < ${EXPECTED_MIN})
        Should Be True    ${min_max_avg}[1] >= ${EXPECTED_MAX}    msg=Max does not match (${min_max_avg}[1] < ${EXPECTED_MAX})
        Should Be True    ${min_max_avg}[2] >= ${EXPECTED_AVG}    msg=Avg does not match (${min_max_avg}[2] < ${EXPECTED_AVG})
    ELSE IF    '${TESTID}' in ['latency', 'latency-new']
        # Latency: the measured values must not exceed the expected ones.
        Should Be True    ${min_max_avg}[0] <= ${EXPECTED_MIN}    msg=Min does not match (${min_max_avg}[0] > ${EXPECTED_MIN})
        Should Be True    ${min_max_avg}[1] <= ${EXPECTED_MAX}    msg=Max does not match (${min_max_avg}[1] > ${EXPECTED_MAX})
        Should Be True    ${min_max_avg}[2] <= ${EXPECTED_AVG}    msg=Avg does not match (${min_max_avg}[2] > ${EXPECTED_AVG})
    ELSE
        Fail    Provided test ID '${TESTID}' does not exist
    END
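
# Direct usage sketch for the keyword above; the topology name and the
# expected values are illustrative assumptions, not figures from a real run:
#
#    Run Test    TEST_SETUP=2-nodes    TESTID=latency
#    ...         EXPECTED_MIN=1    EXPECTED_MAX=10    EXPECTED_AVG=5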

Set Link
    [Documentation]     Configure link rate/delay/jitter/loss
    ...                 Arguments:
    ...                 ${TEST_SETUP} The setup of the test
    ...                 ${RATE} Rate of the link
    ...                 ${DELAY} Delay of the link
    ...                 ${JITTER} Jitter of the link
    ...                 ${LOSS} Loss of the link
    [Arguments]         ${TEST_SETUP}=${NONE}
    ...                 ${RATE}=${NONE}
    ...                 ${DELAY}=${NONE}
    ...                 ${JITTER}=${NONE}
    ...                 ${LOSS}=${NONE}
    ${result_link} =    Run Process    ${EXECDIR}/config.sh    setchannel    ${TEST_SETUP}    server
    ...                 ${RATE}-${DELAY}-${JITTER}-${LOSS}
    Log Many            stdout: ${result_link.stdout}    stderr: ${result_link.stderr}
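
# Usage sketch: shape the server-side link before a run. The plain numeric
# values mirror the ${RATE}-${DELAY}-${JITTER}-${LOSS} string built above;
# the units config.sh expects are an assumption left to that script:
#
#    Set Link    TEST_SETUP=2-nodes    RATE=500    DELAY=15    JITTER=5    LOSS=1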

Run Latency Test
    [Documentation]    Run hicn-ping on the ${TEST_SETUP} topology and measure latency.
    ...                Arguments:
    ...                ${TEST_SETUP} The setup of the test.
    ...                ${EXPECTED_MIN} The expected min latency
    ...                ${EXPECTED_MAX} The expected max latency
    ...                ${EXPECTED_AVG} The expected avg latency
    [Arguments]        ${TEST_SETUP}=${NONE}    ${EXPECTED_MIN}=${NONE}    ${EXPECTED_MAX}=${NONE}    ${EXPECTED_AVG}=${NONE}
    Run Test           ${TEST_SETUP}    latency    ${EXPECTED_MIN}    ${EXPECTED_MAX}    ${EXPECTED_AVG}

Run Throughput Test Raaqm
    [Documentation]    Run hiperf on the ${TEST_SETUP} topology and measure throughput.
    ...                Arguments:
    ...                ${TEST_SETUP} The setup of the test.
    ...                ${EXPECTED_MIN} The expected min throughput
    ...                ${EXPECTED_MAX} The expected max throughput
    ...                ${EXPECTED_AVG} The expected avg throughput
    [Arguments]        ${TEST_SETUP}=${NONE}    ${EXPECTED_MIN}=${NONE}    ${EXPECTED_MAX}=${NONE}    ${EXPECTED_AVG}=${NONE}
    Run Test           ${TEST_SETUP}    requin    ${EXPECTED_MIN}    ${EXPECTED_MAX}    ${EXPECTED_AVG}

Run Throughput Test Raaqm New
    [Documentation]    Run hiperf on the ${TEST_SETUP} topology with the new packet format and measure throughput.
    ...                Arguments:
    ...                ${TEST_SETUP} The setup of the test.
    ...                ${EXPECTED_MIN} The expected min throughput
    ...                ${EXPECTED_MAX} The expected max throughput
    ...                ${EXPECTED_AVG} The expected avg throughput
    [Arguments]        ${TEST_SETUP}=${NONE}    ${EXPECTED_MIN}=${NONE}    ${EXPECTED_MAX}=${NONE}    ${EXPECTED_AVG}=${NONE}
    Run Test           ${TEST_SETUP}    requin-new    ${EXPECTED_MIN}    ${EXPECTED_MAX}    ${EXPECTED_AVG}

Run Throughput Test CBR
    [Documentation]    Run hiperf on the ${TEST_SETUP} topology and measure throughput.
    ...                Arguments:
    ...                ${TEST_SETUP} The setup of the test.
    ...                ${EXPECTED_MIN} The expected min throughput
    ...                ${EXPECTED_MAX} The expected max throughput
    ...                ${EXPECTED_AVG} The expected avg throughput
    [Arguments]        ${TEST_SETUP}=${NONE}    ${EXPECTED_MIN}=${NONE}    ${EXPECTED_MAX}=${NONE}    ${EXPECTED_AVG}=${NONE}
    Run Test           ${TEST_SETUP}    cbr    ${EXPECTED_MIN}    ${EXPECTED_MAX}    ${EXPECTED_AVG}

Run Throughput Test CBR New
    [Documentation]    Run hiperf on the ${TEST_SETUP} topology with the new packet format and measure throughput.
    ...                Arguments:
    ...                ${TEST_SETUP} The setup of the test.
    ...                ${EXPECTED_MIN} The expected min throughput
    ...                ${EXPECTED_MAX} The expected max throughput
    ...                ${EXPECTED_AVG} The expected avg throughput
    [Arguments]        ${TEST_SETUP}=${NONE}    ${EXPECTED_MIN}=${NONE}    ${EXPECTED_MAX}=${NONE}    ${EXPECTED_AVG}=${NONE}
    Run Test           ${TEST_SETUP}    cbr-new    ${EXPECTED_MIN}    ${EXPECTED_MAX}    ${EXPECTED_AVG}

Run RTC Test
    [Documentation]    Run hiperf RTC on the ${TEST_SETUP} topology and check consumer syncs to producer bitrate.
    ...                Arguments:
    ...                ${TEST_SETUP} The setup of the test.
    ...                ${EXPECTED_MIN} The expected min bitrate
    ...                ${EXPECTED_MAX} The expected max bitrate
    ...                ${EXPECTED_AVG} The expected avg bitrate
    [Arguments]        ${TEST_SETUP}=${NONE}    ${EXPECTED_MIN}=${NONE}    ${EXPECTED_MAX}=${NONE}    ${EXPECTED_AVG}=${NONE}
    Run Test           ${TEST_SETUP}    rtc    ${EXPECTED_MIN}    ${EXPECTED_MAX}    ${EXPECTED_AVG}

Run Latency Test New
    [Documentation]    Run hicn-ping on the ${TEST_SETUP} topology with the new packet format and measure latency.
    ...                Arguments:
    ...                ${TEST_SETUP} The setup of the test.
    ...                ${EXPECTED_MIN} The expected min latency
    ...                ${EXPECTED_MAX} The expected max latency
    ...                ${EXPECTED_AVG} The expected avg latency
    [Arguments]        ${TEST_SETUP}=${NONE}    ${EXPECTED_MIN}=${NONE}    ${EXPECTED_MAX}=${NONE}    ${EXPECTED_AVG}=${NONE}
    Run Test           ${TEST_SETUP}    latency-new    ${EXPECTED_MIN}    ${EXPECTED_MAX}    ${EXPECTED_AVG}
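
# Sketch of a suite that could import this resource; the resource path,
# topology name and expected bounds are illustrative assumptions:
#
#    *** Settings ***
#    Resource    resources/libraries/robot/runtest.robot
#
#    *** Test Cases ***
#    Latency Within Bounds
#        Run Latency Test    2-nodes    EXPECTED_MIN=1    EXPECTED_MAX=10    EXPECTED_AVG=5
#
#    Throughput At Least Expected
#        Run Throughput Test CBR    2-nodes    EXPECTED_MIN=100    EXPECTED_MAX=200    EXPECTED_AVG=150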