From 402a7e1bcdf4724b23b02070edb1140008b2a489 Mon Sep 17 00:00:00 2001
From: Juraj Linkeš
Date: Wed, 30 Sep 2020 13:50:53 +0200
Subject: vpp_device: updates for 1n-tx2 testbeds
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

One ThunderX2 9975 server (.69) was replaced with two ThunderX2 9980
servers (.70, .71). Move the .69 server under the ansible perf section
in anticipation of repurposing it for perf testing. Update the ansible
scripts with the .70 and .71 config and rename the port names in the
device.sh lib to reflect the NIC differences between .69 and .70
(and .71).

Change-Id: I88b75648735243e5559175d3192ffcc8fc70071c
Signed-off-by: Juraj Linkeš
---
 .../lf_inventory/host_vars/10.30.51.69.yaml | 33 +++++-----------
 .../lf_inventory/host_vars/10.30.51.70.yaml | 45 ++++++++++++++++++++++
 .../lf_inventory/host_vars/10.30.51.71.yaml | 45 ++++++++++++++++++++++
 3 files changed, 100 insertions(+), 23 deletions(-)
 create mode 100644 resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml
 create mode 100644 resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml

(limited to 'resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars')

diff --git a/resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml b/resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml
index f330a19640..dfbe5a876b 100644
--- a/resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml
+++ b/resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.69.yaml
@@ -2,10 +2,18 @@
 # file: host_vars/10.30.51.69.yaml
 
 hostname: "s27-t13-sut1"
-inventory_ipmi_hostname: "10.30.50.69"
-vfs_data_file: "csit-initialize-vfs-tx2.sh"
 grub:
+  isolcpus: "1-111,113-223"
+  nohz_full: "1-111,113-223"
+  rcu_nocbs: "1-111,113-223"
+sysctl:
+  kernel:
+    watchdog_cpumask: "0,112"
+  vm:
   nr_hugepages: 57344
+  max_map_count: 20000
+
+inventory_ipmi_hostname: "10.30.50.69"
 cpu_microarchitecture: "thunderx2"
 
 # User management.
@@ -22,24 +30,3 @@ users:
     ssh_key:
       - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
       - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
-
-# Nomad settings.
-nomad_certificates:
-  - src: "{{ vault_nomad_v1_ca_file }}"
-    dest: "{{ nomad_ca_file }}"
-  - src: "{{ vault_nomad_v1_cert_file }}"
-    dest: "{{ nomad_cert_file }}"
-  - src: "{{ vault_nomad_v1_key_file }}"
-    dest: "{{ nomad_key_file }}"
-nomad_datacenter: "yul1"
-nomad_name: "{{ hostname }}-{{ ansible_architecture }}"
-nomad_node_role: "client"
-nomad_node_class: "csitarm"
-nomad_options:
-  driver.raw_exec.enable: 1
-  docker.cleanup.image: false
-  docker.privileged.enabled: true
-  docker.volumes.enabled: true
-  driver.whitelist: "docker,raw_exec,exec"
-nomad_servers: [ "10.30.51.32:4647", "10.30.51.33:4647" ]
-nomad_cpu_total_compute: "40000"
diff --git a/resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml b/resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml
new file mode 100644
index 0000000000..da603c28ce
--- /dev/null
+++ b/resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.70.yaml
@@ -0,0 +1,45 @@
+---
+# file: host_vars/10.30.51.70.yaml
+
+hostname: "s55-t36-sut1"
+inventory_ipmi_hostname: "10.30.50.70"
+vfs_data_file: "csit-initialize-vfs-tx2.sh"
+grub:
+  nr_hugepages: 65536
+cpu_microarchitecture: "thunderx2"
+
+# User management.
+users:
+  - username: localadmin
+    groups: [adm, sudo]
+    password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+    ssh_key:
+      - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+      - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+  - username: testuser
+    groups: [adm, sudo]
+    password: "$6$zpBUdQ4q$P2zKclumvCndWujgP/qQ8eMk3YZk7ESAom04Fqp26hJH2jWkMXEX..jqxzMdDLJKiDaDHIaSkQMVjHzd3cRLs1"
+    ssh_key:
+      - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+      - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+
+# Nomad settings.
+nomad_certificates:
+  - src: "{{ vault_nomad_v1_ca_file }}"
+    dest: "{{ nomad_ca_file }}"
+  - src: "{{ vault_nomad_v1_cert_file }}"
+    dest: "{{ nomad_cert_file }}"
+  - src: "{{ vault_nomad_v1_key_file }}"
+    dest: "{{ nomad_key_file }}"
+nomad_datacenter: "yul1"
+nomad_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "csitarm"
+nomad_options:
+  driver.raw_exec.enable: 1
+  docker.cleanup.image: false
+  docker.privileged.enabled: true
+  docker.volumes.enabled: true
+  driver.whitelist: "docker,raw_exec,exec"
+nomad_servers: [ "10.30.51.32:4647", "10.30.51.33:4647" ]
+nomad_cpu_total_compute: "40000"
diff --git a/resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml b/resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml
new file mode 100644
index 0000000000..b5fb9cc4ec
--- /dev/null
+++ b/resources/tools/testbed-setup/ansible/inventories/lf_inventory/host_vars/10.30.51.71.yaml
@@ -0,0 +1,45 @@
+---
+# file: host_vars/10.30.51.71.yaml
+
+hostname: "s56-t37-sut1"
+inventory_ipmi_hostname: "10.30.50.71"
+vfs_data_file: "csit-initialize-vfs-tx2.sh"
+grub:
+  nr_hugepages: 65536
+cpu_microarchitecture: "thunderx2"
+
+# User management.
+users:
+  - username: localadmin
+    groups: [adm, sudo]
+    password: "$6$FIsbVDQR$5D0wgufOd2FtnmOiRNsGlgg6Loh.0x3dWSj72DSQnqisSyE9DROfgSgA6s0yxDwz4Jd5SRTXiTKuRYuSQ5POI1"
+    ssh_key:
+      - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+      - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+  - username: testuser
+    groups: [adm, sudo]
+    password: "$6$zpBUdQ4q$P2zKclumvCndWujgP/qQ8eMk3YZk7ESAom04Fqp26hJH2jWkMXEX..jqxzMdDLJKiDaDHIaSkQMVjHzd3cRLs1"
+    ssh_key:
+      - "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAQEAgObJFDIMmPwQhhkjAynvlbwpM5yeSewyaE7vTLaFf4uFz4vmsE2hFf6B2xXHUGLVwoVfk91UeK7LOGrdDpoDDHzvPZXj5NmZI+WiWax5y2pQZNkcSZws0ENCeEc4hPwc4veJ1JmhokF4Bsmu14HyFMaFUhM8897jtJwsh+9fLA/no0iPGaQqEtRUQhkV+P4jCEPoY0qdRZAzVw/rY4EGAMhsJe3EJmyj63OfrrkG3+hvSLFo5pDxHQr3pZd/c6ukI7xMef48PosAvGCm3oxzb/Gu9PZIGuHLczY+tCnzCkY7MO7E+IWgjXrUAfYwSWz8XmFmA9LLe26DT5jkcK8hGQ== pmikus@cisco.com"
+      - "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXUbbhesOpvPF+KI8nq4TXvEO/Un1aU/ehZ9clCyw9C40xjDkX2BlcX8WeHxFHe7fjFaCd07Vg73rn/3M9uNDnDxvjH1GQ0twvi3iBTO4PkHBBfGF9qnE8MYzno6FvlsVKLuUuPbfm8kbOQ+ZDfdXq6gdtXh0hSYYkqC1heNPCNsqaakkB99Edyle+Ot0V7cpW+Yo2wo98KuX/cgUEhVoA8QnNVE7zaWcjSXBZEteoA4gLpAbV6p67/d6H/2ykHTidBViYTEsHco56tJoA4nTPuAupDOLBcWXgF5TAN6z1aCn2JA1DDfniLakgrZ5oVj2qHhUmbxQAtnKQfHADjqzV jlinkes@jlinkes"
+
+# Nomad settings.
+nomad_certificates:
+  - src: "{{ vault_nomad_v1_ca_file }}"
+    dest: "{{ nomad_ca_file }}"
+  - src: "{{ vault_nomad_v1_cert_file }}"
+    dest: "{{ nomad_cert_file }}"
+  - src: "{{ vault_nomad_v1_key_file }}"
+    dest: "{{ nomad_key_file }}"
+nomad_datacenter: "yul1"
+nomad_name: "{{ hostname }}-{{ ansible_architecture }}"
+nomad_node_role: "client"
+nomad_node_class: "csitarm"
+nomad_options:
+  driver.raw_exec.enable: 1
+  docker.cleanup.image: false
+  docker.privileged.enabled: true
+  docker.volumes.enabled: true
+  driver.whitelist: "docker,raw_exec,exec"
+nomad_servers: [ "10.30.51.32:4647", "10.30.51.33:4647" ]
+nomad_cpu_total_compute: "40000"
--
cgit 1.2.3-korg
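
For context on the port renaming mentioned in the commit message: the
vfs_data_file referenced in the host_vars above (csit-initialize-vfs-tx2.sh,
together with the device.sh lib) prepares SR-IOV virtual functions on the SUT
NIC ports before vpp_device tests run, so the port names used there must match
the NICs actually present in each server. The snippet below is only a minimal
sketch of the underlying Linux sysfs mechanism, with placeholder interface
names and VF counts; it is not the CSIT script itself.

    # Sketch only: create SR-IOV VFs on each whitelisted port via sysfs.
    # PORTS and VF_COUNT are hypothetical placeholders, not CSIT values.
    set -euo pipefail

    PORTS="enP2p1s0f0 enP2p1s0f1"   # placeholder TX2 NIC port names
    VF_COUNT=2                      # VFs to create per physical port

    for port in ${PORTS}; do
        sriov="/sys/class/net/${port}/device/sriov_numvfs"
        # Resolve the PCI address of the physical function for logging.
        pci_addr=$(basename "$(readlink -f "/sys/class/net/${port}/device")")
        echo 0 > "${sriov}"             # drop any previously created VFs
        echo "${VF_COUNT}" > "${sriov}" # create the requested number of VFs
        echo "Created ${VF_COUNT} VFs on ${port} (PF ${pci_addr})"
    done

The real VF setup in CSIT is driven by device.sh and the per-testbed
vfs_data_file; the sketch above only illustrates the sriov_numvfs interface
that such scripts rely on.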