| author | pmikus <pmikus@cisco.com> | 2021-09-07 07:01:36 +0000 |
| --- | --- | --- |
| committer | pmikus <pmikus@cisco.com> | 2021-09-07 07:01:36 +0000 |
| commit | 80a4b408b1bec92f51813c5b758d7b2739f665ed (patch) | |
| tree | 6f8b989ee1e2e6489bc2bde3a7e52787b252193d /fdio.infra.terraform/1n_nmd/main.tf | |
| parent | 16770f5d50d0e1f9f82901e19b106fc1b88c41b8 (diff) | |
Terraform: Cleanup
+ Use terraform fmt to format the configuration
Signed-off-by: pmikus <pmikus@cisco.com>
Change-Id: I32bd2f81c5838722506d44a8ff8ab48da204643a
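The cleanup below is the output of the stock `terraform fmt` subcommand. A minimal sketch of how to reproduce or verify it (path relative to the repository root; `-check`, `-diff`, and `-recursive` are standard Terraform CLI flags):

```sh
# Rewrite all *.tf files under the module tree into HCL canonical style.
terraform fmt -recursive fdio.infra.terraform/1n_nmd/

# CI-style check: print a diff and exit non-zero if any file is not
# canonically formatted, without modifying it.
terraform fmt -check -diff -recursive fdio.infra.terraform/1n_nmd/
```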
Diffstat (limited to 'fdio.infra.terraform/1n_nmd/main.tf')
| -rw-r--r-- | fdio.infra.terraform/1n_nmd/main.tf | 172 |
1 file changed, 86 insertions(+), 86 deletions(-)
diff --git a/fdio.infra.terraform/1n_nmd/main.tf b/fdio.infra.terraform/1n_nmd/main.tf
index d48f12a046..a8a1bb9315 100644
--- a/fdio.infra.terraform/1n_nmd/main.tf
+++ b/fdio.infra.terraform/1n_nmd/main.tf
@@ -5,24 +5,24 @@
 # and have them automatically associated with the root provider
 # configurations.
 module "alertmanager" {
-  source = "./alertmanager"
-  providers = {
+  source    = "./alertmanager"
+  providers = {
     nomad = nomad.yul1
   }

   # nomad
-  nomad_datacenters = [ "yul1" ]
+  nomad_datacenters = ["yul1"]

   # alertmanager
-  alertmanager_job_name = "prod-alertmanager"
-  alertmanager_use_canary = true
-  alertmanager_group_count = 1
-  alertmanager_vault_secret = {
-    use_vault_provider = false,
-    vault_kv_policy_name = "kv-secret",
-    vault_kv_path = "secret/data/prometheus",
-    vault_kv_field_access_key = "access_key",
-    vault_kv_field_secret_key = "secret_key"
+  alertmanager_job_name     = "prod-alertmanager"
+  alertmanager_use_canary   = true
+  alertmanager_group_count  = 1
+  alertmanager_vault_secret = {
+    use_vault_provider        = false,
+    vault_kv_policy_name      = "kv-secret",
+    vault_kv_path             = "secret/data/prometheus",
+    vault_kv_field_access_key = "access_key",
+    vault_kv_field_secret_key = "secret_key"
   }
   alertmanager_version = "0.21.0"
   alertmanager_cpu     = 1000
@@ -35,131 +35,131 @@ module "alertmanager" {
 }

 module "grafana" {
-  source = "./grafana"
-  providers = {
+  source    = "./grafana"
+  providers = {
     nomad = nomad.yul1
   }

   # nomad
-  nomad_datacenters = [ "yul1" ]
+  nomad_datacenters = ["yul1"]

   # grafana
-  grafana_job_name = "prod-grafana"
-  grafana_use_canary = true
-  grafana_group_count = 1
-  grafana_vault_secret = {
-    use_vault_provider = false,
-    vault_kv_policy_name = "kv-secret",
-    vault_kv_path = "secret/data/grafana",
-    vault_kv_field_access_key = "access_key",
-    vault_kv_field_secret_key = "secret_key"
+  grafana_job_name     = "prod-grafana"
+  grafana_use_canary   = true
+  grafana_group_count  = 1
+  grafana_vault_secret = {
+    use_vault_provider        = false,
+    vault_kv_policy_name      = "kv-secret",
+    vault_kv_path             = "secret/data/grafana",
+    vault_kv_field_access_key = "access_key",
+    vault_kv_field_secret_key = "secret_key"
   }
-  grafana_container_image = "grafana/grafana:7.3.7"
-  grafana_cpu = 1000
-  grafana_mem = 2048
-  grafana_port = 3000
+  grafana_container_image = "grafana/grafana:7.3.7"
+  grafana_cpu             = 1000
+  grafana_mem             = 2048
+  grafana_port            = 3000
 }

 module "minio" {
-  source = "./minio"
-  providers = {
+  source    = "./minio"
+  providers = {
     nomad = nomad.yul1
   }

   # nomad
-  nomad_datacenters = [ "yul1" ]
-  nomad_host_volume = "prod-volume-data1-1"
+  nomad_datacenters = ["yul1"]
+  nomad_host_volume = "prod-volume-data1-1"

   # minio
-  minio_job_name = "prod-minio"
-  minio_group_count = 4
-  minio_service_name = "storage"
-  minio_host = "http://10.32.8.1{4...7}"
-  minio_port = 9000
-  minio_container_image = "minio/minio:RELEASE.2021-07-27T02-40-15Z"
-  minio_vault_secret = {
-    use_vault_provider = false,
-    vault_kv_policy_name = "kv-secret",
-    vault_kv_path = "secret/data/minio",
-    vault_kv_field_access_key = "access_key",
-    vault_kv_field_secret_key = "secret_key"
+  minio_job_name        = "prod-minio"
+  minio_group_count     = 4
+  minio_service_name    = "storage"
+  minio_host            = "http://10.32.8.1{4...7}"
+  minio_port            = 9000
+  minio_container_image = "minio/minio:RELEASE.2021-07-27T02-40-15Z"
+  minio_vault_secret    = {
+    use_vault_provider        = false,
+    vault_kv_policy_name      = "kv-secret",
+    vault_kv_path             = "secret/data/minio",
+    vault_kv_field_access_key = "access_key",
+    vault_kv_field_secret_key = "secret_key"
   }
-  minio_data_dir = "/data/"
-  minio_use_host_volume = true
-  minio_use_canary = true
-  minio_envs = [ "MINIO_BROWSER=\"off\"" ]
+  minio_data_dir        = "/data/"
+  minio_use_host_volume = true
+  minio_use_canary      = true
+  minio_envs            = ["MINIO_BROWSER=\"off\""]

   # minio client
-  mc_job_name = "prod-mc"
-  mc_container_image = "minio/mc:RELEASE.2021-07-27T02-40-15Z"
-  mc_extra_commands = [
+  mc_job_name        = "prod-mc"
+  mc_container_image = "minio/mc:RELEASE.2021-07-27T02-40-15Z"
+  mc_extra_commands  = [
     "mc policy set public LOCALMINIO/logs.fd.io",
     "mc policy set public LOCALMINIO/docs.fd.io",
     "mc ilm add --expiry-days '180' LOCALMINIO/logs.fd.io",
     "mc admin user add LOCALMINIO storage Storage1234",
     "mc admin policy set LOCALMINIO writeonly user=storage"
   ]
-  minio_buckets = [ "logs.fd.io", "docs.fd.io" ]
+  minio_buckets = ["logs.fd.io", "docs.fd.io"]
 }

 module "nginx" {
-  source = "./nginx"
-  providers = {
+  source    = "./nginx"
+  providers = {
     nomad = nomad.yul1
   }

   # nomad
-  nomad_datacenters = [ "yul1" ]
-  nomad_host_volume = "prod-volume-data1-1"
+  nomad_datacenters = ["yul1"]
+  nomad_host_volume = "prod-volume-data1-1"

   # nginx
-  nginx_job_name = "prod-nginx"
-  nginx_use_host_volume = true
+  nginx_job_name        = "prod-nginx"
+  nginx_use_host_volume = true
 }

 module "prometheus" {
-  source = "./prometheus"
-  providers = {
+  source    = "./prometheus"
+  providers = {
     nomad = nomad.yul1
   }

   # nomad
-  nomad_datacenters = [ "yul1" ]
-  nomad_host_volume = "prod-volume-data1-1"
+  nomad_datacenters = ["yul1"]
+  nomad_host_volume = "prod-volume-data1-1"

   # prometheus
-  prometheus_job_name = "prod-prometheus"
-  prometheus_use_canary = true
-  prometheus_group_count = 4
-  prometheus_vault_secret = {
-    use_vault_provider = false,
-    vault_kv_policy_name = "kv-secret",
-    vault_kv_path = "secret/data/prometheus",
-    vault_kv_field_access_key = "access_key",
-    vault_kv_field_secret_key = "secret_key"
+  prometheus_job_name     = "prod-prometheus"
+  prometheus_use_canary   = true
+  prometheus_group_count  = 4
+  prometheus_vault_secret = {
+    use_vault_provider        = false,
+    vault_kv_policy_name      = "kv-secret",
+    vault_kv_path             = "secret/data/prometheus",
+    vault_kv_field_access_key = "access_key",
+    vault_kv_field_secret_key = "secret_key"
   }
-  prometheus_data_dir = "/data/"
-  prometheus_use_host_volume = true
-  prometheus_version = "2.28.1"
-  prometheus_cpu = 2000
-  prometheus_mem = 8192
-  prometheus_port = 9090
+  prometheus_data_dir        = "/data/"
+  prometheus_use_host_volume = true
+  prometheus_version         = "2.28.1"
+  prometheus_cpu             = 2000
+  prometheus_mem             = 8192
+  prometheus_port            = 9090
 }

 module "vpp_device" {
-  source = "./vpp_device"
-  providers = {
+  source    = "./vpp_device"
+  providers = {
     nomad = nomad.yul1
   }

   # nomad
-  nomad_datacenters = [ "yul1" ]
+  nomad_datacenters = ["yul1"]

   # csit_shim
-  csit_shim_job_name = "prod-device-csit-shim"
-  csit_shim_group_count = "1"
-  csit_shim_cpu = "1500"
-  csit_shim_mem = "4096"
-  csit_shim_image_aarch64 = "fdiotools/csit_shim-ubuntu2004:2021_03_02_143938_UTC-aarch64"
-  csit_shim_image_x86_64 = "fdiotools/csit_shim-ubuntu2004:2021_03_04_142103_UTC-x86_64"
+  csit_shim_job_name      = "prod-device-csit-shim"
+  csit_shim_group_count   = "1"
+  csit_shim_cpu           = "1500"
+  csit_shim_mem           = "4096"
+  csit_shim_image_aarch64 = "fdiotools/csit_shim-ubuntu2004:2021_03_02_143938_UTC-aarch64"
+  csit_shim_image_x86_64  = "fdiotools/csit_shim-ubuntu2004:2021_03_04_142103_UTC-x86_64"
 }