Patch summary: migrate terraform-nomad-pyspark-etl secrets from the deprecated
vault_generic_secret data source to vault_kv_secret_v2 (with matching provider
version bumps), fetch the individual ETL scripts and schema JSON files directly
from raw.githubusercontent.com instead of git-cloning the whole CSIT repository,
wrap the templated Nomad `datacenters` value in list syntax, and enable
`artifact.disable_filesystem_isolation` on Nomad clients 10.30.51.21-28.
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml1
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml1
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml1
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml1
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml1
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml1
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml1
-rw-r--r--fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml1
-rw-r--r--fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-stats.hcl.tftpl12
-rw-r--r--fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-hoststack.hcl.tftpl14
-rw-r--r--fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-mrr.hcl.tftpl14
-rw-r--r--fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-ndrpdr.hcl.tftpl14
-rw-r--r--fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-soak.hcl.tftpl12
-rw-r--r--fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/main.tf451
-rw-r--r--fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/variables.tf4
-rw-r--r--fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/versions.tf4
-rw-r--r--fdio.infra.terraform/terraform-nomad-pyspark-etl/variables.tf4
17 files changed, 287 insertions, 250 deletions
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml
index 581dc3c9a1..a86631695e 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.21.yaml
@@ -39,6 +39,7 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
+ artifact.disable_filesystem_isolation: true
nomad_service_mgr: "systemd"
nomad_consul_use_ssl: false
nomad_use_tls: false
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml
index 5de1de40a2..196f8b6745 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.22.yaml
@@ -39,6 +39,7 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
+ artifact.disable_filesystem_isolation: true
nomad_service_mgr: "systemd"
nomad_consul_use_ssl: false
nomad_use_tls: false
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml
index dd55ffa32f..70b75f7e20 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.23.yaml
@@ -39,6 +39,7 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
+ artifact.disable_filesystem_isolation: true
nomad_service_mgr: "systemd"
nomad_consul_use_ssl: false
nomad_use_tls: false
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml
index 579f6d6f07..6bb2c1fd57 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.24.yaml
@@ -39,6 +39,7 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
+ artifact.disable_filesystem_isolation: true
nomad_service_mgr: "systemd"
nomad_consul_use_ssl: false
nomad_use_tls: false
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml
index 528383bcdb..c016c56961 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.25.yaml
@@ -39,6 +39,7 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
+ artifact.disable_filesystem_isolation: true
nomad_service_mgr: "systemd"
nomad_consul_use_ssl: false
nomad_use_tls: false
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml
index 018174ceea..99333dd2e6 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.26.yaml
@@ -39,6 +39,7 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
+ artifact.disable_filesystem_isolation: true
nomad_service_mgr: "systemd"
nomad_consul_use_ssl: false
nomad_use_tls: false
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml
index a73d79fc4a..4383dd2499 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.27.yaml
@@ -38,6 +38,7 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
+ artifact.disable_filesystem_isolation: true
nomad_service_mgr: "systemd"
nomad_consul_use_ssl: false
nomad_use_tls: false
diff --git a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml
index 5880d076d1..329b0dc9af 100644
--- a/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml
+++ b/fdio.infra.ansible/inventories/lf_inventory/host_vars/10.30.51.28.yaml
@@ -38,6 +38,7 @@ nomad_options:
docker.volumes.enabled: true
driver.whitelist: "docker,raw_exec,exec"
fingerprint.network.disallow_link_local: true
+ artifact.disable_filesystem_isolation: true
nomad_service_mgr: "systemd"
nomad_consul_use_ssl: false
nomad_use_tls: false
diff --git a/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-stats.hcl.tftpl b/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-stats.hcl.tftpl
index 86ca584de7..6634018988 100644
--- a/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-stats.hcl.tftpl
+++ b/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-stats.hcl.tftpl
@@ -1,5 +1,5 @@
job "${job_name}" {
- datacenters = "${datacenters}"
+ datacenters = ["${datacenters}"]
type = "${type}"
periodic {
cron = "${cron}"
@@ -21,8 +21,12 @@ job "${job_name}" {
}
task "${job_name}" {
artifact {
- source = "git::https://github.com/FDio/csit"
- destination = "local/csit"
+ source = "https://raw.githubusercontent.com/FDio/csit/master/csit.infra.etl/stats.py"
+ destination = "local/"
+ }
+ artifact {
+ source = "https://raw.githubusercontent.com/FDio/csit/master/csit.infra.etl/stats_sra.json"
+ destination = "local/"
}
driver = "docker"
config {
@@ -33,7 +37,7 @@ job "${job_name}" {
"--executor-memory", "10g",
"stats.py"
]
- work_dir = "/local/csit/csit.infra.etl"
+ work_dir = "/local"
}
env {
AWS_ACCESS_KEY_ID = "${aws_access_key_id}"
diff --git a/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-hoststack.hcl.tftpl b/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-hoststack.hcl.tftpl
index 24aa4095d2..b8f7cc27c4 100644
--- a/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-hoststack.hcl.tftpl
+++ b/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-hoststack.hcl.tftpl
@@ -1,5 +1,5 @@
job "${job_name}" {
- datacenters = "${datacenters}"
+ datacenters = ["${datacenters}"]
type = "${type}"
periodic {
cron = "${cron}"
@@ -21,8 +21,12 @@ job "${job_name}" {
}
task "${job_name}" {
artifact {
- source = "git::https://github.com/FDio/csit"
- destination = "local/csit"
+ source = "https://raw.githubusercontent.com/FDio/csit/master/csit.infra.etl/trending_hoststack.py"
+ destination = "local/"
+ }
+ artifact {
+ source = "https://raw.githubusercontent.com/FDio/csit/master/csit.infra.etl/trending_hoststack.json"
+ destination = "local/"
}
driver = "docker"
config {
@@ -33,7 +37,7 @@ job "${job_name}" {
"--executor-memory", "30g",
"trending_hoststack.py"
]
- work_dir = "/local/csit/csit.infra.etl"
+ work_dir = "/local"
}
env {
AWS_ACCESS_KEY_ID = "${aws_access_key_id}"
@@ -50,4 +54,4 @@ job "${job_name}" {
}
}
}
-} \ No newline at end of file
+}
diff --git a/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-mrr.hcl.tftpl b/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-mrr.hcl.tftpl
index 47d6149eed..d3b301d5b3 100644
--- a/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-mrr.hcl.tftpl
+++ b/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-mrr.hcl.tftpl
@@ -1,5 +1,5 @@
job "${job_name}" {
- datacenters = "${datacenters}"
+ datacenters = ["${datacenters}"]
type = "${type}"
periodic {
cron = "${cron}"
@@ -21,8 +21,12 @@ job "${job_name}" {
}
task "${job_name}" {
artifact {
- source = "git::https://github.com/FDio/csit"
- destination = "local/csit"
+ source = "https://raw.githubusercontent.com/FDio/csit/master/csit.infra.etl/trending_mrr.py"
+ destination = "local/"
+ }
+ artifact {
+ source = "https://raw.githubusercontent.com/FDio/csit/master/csit.infra.etl/trending_mrr.json"
+ destination = "local/"
}
driver = "docker"
config {
@@ -33,7 +37,7 @@ job "${job_name}" {
"--executor-memory", "30g",
"trending_mrr.py"
]
- work_dir = "/local/csit/csit.infra.etl"
+ work_dir = "/local"
}
env {
AWS_ACCESS_KEY_ID = "${aws_access_key_id}"
@@ -50,4 +54,4 @@ job "${job_name}" {
}
}
}
-} \ No newline at end of file
+}
diff --git a/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-ndrpdr.hcl.tftpl b/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-ndrpdr.hcl.tftpl
index 8cd40f537e..53a0aa2393 100644
--- a/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-ndrpdr.hcl.tftpl
+++ b/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-ndrpdr.hcl.tftpl
@@ -1,5 +1,5 @@
job "${job_name}" {
- datacenters = "${datacenters}"
+ datacenters = ["${datacenters}"]
type = "${type}"
periodic {
cron = "${cron}"
@@ -21,8 +21,12 @@ job "${job_name}" {
}
task "${job_name}" {
artifact {
- source = "git::https://github.com/FDio/csit"
- destination = "local/csit"
+ source = "https://raw.githubusercontent.com/FDio/csit/master/csit.infra.etl/trending_ndrpdr.py"
+ destination = "local/"
+ }
+ artifact {
+ source = "https://raw.githubusercontent.com/FDio/csit/master/csit.infra.etl/trending_ndrpdr.json"
+ destination = "local/"
}
driver = "docker"
config {
@@ -35,7 +39,7 @@ job "${job_name}" {
"--master", "local[2]",
"trending_ndrpdr.py"
]
- work_dir = "/local/csit/csit.infra.etl"
+ work_dir = "/local"
}
env {
AWS_ACCESS_KEY_ID = "${aws_access_key_id}"
@@ -52,4 +56,4 @@ job "${job_name}" {
}
}
}
-} \ No newline at end of file
+}
diff --git a/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-soak.hcl.tftpl b/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-soak.hcl.tftpl
index 6d77a898df..b18ce527bc 100644
--- a/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-soak.hcl.tftpl
+++ b/fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl-trending-soak.hcl.tftpl
@@ -1,5 +1,5 @@
job "${job_name}" {
- datacenters = "${datacenters}"
+ datacenters = ["${datacenters}"]
type = "${type}"
periodic {
cron = "${cron}"
@@ -21,8 +21,12 @@ job "${job_name}" {
}
task "${job_name}" {
artifact {
- source = "git::https://github.com/FDio/csit"
- destination = "local/csit"
+ source = "https://raw.githubusercontent.com/FDio/csit/master/csit.infra.etl/trending_soak.py"
+ destination = "local/"
+ }
+ artifact {
+ source = "https://raw.githubusercontent.com/FDio/csit/master/csit.infra.etl/trending_soak.json"
+ destination = "local/"
}
driver = "docker"
config {
@@ -35,7 +39,7 @@ job "${job_name}" {
"--master", "local[2]",
"trending_soak.py"
]
- work_dir = "/local/csit/csit.infra.etl"
+ work_dir = "/local"
}
env {
AWS_ACCESS_KEY_ID = "${aws_access_key_id}"
diff --git a/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/main.tf b/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/main.tf
index 28234bf6c7..026ab168d9 100644
--- a/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/main.tf
+++ b/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/main.tf
@@ -1,23 +1,33 @@
-data "vault_generic_secret" "fdio_logs" {
- path = "kv/secret/data/etl/fdio_logs"
+data "vault_kv_secret_v2" "fdio_logs" {
+ mount = "kv"
+ name = "etl/fdio_logs"
}
-data "vault_generic_secret" "fdio_docs" {
- path = "kv/secret/data/etl/fdio_docs"
+data "vault_kv_secret_v2" "fdio_docs" {
+ mount = "kv"
+ name = "etl/fdio_docs"
}
+#data "vault_kv_secret_v2" "fdio_logs" {
+# path = "kv/data/etl/fdio_logs"
+#}
+#
+#data "vault_kv_secret_v2" "fdio_docs" {
+# path = "kv/data/etl/fdio_docs"
+#}
+
module "etl-stats" {
providers = {
nomad = nomad.yul1
}
source = "../"
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
+ aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data.access_key
+ aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data.secret_key
+ aws_default_region = data.vault_kv_secret_v2.fdio_logs.data.region
+ out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data.access_key
+ out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data.secret_key
+ out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data.region
cron = "0 30 0 * * * *"
datacenters = ["yul1"]
job_name = "etl-stats"
@@ -29,12 +39,12 @@ module "etl-trending-hoststack" {
}
source = "../"
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
+ aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data.access_key
+ aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data.secret_key
+ aws_default_region = data.vault_kv_secret_v2.fdio_logs.data.region
+ out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data.access_key
+ out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data.secret_key
+ out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data.region
cron = "0 30 0 * * * *"
datacenters = ["yul1"]
job_name = "etl-trending-hoststack"
@@ -46,12 +56,12 @@ module "etl-trending-mrr" {
}
source = "../"
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
+ aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data.access_key
+ aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data.secret_key
+ aws_default_region = data.vault_kv_secret_v2.fdio_logs.data.region
+ out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data.access_key
+ out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data.secret_key
+ out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data.region
cron = "0 30 0 * * * *"
datacenters = ["yul1"]
job_name = "etl-trending-mrr"
@@ -64,12 +74,12 @@ module "etl-trending-ndrpdr" {
}
source = "../"
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
+ aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data.access_key
+ aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data.secret_key
+ aws_default_region = data.vault_kv_secret_v2.fdio_logs.data.region
+ out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data.access_key
+ out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data.secret_key
+ out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data.region
cron = "0 30 0 * * * *"
datacenters = ["yul1"]
job_name = "etl-trending-ndrpdr"
@@ -82,202 +92,201 @@ module "etl-trending-soak" {
}
source = "../"
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
+ aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data.access_key
+ aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data.secret_key
+ aws_default_region = data.vault_kv_secret_v2.fdio_logs.data.region
+ out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data.access_key
+ out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data.secret_key
+ out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data.region
cron = "0 30 0 * * * *"
datacenters = ["yul1"]
job_name = "etl-trending-soak"
memory = 60000
}
-module "etl-iterative-hoststack-rls2406" {
- providers = {
- nomad = nomad.yul1
- }
- source = "../"
-
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
- cron = "0 30 0 * * * *"
- datacenters = ["yul1"]
- job_name = "etl-iterative-hoststack-rls2406"
-}
-
-module "etl-iterative-mrr-rls2406" {
- providers = {
- nomad = nomad.yul1
- }
- source = "../"
-
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
- cron = "0 30 0 * * * *"
- datacenters = ["yul1"]
- job_name = "etl-iterative-mrr-rls2406"
-}
-
-module "etl-iterative-ndrpdr-rls2406" {
- providers = {
- nomad = nomad.yul1
- }
- source = "../"
-
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
- cron = "0 30 0 * * * *"
- datacenters = ["yul1"]
- job_name = "etl-iterative-ndrpdr-rls2406"
-}
-
-module "etl-iterative-reconf-rls2406" {
- providers = {
- nomad = nomad.yul1
- }
- source = "../"
-
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
- cron = "0 30 0 * * * *"
- datacenters = ["yul1"]
- job_name = "etl-iterative-reconf-rls2406"
-}
-
-module "etl-iterative-soak-rls2406" {
- providers = {
- nomad = nomad.yul1
- }
- source = "../"
-
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
- cron = "0 30 0 * * * *"
- datacenters = ["yul1"]
- job_name = "etl-iterative-soak-rls2406"
-}
-
-module "etl-coverage-device-rls2406" {
- providers = {
- nomad = nomad.yul1
- }
- source = "../"
-
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
- cron = "0 30 0 * * * *"
- datacenters = ["yul1"]
- job_name = "etl-coverage-device-rls2406"
-}
-
-module "etl-coverage-hoststack-rls2406" {
- providers = {
- nomad = nomad.yul1
- }
- source = "../"
-
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
- cron = "0 30 0 * * * *"
- datacenters = ["yul1"]
- job_name = "etl-coverage-hoststack-rls2406"
-}
-
-module "etl-coverage-mrr-rls2406" {
- providers = {
- nomad = nomad.yul1
- }
- source = "../"
-
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
- cron = "0 30 0 * * * *"
- datacenters = ["yul1"]
- job_name = "etl-coverage-mrr-rls2406"
-}
-
-module "etl-coverage-ndrpdr-rls2406" {
- providers = {
- nomad = nomad.yul1
- }
- source = "../"
-
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
- cron = "0 30 0 * * * *"
- datacenters = ["yul1"]
- job_name = "etl-coverage-ndrpdr-rls2406"
-}
-
-module "etl-coverage-reconf-rls2406" {
- providers = {
- nomad = nomad.yul1
- }
- source = "../"
-
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
- cron = "0 30 0 * * * *"
- datacenters = ["yul1"]
- job_name = "etl-coverage-reconf-rls2406"
-}
-
-module "etl-coverage-soak-rls2406" {
- providers = {
- nomad = nomad.yul1
- }
- source = "../"
-
- aws_access_key_id = data.vault_generic_secret.fdio_logs.data["access_key"]
- aws_secret_access_key = data.vault_generic_secret.fdio_logs.data["secret_key"]
- aws_default_region = data.vault_generic_secret.fdio_logs.data["region"]
- out_aws_access_key_id = data.vault_generic_secret.fdio_docs.data["access_key"]
- out_aws_secret_access_key = data.vault_generic_secret.fdio_docs.data["secret_key"]
- out_aws_default_region = data.vault_generic_secret.fdio_docs.data["region"]
- cron = "0 30 0 * * * *"
- datacenters = ["yul1"]
- job_name = "etl-coverage-soak-rls2406"
-}
-
+#module "etl-iterative-hoststack-rls2406" {
+# providers = {
+# nomad = nomad.yul1
+# }
+# source = "../"
+#
+# aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data["access_key"]
+# aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data["secret_key"]
+# aws_default_region = data.vault_kv_secret_v2.fdio_logs.data["region"]
+# out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data["access_key"]
+# out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data["secret_key"]
+# out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data["region"]
+# cron = "0 30 0 * * * *"
+# datacenters = ["yul1"]
+# job_name = "etl-iterative-hoststack-rls2406"
+#}
+#
+#module "etl-iterative-mrr-rls2406" {
+# providers = {
+# nomad = nomad.yul1
+# }
+# source = "../"
+#
+# aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data["access_key"]
+# aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data["secret_key"]
+# aws_default_region = data.vault_kv_secret_v2.fdio_logs.data["region"]
+# out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data["access_key"]
+# out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data["secret_key"]
+# out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data["region"]
+# cron = "0 30 0 * * * *"
+# datacenters = ["yul1"]
+# job_name = "etl-iterative-mrr-rls2406"
+#}
+#
+#module "etl-iterative-ndrpdr-rls2406" {
+# providers = {
+# nomad = nomad.yul1
+# }
+# source = "../"
+#
+# aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data["access_key"]
+# aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data["secret_key"]
+# aws_default_region = data.vault_kv_secret_v2.fdio_logs.data["region"]
+# out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data["access_key"]
+# out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data["secret_key"]
+# out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data["region"]
+# cron = "0 30 0 * * * *"
+# datacenters = ["yul1"]
+# job_name = "etl-iterative-ndrpdr-rls2406"
+#}
+#
+#module "etl-iterative-reconf-rls2406" {
+# providers = {
+# nomad = nomad.yul1
+# }
+# source = "../"
+#
+# aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data["access_key"]
+# aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data["secret_key"]
+# aws_default_region = data.vault_kv_secret_v2.fdio_logs.data["region"]
+# out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data["access_key"]
+# out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data["secret_key"]
+# out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data["region"]
+# cron = "0 30 0 * * * *"
+# datacenters = ["yul1"]
+# job_name = "etl-iterative-reconf-rls2406"
+#}
+#
+#module "etl-iterative-soak-rls2406" {
+# providers = {
+# nomad = nomad.yul1
+# }
+# source = "../"
+#
+# aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data["access_key"]
+# aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data["secret_key"]
+# aws_default_region = data.vault_kv_secret_v2.fdio_logs.data["region"]
+# out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data["access_key"]
+# out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data["secret_key"]
+# out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data["region"]
+# cron = "0 30 0 * * * *"
+# datacenters = ["yul1"]
+# job_name = "etl-iterative-soak-rls2406"
+#}
+#
+#module "etl-coverage-device-rls2406" {
+# providers = {
+# nomad = nomad.yul1
+# }
+# source = "../"
+#
+# aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data["access_key"]
+# aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data["secret_key"]
+# aws_default_region = data.vault_kv_secret_v2.fdio_logs.data["region"]
+# out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data["access_key"]
+# out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data["secret_key"]
+# out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data["region"]
+# cron = "0 30 0 * * * *"
+# datacenters = ["yul1"]
+# job_name = "etl-coverage-device-rls2406"
+#}
+#
+#module "etl-coverage-hoststack-rls2406" {
+# providers = {
+# nomad = nomad.yul1
+# }
+# source = "../"
+#
+# aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data["access_key"]
+# aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data["secret_key"]
+# aws_default_region = data.vault_kv_secret_v2.fdio_logs.data["region"]
+# out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data["access_key"]
+# out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data["secret_key"]
+# out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data["region"]
+# cron = "0 30 0 * * * *"
+# datacenters = ["yul1"]
+# job_name = "etl-coverage-hoststack-rls2406"
+#}
+#
+#module "etl-coverage-mrr-rls2406" {
+# providers = {
+# nomad = nomad.yul1
+# }
+# source = "../"
+#
+# aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data["access_key"]
+# aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data["secret_key"]
+# aws_default_region = data.vault_kv_secret_v2.fdio_logs.data["region"]
+# out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data["access_key"]
+# out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data["secret_key"]
+# out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data["region"]
+# cron = "0 30 0 * * * *"
+# datacenters = ["yul1"]
+# job_name = "etl-coverage-mrr-rls2406"
+#}
+#
+#module "etl-coverage-ndrpdr-rls2406" {
+# providers = {
+# nomad = nomad.yul1
+# }
+# source = "../"
+#
+# aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data["access_key"]
+# aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data["secret_key"]
+# aws_default_region = data.vault_kv_secret_v2.fdio_logs.data["region"]
+# out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data["access_key"]
+# out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data["secret_key"]
+# out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data["region"]
+# cron = "0 30 0 * * * *"
+# datacenters = ["yul1"]
+# job_name = "etl-coverage-ndrpdr-rls2406"
+#}
+#
+#module "etl-coverage-reconf-rls2406" {
+# providers = {
+# nomad = nomad.yul1
+# }
+# source = "../"
+#
+# aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data["access_key"]
+# aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data["secret_key"]
+# aws_default_region = data.vault_kv_secret_v2.fdio_logs.data["region"]
+# out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data["access_key"]
+# out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data["secret_key"]
+# out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data["region"]
+# cron = "0 30 0 * * * *"
+# datacenters = ["yul1"]
+# job_name = "etl-coverage-reconf-rls2406"
+#}
+#
+#module "etl-coverage-soak-rls2406" {
+# providers = {
+# nomad = nomad.yul1
+# }
+# source = "../"
+#
+# aws_access_key_id = data.vault_kv_secret_v2.fdio_logs.data["access_key"]
+# aws_secret_access_key = data.vault_kv_secret_v2.fdio_logs.data["secret_key"]
+# aws_default_region = data.vault_kv_secret_v2.fdio_logs.data["region"]
+# out_aws_access_key_id = data.vault_kv_secret_v2.fdio_docs.data["access_key"]
+# out_aws_secret_access_key = data.vault_kv_secret_v2.fdio_docs.data["secret_key"]
+# out_aws_default_region = data.vault_kv_secret_v2.fdio_docs.data["region"]
+# cron = "0 30 0 * * * *"
+# datacenters = ["yul1"]
+# job_name = "etl-coverage-soak-rls2406"
+#}
diff --git a/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/variables.tf b/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/variables.tf
index db24bdf0fa..60298d4c99 100644
--- a/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/variables.tf
+++ b/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/variables.tf
@@ -19,13 +19,13 @@ variable "nomad_provider_ca_file" {
variable "nomad_provider_cert_file" {
description = "A local file path to a PEM-encoded certificate."
type = string
- default = "/etc/nomad.d/ssl/nomad-cli.pem"
+ default = "/etc/nomad.d/ssl/nomad.pem"
}
variable "nomad_provider_key_file" {
description = "A local file path to a PEM-encoded private key."
type = string
- default = "/etc/nomad.d/ssl/nomad-cli-key.pem"
+ default = "/etc/nomad.d/ssl/nomad-key.pem"
}
variable "vault_provider_address" {
diff --git a/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/versions.tf b/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/versions.tf
index 0c05e76d65..ffe25bb42e 100644
--- a/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/versions.tf
+++ b/fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/versions.tf
@@ -7,10 +7,10 @@ terraform {
required_providers {
nomad = {
source = "hashicorp/nomad"
- version = ">= 1.4.20"
+ version = ">= 2.3.0"
}
vault = {
- version = ">= 3.12.0"
+ version = ">= 4.3.0"
}
}
required_version = ">= 1.5.4"
diff --git a/fdio.infra.terraform/terraform-nomad-pyspark-etl/variables.tf b/fdio.infra.terraform/terraform-nomad-pyspark-etl/variables.tf
index f6d318e855..86d1b45753 100644
--- a/fdio.infra.terraform/terraform-nomad-pyspark-etl/variables.tf
+++ b/fdio.infra.terraform/terraform-nomad-pyspark-etl/variables.tf
@@ -106,9 +106,9 @@ variable "vault_secret" {
})
description = "Set of properties to be able to fetch secret from vault."
default = {
- use_vault_provider = false
+ use_vault_provider = true
vault_kv_policy_name = "kv"
- vault_kv_path = "secret/data/etl"
+ vault_kv_path = "data/etl"
vault_kv_field_access_key = "access_key"
vault_kv_field_secret_key = "secret_key"
}