Terraform: Cleanup
diff --git a/fdio.infra.terraform/1n_nmd/main.tf b/fdio.infra.terraform/1n_nmd/main.tf
index d48f12a..a8a1bb9 100644
--- a/fdio.infra.terraform/1n_nmd/main.tf
+++ b/fdio.infra.terraform/1n_nmd/main.tf
@@ -5,24 +5,24 @@
 # and have them automatically associated with the root provider
 # configurations.
 module "alertmanager" {
-  source                             = "./alertmanager"
-  providers                          = {
+  source = "./alertmanager"
+  providers = {
     nomad = nomad.yul1
   }
 
   # nomad
-  nomad_datacenters                  = [ "yul1" ]
+  nomad_datacenters = ["yul1"]
 
   # alertmanager
-  alertmanager_job_name              = "prod-alertmanager"
-  alertmanager_use_canary            = true
-  alertmanager_group_count           = 1
-  alertmanager_vault_secret          = {
-    use_vault_provider               = false,
-    vault_kv_policy_name             = "kv-secret",
-    vault_kv_path                    = "secret/data/prometheus",
-    vault_kv_field_access_key        = "access_key",
-    vault_kv_field_secret_key        = "secret_key"
+  alertmanager_job_name    = "prod-alertmanager"
+  alertmanager_use_canary  = true
+  alertmanager_group_count = 1
+  alertmanager_vault_secret = {
+    use_vault_provider        = false,
+    vault_kv_policy_name      = "kv-secret",
+    vault_kv_path             = "secret/data/prometheus",
+    vault_kv_field_access_key = "access_key",
+    vault_kv_field_secret_key = "secret_key"
   }
   alertmanager_version               = "0.21.0"
   alertmanager_cpu                   = 1000
@@ -35,131 +35,131 @@ module "alertmanager" {
 }
 
 module "grafana" {
-  source                         = "./grafana"
-  providers                      = {
+  source = "./grafana"
+  providers = {
     nomad = nomad.yul1
   }
 
   # nomad
-  nomad_datacenters              = [ "yul1" ]
+  nomad_datacenters = ["yul1"]
 
   # grafana
-  grafana_job_name               = "prod-grafana"
-  grafana_use_canary             = true
-  grafana_group_count            = 1
-  grafana_vault_secret           = {
-    use_vault_provider           = false,
-    vault_kv_policy_name         = "kv-secret",
-    vault_kv_path                = "secret/data/grafana",
-    vault_kv_field_access_key    = "access_key",
-    vault_kv_field_secret_key    = "secret_key"
+  grafana_job_name    = "prod-grafana"
+  grafana_use_canary  = true
+  grafana_group_count = 1
+  grafana_vault_secret = {
+    use_vault_provider        = false,
+    vault_kv_policy_name      = "kv-secret",
+    vault_kv_path             = "secret/data/grafana",
+    vault_kv_field_access_key = "access_key",
+    vault_kv_field_secret_key = "secret_key"
   }
-  grafana_container_image        = "grafana/grafana:7.3.7"
-  grafana_cpu                    = 1000
-  grafana_mem                    = 2048
-  grafana_port                   = 3000
+  grafana_container_image = "grafana/grafana:7.3.7"
+  grafana_cpu             = 1000
+  grafana_mem             = 2048
+  grafana_port            = 3000
 }
 
 module "minio" {
-  source                         = "./minio"
-  providers                      = {
+  source = "./minio"
+  providers = {
     nomad = nomad.yul1
   }
 
   # nomad
-  nomad_datacenters              = [ "yul1" ]
-  nomad_host_volume              = "prod-volume-data1-1"
+  nomad_datacenters = ["yul1"]
+  nomad_host_volume = "prod-volume-data1-1"
 
   # minio
-  minio_job_name                 = "prod-minio"
-  minio_group_count              = 4
-  minio_service_name             = "storage"
-  minio_host                     = "http://10.32.8.1{4...7}"
-  minio_port                     = 9000
-  minio_container_image          = "minio/minio:RELEASE.2021-07-27T02-40-15Z"
-  minio_vault_secret             = {
-    use_vault_provider           = false,
-    vault_kv_policy_name         = "kv-secret",
-    vault_kv_path                = "secret/data/minio",
-    vault_kv_field_access_key    = "access_key",
-    vault_kv_field_secret_key    = "secret_key"
+  minio_job_name        = "prod-minio"
+  minio_group_count     = 4
+  minio_service_name    = "storage"
+  minio_host            = "http://10.32.8.1{4...7}"
+  minio_port            = 9000
+  minio_container_image = "minio/minio:RELEASE.2021-07-27T02-40-15Z"
+  minio_vault_secret = {
+    use_vault_provider        = false,
+    vault_kv_policy_name      = "kv-secret",
+    vault_kv_path             = "secret/data/minio",
+    vault_kv_field_access_key = "access_key",
+    vault_kv_field_secret_key = "secret_key"
   }
-  minio_data_dir                 = "/data/"
-  minio_use_host_volume          = true
-  minio_use_canary               = true
-  minio_envs                     = [ "MINIO_BROWSER=\"off\"" ]
+  minio_data_dir        = "/data/"
+  minio_use_host_volume = true
+  minio_use_canary      = true
+  minio_envs            = ["MINIO_BROWSER=\"off\""]
 
   # minio client
-  mc_job_name                    = "prod-mc"
-  mc_container_image             = "minio/mc:RELEASE.2021-07-27T02-40-15Z"
-  mc_extra_commands              = [
+  mc_job_name        = "prod-mc"
+  mc_container_image = "minio/mc:RELEASE.2021-07-27T02-40-15Z"
+  mc_extra_commands = [
     "mc policy set public LOCALMINIO/logs.fd.io",
     "mc policy set public LOCALMINIO/docs.fd.io",
     "mc ilm add --expiry-days '180' LOCALMINIO/logs.fd.io",
     "mc admin user add LOCALMINIO storage Storage1234",
     "mc admin policy set LOCALMINIO writeonly user=storage"
   ]
-  minio_buckets                  = [ "logs.fd.io", "docs.fd.io" ]
+  minio_buckets = ["logs.fd.io", "docs.fd.io"]
 }
 
 module "nginx" {
-  source                         = "./nginx"
-  providers                      = {
+  source = "./nginx"
+  providers = {
     nomad = nomad.yul1
   }
 
   # nomad
-  nomad_datacenters              = [ "yul1" ]
-  nomad_host_volume              = "prod-volume-data1-1"
+  nomad_datacenters = ["yul1"]
+  nomad_host_volume = "prod-volume-data1-1"
 
   # nginx
-  nginx_job_name                 = "prod-nginx"
-  nginx_use_host_volume          = true
+  nginx_job_name        = "prod-nginx"
+  nginx_use_host_volume = true
 }
 
 module "prometheus" {
-  source                         = "./prometheus"
-  providers                      = {
+  source = "./prometheus"
+  providers = {
     nomad = nomad.yul1
   }
 
   # nomad
-  nomad_datacenters              = [ "yul1" ]
-  nomad_host_volume              = "prod-volume-data1-1"
+  nomad_datacenters = ["yul1"]
+  nomad_host_volume = "prod-volume-data1-1"
 
   # prometheus
-  prometheus_job_name            = "prod-prometheus"
-  prometheus_use_canary          = true
-  prometheus_group_count         = 4
-  prometheus_vault_secret        = {
-    use_vault_provider           = false,
-    vault_kv_policy_name         = "kv-secret",
-    vault_kv_path                = "secret/data/prometheus",
-    vault_kv_field_access_key    = "access_key",
-    vault_kv_field_secret_key    = "secret_key"
+  prometheus_job_name    = "prod-prometheus"
+  prometheus_use_canary  = true
+  prometheus_group_count = 4
+  prometheus_vault_secret = {
+    use_vault_provider        = false,
+    vault_kv_policy_name      = "kv-secret",
+    vault_kv_path             = "secret/data/prometheus",
+    vault_kv_field_access_key = "access_key",
+    vault_kv_field_secret_key = "secret_key"
   }
-  prometheus_data_dir            = "/data/"
-  prometheus_use_host_volume     = true
-  prometheus_version             = "2.28.1"
-  prometheus_cpu                 = 2000
-  prometheus_mem                 = 8192
-  prometheus_port                = 9090
+  prometheus_data_dir        = "/data/"
+  prometheus_use_host_volume = true
+  prometheus_version         = "2.28.1"
+  prometheus_cpu             = 2000
+  prometheus_mem             = 8192
+  prometheus_port            = 9090
 }
 
 module "vpp_device" {
-  source                         = "./vpp_device"
-  providers                      = {
+  source = "./vpp_device"
+  providers = {
     nomad = nomad.yul1
   }
 
   # nomad
-  nomad_datacenters              = [ "yul1" ]
+  nomad_datacenters = ["yul1"]
 
   # csit_shim
-  csit_shim_job_name             = "prod-device-csit-shim"
-  csit_shim_group_count          = "1"
-  csit_shim_cpu                  = "1500"
-  csit_shim_mem                  = "4096"
-  csit_shim_image_aarch64        = "fdiotools/csit_shim-ubuntu2004:2021_03_02_143938_UTC-aarch64"
-  csit_shim_image_x86_64         = "fdiotools/csit_shim-ubuntu2004:2021_03_04_142103_UTC-x86_64"
+  csit_shim_job_name      = "prod-device-csit-shim"
+  csit_shim_group_count   = "1"
+  csit_shim_cpu           = "1500"
+  csit_shim_mem           = "4096"
+  csit_shim_image_aarch64 = "fdiotools/csit_shim-ubuntu2004:2021_03_02_143938_UTC-aarch64"
+  csit_shim_image_x86_64  = "fdiotools/csit_shim-ubuntu2004:2021_03_04_142103_UTC-x86_64"
 }