# Root Nomad deployment configuration for the FD.io CSIT 1-node
# infrastructure (datacenter "yul1").
#
# NOTE(review): the previous revision of this file was committed with
# unresolved git merge-conflict markers (<<<<<<< / ======= / >>>>>>>),
# which made it invalid HCL. This version resolves the conflict in favor
# of the monitoring-capability change, keeping all module arguments
# byte-identical.
#
# For convenience in simple configurations, a child module automatically
# inherits default (un-aliased) provider configurations from its parent.
# This means that explicit provider blocks appear only in the root module,
# and downstream modules can simply declare resources for that provider
# and have them automatically associated with the root provider
# configurations. Here the aliased provider "nomad.yul1" is passed
# explicitly to every module via the "providers" meta-argument.

# Alertmanager: handles alerts sent by Prometheus (deduplication,
# grouping, routing to Slack).
module "alertmanager" {
  source = "./alertmanager"
  providers = {
    nomad = nomad.yul1
  }

  # nomad
  nomad_datacenters = [ "yul1" ]

  # alertmanager
  alertmanager_job_name      = "prod-alertmanager"
  alertmanager_use_canary    = true
  alertmanager_group_count   = 1
  alertmanager_vault_secret  = {
    use_vault_provider        = false,
    vault_kv_policy_name      = "kv-secret",
    vault_kv_path             = "secret/data/prometheus",
    vault_kv_field_access_key = "access_key",
    vault_kv_field_secret_key = "secret_key"
  }
  alertmanager_version       = "0.21.0"
  alertmanager_cpu           = 1000
  alertmanager_mem           = 1024
  alertmanager_port          = 9093
  # NOTE(review): this Slack webhook URL is a credential committed in
  # plain text; it should be rotated and sourced from Vault instead.
  alertmanager_slack_api_url = "https://hooks.slack.com/services/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"
  alertmanager_slack_channel = "fdio-infra-monitoring"
}

# Exporters: node-exporter, blackbox-exporter and cAdvisor providing
# metrics endpoints scraped by Prometheus.
module "exporter" {
  source = "./exporter"
  providers = {
    nomad = nomad.yul1
  }

  # nomad
  nomad_datacenters = [ "yul1" ]

  # exporter
  exporter_job_name   = "prod-exporter"
  exporter_use_canary = false

  # node
  node_version = "1.0.1"
  node_port    = 9100

  # blackbox
  blackbox_version = "0.18.0"
  blackbox_port    = 9115

  # cadvisor
  # NOTE(review): ":latest" tag is not reproducible — consider pinning a
  # specific cAdvisor release like the other images in this file.
  cadvisor_image = "gcr.io/cadvisor/cadvisor:latest"
  cadvisor_port  = 8080
}

# Grafana: dashboards on top of the Prometheus datasource.
module "grafana" {
  source = "./grafana"
  providers = {
    nomad = nomad.yul1
  }

  # nomad
  nomad_datacenters = [ "yul1" ]

  # grafana
  grafana_job_name        = "prod-grafana"
  grafana_use_canary      = true
  grafana_group_count     = 1
  grafana_vault_secret    = {
    use_vault_provider        = false,
    vault_kv_policy_name      = "kv-secret",
    vault_kv_path             = "secret/data/prometheus",
    vault_kv_field_access_key = "access_key",
    vault_kv_field_secret_key = "secret_key"
  }
  grafana_container_image = "grafana/grafana:7.3.7"
  grafana_cpu             = 2000
  grafana_mem             = 2048
  grafana_port            = 3000
}

# MinIO: S3-compatible object storage (4-node distributed set), plus a
# one-shot MinIO client (mc) job that configures buckets and policies.
module "minio" {
  source = "./minio"
  providers = {
    nomad = nomad.yul1
  }

  # nomad
  nomad_datacenters = [ "yul1" ]
  nomad_host_volume = "prod-volume-data1-1"

  # minio
  minio_job_name        = "prod-minio"
  minio_group_count     = 4
  minio_service_name    = "storage"
  minio_host            = "http://10.32.8.1{4...7}"
  minio_port            = 9000
  minio_container_image = "minio/minio:RELEASE.2020-12-03T05-49-24Z"
  minio_vault_secret    = {
    use_vault_provider        = false,
    vault_kv_policy_name      = "kv-secret",
    vault_kv_path             = "secret/data/minio",
    vault_kv_field_access_key = "access_key",
    vault_kv_field_secret_key = "secret_key"
  }
  minio_data_dir        = "/data/"
  minio_use_host_volume = true
  minio_use_canary      = true
  minio_envs            = [ "MINIO_BROWSER=\"off\"" ]

  # minio client
  mc_job_name        = "prod-mc"
  mc_container_image = "minio/mc:RELEASE.2020-12-10T01-26-17Z"
  # NOTE(review): the "storage" user password below is committed in plain
  # text; it should be rotated and sourced from Vault instead.
  mc_extra_commands  = [
    "mc policy set public LOCALMINIO/logs.fd.io",
    "mc policy set public LOCALMINIO/docs.fd.io",
    "mc ilm add --expiry-days '180' LOCALMINIO/logs.fd.io",
    "mc admin user add LOCALMINIO storage Storage1234",
    "mc admin policy set LOCALMINIO writeonly user=storage"
  ]
  minio_buckets      = [ "logs.fd.io", "docs.fd.io" ]
}

# Nginx: reverse proxy / web frontend job.
module "nginx" {
  source = "./nginx"
  providers = {
    nomad = nomad.yul1
  }

  # nomad
  nomad_datacenters = [ "yul1" ]

  # nginx
  nginx_job_name = "prod-nginx"
}

# Prometheus: metrics collection and alert evaluation, persisted on the
# shared host volume.
module "prometheus" {
  source = "./prometheus"
  providers = {
    nomad = nomad.yul1
  }

  # nomad
  nomad_datacenters = [ "yul1" ]
  nomad_host_volume = "prod-volume-data1-1"

  # prometheus
  prometheus_job_name        = "prod-prometheus"
  prometheus_use_canary      = true
  prometheus_group_count     = 4
  prometheus_vault_secret    = {
    use_vault_provider        = false,
    vault_kv_policy_name      = "kv-secret",
    vault_kv_path             = "secret/data/prometheus",
    vault_kv_field_access_key = "access_key",
    vault_kv_field_secret_key = "secret_key"
  }
  prometheus_data_dir        = "/data/"
  prometheus_use_host_volume = true
  prometheus_version         = "2.24.0"
  prometheus_cpu             = 2000
  prometheus_mem             = 8192
  prometheus_port            = 9090
}

# vpp_device: CSIT shim containers for vpp_device test execution.
module "vpp_device" {
  source = "./vpp_device"
  providers = {
    nomad = nomad.yul1
  }

  # nomad
  nomad_datacenters = [ "yul1" ]

  # csit_shim
  csit_shim_job_name    = "prod-device-csit-shim"
  csit_shim_group_count = "1"
  csit_shim_cpu         = "1000"
  csit_shim_mem         = "5000"
}