refactor(terraform): Directory structure 46/35846/4
author Peter Mikus <pmikus@cisco.com>
Thu, 31 Mar 2022 13:59:10 +0000 (15:59 +0200)
committer Peter Mikus <pmikus@cisco.com>
Fri, 1 Apr 2022 10:54:23 +0000 (10:54 +0000)
Signed-off-by: Peter Mikus <pmikus@cisco.com>
Change-Id: I2f3cdf0241aaf7c4a8ba4e00b701df10c9165cf8

48 files changed:
fdio.infra.terraform/1n_nmd/main.tf
fdio.infra.terraform/1n_nmd/minio_s3_gateway/main.tf [deleted file]
fdio.infra.terraform/1n_nmd/minio_s3_gateway/versions.tf [deleted file]
fdio.infra.terraform/terraform-aws-fdio-csit-dash-app-base/main.tf [moved from fdio.infra.terraform/1n_aws_t3/fdio-csit-dash-app-base/main.tf with 79% similarity]
fdio.infra.terraform/terraform-aws-fdio-csit-dash-app-base/output.tf [moved from fdio.infra.terraform/1n_aws_t3/fdio-csit-dash-app-base/output.tf with 100% similarity]
fdio.infra.terraform/terraform-aws-fdio-csit-dash-app-base/providers.tf [moved from fdio.infra.terraform/1n_aws_t3/fdio-csit-dash-app-base/providers.tf with 100% similarity]
fdio.infra.terraform/terraform-aws-fdio-csit-dash-app-base/variables.tf [moved from fdio.infra.terraform/1n_aws_t3/fdio-csit-dash-app-base/variables.tf with 100% similarity]
fdio.infra.terraform/terraform-aws-fdio-csit-dash-app-base/versions.tf [moved from fdio.infra.terraform/1n_aws_t3/fdio-csit-dash-app-base/versions.tf with 100% similarity]
fdio.infra.terraform/terraform-aws-fdio-csit-dash-env/main.tf [moved from fdio.infra.terraform/1n_aws_t3/fdio-csit-dash-env/main.tf with 96% similarity]
fdio.infra.terraform/terraform-aws-fdio-csit-dash-env/output.tf [moved from fdio.infra.terraform/1n_aws_t3/fdio-csit-dash-env/output.tf with 100% similarity]
fdio.infra.terraform/terraform-aws-fdio-csit-dash-env/providers.tf [moved from fdio.infra.terraform/1n_aws_t3/fdio-csit-dash-env/providers.tf with 100% similarity]
fdio.infra.terraform/terraform-aws-fdio-csit-dash-env/variables.tf [moved from fdio.infra.terraform/1n_aws_t3/fdio-csit-dash-env/variables.tf with 100% similarity]
fdio.infra.terraform/terraform-aws-fdio-csit-dash-env/versions.tf [moved from fdio.infra.terraform/1n_aws_t3/fdio-csit-dash-env/versions.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-alertmanager/conf/nomad/alertmanager.hcl.tftpl [moved from fdio.infra.terraform/1n_nmd/alertmanager/conf/nomad/alertmanager.hcl.tftpl with 100% similarity]
fdio.infra.terraform/terraform-nomad-alertmanager/fdio/main.tf [moved from fdio.infra.terraform/1n_nmd/alertmanager/fdio/main.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-alertmanager/fdio/providers.tf [moved from fdio.infra.terraform/1n_nmd/alertmanager/fdio/providers.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-alertmanager/fdio/variables.tf [moved from fdio.infra.terraform/1n_nmd/alertmanager/fdio/variables.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-alertmanager/fdio/versions.tf [moved from fdio.infra.terraform/1n_nmd/alertmanager/fdio/versions.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-alertmanager/main.tf [moved from fdio.infra.terraform/1n_nmd/alertmanager/main.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-alertmanager/variables.tf [moved from fdio.infra.terraform/1n_nmd/alertmanager/variables.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-alertmanager/versions.tf [moved from fdio.infra.terraform/1n_nmd/alertmanager/versions.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-loki/conf/nomad/loki.hcl.tftpl [moved from fdio.infra.terraform/1n_nmd/minio_s3_gateway/conf/nomad/minio.hcl with 78% similarity]
fdio.infra.terraform/terraform-nomad-loki/main.tf [new file with mode: 0644]
fdio.infra.terraform/terraform-nomad-loki/variables.tf [moved from fdio.infra.terraform/1n_nmd/minio_s3_gateway/variables.tf with 54% similarity]
fdio.infra.terraform/terraform-nomad-loki/versions.tf [moved from fdio.infra.terraform/1n_nmd/etl/versions.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-prometheus/conf/nomad/prometheus.hcl.tftpl [moved from fdio.infra.terraform/1n_nmd/prometheus/conf/nomad/prometheus.hcl.tftpl with 98% similarity]
fdio.infra.terraform/terraform-nomad-prometheus/fdio/main.tf [moved from fdio.infra.terraform/1n_nmd/prometheus/fdio/main.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-prometheus/fdio/providers.tf [moved from fdio.infra.terraform/1n_nmd/prometheus/fdio/providers.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-prometheus/fdio/variables.tf [moved from fdio.infra.terraform/1n_nmd/prometheus/fdio/variables.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-prometheus/fdio/versions.tf [moved from fdio.infra.terraform/1n_nmd/prometheus/fdio/versions.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-prometheus/main.tf [moved from fdio.infra.terraform/1n_nmd/prometheus/main.tf with 94% similarity]
fdio.infra.terraform/terraform-nomad-prometheus/variables.tf [moved from fdio.infra.terraform/1n_nmd/prometheus/variables.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-prometheus/versions.tf [moved from fdio.infra.terraform/1n_nmd/prometheus/versions.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-pyspark-etl/conf/nomad/etl.hcl.tftpl [moved from fdio.infra.terraform/1n_nmd/etl/conf/nomad/etl.hcl.tftpl with 100% similarity]
fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/main.tf [moved from fdio.infra.terraform/1n_nmd/etl/fdio/main.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/providers.tf [moved from fdio.infra.terraform/1n_nmd/etl/fdio/providers.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/variables.tf [moved from fdio.infra.terraform/1n_nmd/etl/fdio/variables.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-pyspark-etl/fdio/versions.tf [moved from fdio.infra.terraform/1n_nmd/etl/fdio/versions.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-pyspark-etl/main.tf [moved from fdio.infra.terraform/1n_nmd/etl/main.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-pyspark-etl/variables.tf [moved from fdio.infra.terraform/1n_nmd/etl/variables.tf with 100% similarity]
fdio.infra.terraform/terraform-nomad-pyspark-etl/versions.tf [new file with mode: 0644]
fdio.infra.terraform/terraform-vault-aws-secret-backend/fdio/main.tf [moved from fdio.infra.terraform/1n_nmd/vault-aws-secret-backend/fdio/main.tf with 100% similarity]
fdio.infra.terraform/terraform-vault-aws-secret-backend/fdio/providers.tf [moved from fdio.infra.terraform/1n_nmd/vault-aws-secret-backend/fdio/providers.tf with 100% similarity]
fdio.infra.terraform/terraform-vault-aws-secret-backend/fdio/variables.tf [moved from fdio.infra.terraform/1n_nmd/vault-aws-secret-backend/fdio/variables.tf with 100% similarity]
fdio.infra.terraform/terraform-vault-aws-secret-backend/fdio/versions.tf [moved from fdio.infra.terraform/1n_nmd/vault-aws-secret-backend/fdio/versions.tf with 100% similarity]
fdio.infra.terraform/terraform-vault-aws-secret-backend/main.tf [moved from fdio.infra.terraform/1n_nmd/vault-aws-secret-backend/main.tf with 100% similarity]
fdio.infra.terraform/terraform-vault-aws-secret-backend/variables.tf [moved from fdio.infra.terraform/1n_nmd/vault-aws-secret-backend/variables.tf with 100% similarity]
fdio.infra.terraform/terraform-vault-aws-secret-backend/versions.tf [moved from fdio.infra.terraform/1n_nmd/vault-aws-secret-backend/versions.tf with 100% similarity]

index 7cdd245..24d5ff3 100644 (file)
@@ -4,31 +4,6 @@
 # and downstream modules can simply declare resources for that provider
 # and have them automatically associated with the root provider
 # configurations.
-module "grafana" {
-  source = "./grafana"
-  providers = {
-    nomad = nomad.yul1
-  }
-
-  # nomad
-  nomad_datacenters = ["yul1"]
-
-  # grafana
-  grafana_job_name    = "prod-grafana"
-  grafana_use_canary  = true
-  grafana_group_count = 1
-  grafana_vault_secret = {
-    use_vault_provider        = false,
-    vault_kv_policy_name      = "kv-secret",
-    vault_kv_path             = "secret/data/grafana",
-    vault_kv_field_access_key = "access_key",
-    vault_kv_field_secret_key = "secret_key"
-  }
-  grafana_container_image = "grafana/grafana:7.3.7"
-  grafana_cpu             = 1000
-  grafana_mem             = 2048
-  grafana_port            = 3000
-}
 
 #module "minio" {
 #  source = "./minio"
@@ -66,41 +41,6 @@ data "vault_generic_secret" "minio_creds" {
   path = "kv/secret/data/minio"
 }
 
-module "minio_s3_gateway" {
-  source = "./minio_s3_gateway"
-  providers = {
-    nomad = nomad.yul1
-  }
-
-  # nomad
-  datacenters   = ["yul1"]
-  volume_source = "prod-volume-data1-1"
-
-  # minio
-  job_name           = "minio-s3-gateway"
-  group_count        = 4
-  service_name       = "minio"
-  mode               = "gateway"
-  port_base          = 9001
-  port_console       = 9002
-  image              = "minio/minio:latest"
-  access_key         = data.vault_generic_secret.minio_creds.data["access_key"]
-  secret_key         = data.vault_generic_secret.minio_creds.data["secret_key"]
-  volume_destination = "/data/"
-  use_host_volume    = true
-  use_canary         = true
-  envs = [
-    "MINIO_BROWSER=\"off\"",
-    "MINIO_CACHE=\"on\"",
-    "MINIO_CACHE_DRIVES=\"/data/s3_cache1\"",
-    "MINIO_CACHE_EXCLUDE=\"\"",
-    "MINIO_CACHE_QUOTA=80",
-    "MINIO_CACHE_AFTER=1",
-    "MINIO_CACHE_WATERMARK_LOW=70",
-    "MINIO_CACHE_WATERMARK_HIGH=90"
-  ]
-}
-
 #module "nginx" {
 #  source = "./nginx"
 #  providers = {
@@ -116,35 +56,6 @@ module "minio_s3_gateway" {
 #  nginx_use_host_volume = true
 #}
 
-module "prometheus" {
-  source = "./prometheus"
-  providers = {
-    nomad = nomad.yul1
-  }
-
-  # nomad
-  nomad_datacenters = ["yul1"]
-  nomad_host_volume = "prod-volume-data1-1"
-
-  # prometheus
-  prometheus_job_name    = "prod-prometheus"
-  prometheus_use_canary  = true
-  prometheus_group_count = 4
-  prometheus_vault_secret = {
-    use_vault_provider        = false,
-    vault_kv_policy_name      = "kv-secret",
-    vault_kv_path             = "secret/data/prometheus",
-    vault_kv_field_access_key = "access_key",
-    vault_kv_field_secret_key = "secret_key"
-  }
-  prometheus_data_dir        = "/data/"
-  prometheus_use_host_volume = true
-  prometheus_version         = "2.28.1"
-  prometheus_cpu             = 2000
-  prometheus_mem             = 8192
-  prometheus_port            = 9090
-}
-
 module "vpp_device" {
   source = "./vpp_device"
   providers = {
diff --git a/fdio.infra.terraform/1n_nmd/minio_s3_gateway/main.tf b/fdio.infra.terraform/1n_nmd/minio_s3_gateway/main.tf
deleted file mode 100644 (file)
index 2ae3cac..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-locals {
-  datacenters = join(",", var.datacenters)
-  envs        = join("\n", concat([], var.envs))
-  upstreams   = jsonencode(var.upstreams)
-}
-
-data "template_file" "nomad_job_minio" {
-  template = file("${path.module}/conf/nomad/minio.hcl")
-  vars = {
-    access_key                = var.access_key
-    auto_promote              = var.auto_promote
-    auto_revert               = var.auto_revert
-    canary                    = var.canary
-    cpu                       = var.cpu
-    cpu_proxy                 = var.resource_proxy.cpu
-    datacenters               = local.datacenters
-    envs                      = local.envs
-    group_count               = var.group_count
-    host                      = var.host
-    image                     = var.image
-    job_name                  = var.job_name
-    max_parallel              = var.max_parallel
-    memory                    = var.memory
-    memory_proxy              = var.resource_proxy.memory
-    mode                      = var.mode
-    port_base                 = var.port_base
-    port_console              = var.port_console
-    region                    = var.region
-    secret_key                = var.secret_key
-    service_name              = var.service_name
-    use_canary                = var.use_canary
-    use_host_volume           = var.use_host_volume
-    upstreams                 = local.upstreams
-    use_vault_kms             = var.kms_variables.use_vault_kms
-    use_vault_provider        = var.vault_secret.use_vault_provider
-    vault_address             = var.kms_variables.vault_address
-    vault_kms_approle_kv      = var.kms_variables.vault_kms_approle_kv
-    vault_kms_key_name        = var.kms_variables.vault_kms_key_name
-    vault_kv_policy_name      = var.vault_secret.vault_kv_policy_name
-    vault_kv_path             = var.vault_secret.vault_kv_path
-    vault_kv_field_access_key = var.vault_secret.vault_kv_field_access_key
-    vault_kv_field_secret_key = var.vault_secret.vault_kv_field_secret_key
-    volume_destination        = var.volume_destination
-    volume_source             = var.volume_source
-  }
-}
-
-resource "nomad_job" "nomad_job_minio" {
-  jobspec = data.template_file.nomad_job_minio.rendered
-  detach  = false
-}
diff --git a/fdio.infra.terraform/1n_nmd/minio_s3_gateway/versions.tf b/fdio.infra.terraform/1n_nmd/minio_s3_gateway/versions.tf
deleted file mode 100644 (file)
index b80610a..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-terraform {
-  required_providers {
-    nomad = {
-      source  = "hashicorp/nomad"
-      version = "~> 1.4.15"
-    }
-    template = {
-      source  = "hashicorp/template"
-      version = "~> 2.2.0"
-    }
-  }
-  required_version = ">= 1.0.3"
-}
@@ -4,7 +4,7 @@ data "vault_aws_access_credentials" "creds" {
 }
 
 module "elastic_beanstalk_application_version" {
-  source                   = "../../terraform-aws-elastic-beanstalk-application-version"
+  source                   = "../terraform-aws-elastic-beanstalk-application-version"
   application_description  = "FD.io CSIT Results Dashboard"
   application_name         = "fdio-csit-dash-app"
   application_version_name = "fdio-csit-dash-app-base"
@@ -8,7 +8,7 @@ data "vault_aws_access_credentials" "creds" {
 }
 
 module "elastic_beanstalk_application" {
-  source = "../../terraform-aws-elastic-beanstalk-application"
+  source = "../terraform-aws-elastic-beanstalk-application"
 
   # application
   application_description                    = "FD.io CSIT Results Dashboard"
@@ -19,7 +19,7 @@ module "elastic_beanstalk_application" {
 }
 
 module "elastic_beanstalk_environment" {
-  source = "../../terraform-aws-elastic-beanstalk-environment"
+  source = "../terraform-aws-elastic-beanstalk-environment"
 
   # vpc
   vpc_cidr_block           = "192.168.0.0/24"
@@ -69,12 +69,6 @@ job "${job_name}" {
 %{ endif }
   }
 
-  # All groups in this job should be scheduled on different hosts.
-  constraint {
-    operator = "distinct_hosts"
-    value    = "true"
-  }
-
   # The "group" stanza defines a series of tasks that should be co-located on
   # the same Nomad client. Any task within a group will be placed on the same
   # client.
@@ -112,6 +106,22 @@ job "${job_name}" {
       mode     = "delay"
     }
 
+    # The constraint allows restricting the set of eligible nodes. Constraints
+    # may filter on attributes or client metadata.
+    #
+    #     https://www.nomadproject.io/docs/job-specification/constraint
+    #
+    constraint {
+      attribute = "$${attr.cpu.arch}"
+      operator  = "!="
+      value     = "arm64"
+    }
+
+    constraint {
+      attribute = "$${node.class}"
+      value     = "builder"
+    }
+
     # The network stanza specifies the networking requirements for the task
     # group, including the network mode and port allocations. When scheduling
     # jobs in Nomad they are provisioned across your fleet of machines along
@@ -122,20 +132,16 @@ job "${job_name}" {
     #     https://www.nomadproject.io/docs/job-specification/network
     #
     network {
-      port "base" {
-        static = ${port_base}
-        to     = ${port_base}
-      }
-      port "console" {
-        static = ${port_console}
-        to     = ${port_console}
+      port "${service_name}" {
+        static = ${port}
+        to     = ${port}
       }
     }
 
     # The "task" stanza creates an individual unit of work, such as a Docker
     # container, web application, or batch processing.
     #
-    #     https://www.nomadproject.io/docs/job-specification/task.html
+    #     https://www.nomadproject.io/docs/job-specification/task
     #
     task "${job_name}-task-1" {
       # The "driver" parameter specifies the task driver that should be used to
@@ -161,12 +167,7 @@ job "${job_name}" {
       # are specific to each driver, so please see specific driver
       # documentation for more information.
       config {
-        args         = [
-          "${mode}", "s3",
-          "-address", ":${port_base}",
-          "-console-address", ":${port_console}"
-        ]
-        command      = "local/minio"
+        command = "local/loki-linux-amd64"
       }
 
       # The artifact stanza instructs Nomad to fetch and unpack a remote resource,
@@ -174,30 +175,57 @@ job "${job_name}" {
       # popular go-getter library, which permits downloading artifacts from a
       # variety of locations using a URL as the input source.
       #
-      # For more information and examples on the "artifact" stanza, please see
-      # the online documentation at:
-      #
       #     https://www.nomadproject.io/docs/job-specification/artifact
       #
       artifact {
-        source      = "https://dl.min.io/server/minio/release/linux-amd64/minio"
+        source = "${url}"
+        # NOTE(review): 'args' is not a valid attribute of the artifact
+        # stanza (only source/destination/options/headers); move this to the
+        # task config stanza, e.g. args = ["-config.file=secrets/loki.yml"].
       }
 
-      # The env stanza configures a list of environment variables to populate
-      # the task's environment before starting.
-      env {
-%{ if use_vault_provider }
-{{ with secret "${vault_kv_path}" }}
-        MINIO_ROOT_USER     = "{{ .Data.data.${vault_kv_field_access_key} }}"
-        MINIO_ROOT_PASSWORD = "{{ .Data.data.${vault_kv_field_secret_key} }}"
-{{ end }}
-%{ else }
-        MINIO_ROOT_USER       = "${access_key}"
-        MINIO_ROOT_PASSWORD   = "${secret_key}"
-        AWS_ACCESS_KEY_ID     = "${access_key}"
-        AWS_SECRET_ACCESS_KEY = "${secret_key}"
-%{ endif }
-        ${ envs }
+      template {
+        change_mode     = "noop"
+        change_signal   = "SIGINT"
+        destination     = "secrets/loki.yml"
+        data            = <<EOH
+---
+auth_enabled: false
+
+server:
+  http_listen_port: 3100
+  http_listen_address: 127.0.0.1
+
+schema_config:
+  configs:
+  - from: 2020-05-15
+    store: boltdb
+    object_store: filesystem
+    schema: v11
+    index:
+      prefix: index_
+      period: 168h
+
+storage_config:
+  boltdb:
+    directory: /tmp/loki/index
+
+  filesystem:
+    directory: /tmp/loki/chunks
+
+  aws:
+    bucketnames: loki
+    endpoint: http://storage.service.consul:9000
+    access_key_id: storage
+    secret_access_key: Storage1234
+    insecure: false
+    sse_encryption: false
+    http_config:
+      idle_conn_timeout: 90s
+      response_header_timeout: 0s
+      insecure_skip_verify: false
+    s3forcepathstyle: true
+EOH
       }
 
       # The service stanza instructs Nomad to register a service with Consul.
@@ -205,26 +233,13 @@ job "${job_name}" {
       #     https://www.nomadproject.io/docs/job-specification/service
       #
       service {
-        name       = "${service_name}"
-        port       = "base"
-        tags       = [ "${service_name}$${NOMAD_ALLOC_INDEX}" ]
-        check {
-          name     = "Min.io Server HTTP Check Live"
-          type     = "http"
-          port     = "base"
-          protocol = "http"
-          method   = "GET"
-          path     = "/minio/health/live"
-          interval = "10s"
-          timeout  = "2s"
-        }
+        name = "${service_name}"
+        port = "${service_name}"
+        tags = [ "${service_name}$${NOMAD_ALLOC_INDEX}" ]
         check {
-          name     = "Min.io Server HTTP Check Ready"
+          name     = "Loki Check Live"
           type     = "http"
-          port     = "base"
-          protocol = "http"
-          method   = "GET"
-          path     = "/minio/health/ready"
+          path     = "/-/healthy"
           interval = "10s"
           timeout  = "2s"
         }
@@ -238,8 +253,8 @@ job "${job_name}" {
       #     https://www.nomadproject.io/docs/job-specification/resources
       #
       resources {
-        cpu        = ${cpu}
-        memory     = ${memory}
+        cpu    = ${cpu}
+        memory = ${memory}
       }
     }
   }
diff --git a/fdio.infra.terraform/terraform-nomad-loki/main.tf b/fdio.infra.terraform/terraform-nomad-loki/main.tf
new file mode 100644 (file)
index 0000000..a2fc70d
--- /dev/null
@@ -0,0 +1,40 @@
+locals {
+  datacenters = join(",", var.datacenters)
+  url = join("",
+    [
+      "https://github.com",
+      "/grafana/loki/releases/download/v${var.gl_version}/loki-linux-amd64.zip"
+    ]
+  )
+}
+
+resource "nomad_job" "nomad_job_loki" {
+  jobspec = templatefile(
+    "${path.module}/conf/nomad/loki.hcl.tftpl",
+    {
+      auto_promote              = var.auto_promote,
+      auto_revert               = var.auto_revert,
+      canary                    = var.canary,
+      cpu                       = var.cpu,
+      datacenters               = local.datacenters,
+      group_count               = var.group_count,
+      job_name                  = var.job_name,
+      max_parallel              = var.max_parallel,
+      memory                    = var.memory,
+      port                      = var.port,
+      region                    = var.region,
+      service_name              = var.service_name,
+      url                       = local.url,
+      use_canary                = var.use_canary,
+      use_host_volume           = var.use_host_volume,
+      use_vault_provider        = var.vault_secret.use_vault_provider,
+      vault_kv_policy_name      = var.vault_secret.vault_kv_policy_name,
+      vault_kv_path             = var.vault_secret.vault_kv_path,
+      vault_kv_field_access_key = var.vault_secret.vault_kv_field_access_key,
+      vault_kv_field_secret_key = var.vault_secret.vault_kv_field_secret_key,
+      version                   = var.gl_version,
+      volume_destination        = var.volume_destination,
+      volume_source             = var.volume_source
+  })
+  detach = false
+}
@@ -1,5 +1,4 @@
 # Nomad
-
 variable "datacenters" {
   description = "Specifies the list of DCs to be considered placing this task"
   type        = list(string)
@@ -15,14 +14,14 @@ variable "region" {
 variable "volume_source" {
   description = "The name of the volume to request"
   type        = string
-  default     = "persistence"
+  default     = ""
 }
 
-# Minio
-variable "access_key" {
-  description = "Minio access key"
+# Grafana Loki
+variable "gl_version" {
+  description = "Grafana Loki version"
   type        = string
-  default     = "minio"
+  default     = "2.4.2"
 }
 
 variable "auto_promote" {
@@ -44,15 +43,15 @@ variable "canary" {
 }
 
 variable "cpu" {
-  description = "Specifies the CPU required to run this task in MHz"
+  description = "CPU allocation"
   type        = number
-  default     = 1000
+  default     = 2000
 }
 
-variable "envs" {
-  description = "Minio environment variables"
-  type        = list(string)
-  default     = []
+variable "data_dir" {
+  description = "Loki data dir allocation"
+  type        = string
+  default     = ""
 }
 
 variable "group_count" {
@@ -61,38 +60,10 @@ variable "group_count" {
   default     = 1
 }
 
-variable "host" {
-  description = "Minio host"
-  type        = string
-  default     = "127.0.0.1"
-}
-
-variable "image" {
-  description = "The Docker image to run"
-  type        = string
-  default     = "minio/minio:latest"
-}
-
 variable "job_name" {
   description = "Specifies a name for the job"
   type        = string
-  default     = "minio"
-}
-
-variable "kms_variables" {
-  type = object({
-    use_vault_kms        = string
-    vault_address        = string,
-    vault_kms_approle_kv = string,
-    vault_kms_key_name   = string
-  })
-  description = "Set of properties to be able to transit secrets in vault"
-  default = {
-    use_vault_kms        = false
-    vault_address        = "",
-    vault_kms_approle_kv = "",
-    vault_kms_key_name   = ""
-  }
+  default     = "loki"
 }
 
 variable "max_parallel" {
@@ -104,68 +75,25 @@ variable "max_parallel" {
 variable "memory" {
   description = "Specifies the memory required in MB"
   type        = number
-  default     = 1024
-}
-
-variable "mode" {
-  description = "Specifies the Minio mode"
-  type        = string
-  default     = "server"
+  default     = 4096
 }
 
-variable "port_base" {
+variable "port" {
   description = "Specifies the static TCP/UDP port to allocate"
   type        = number
-  default     = 9000
-}
-
-variable "port_console" {
-  description = "Specifies the static TCP/UDP port to allocate"
-  type        = number
-  default     = 9001
-}
-
-variable "resource_proxy" {
-  description = "Minio proxy resources"
-  type = object({
-    cpu    = number,
-    memory = number
-  })
-  default = {
-    cpu    = 2000,
-    memory = 1024
-  }
-  validation {
-    condition     = var.resource_proxy.cpu >= 200 && var.resource_proxy.memory >= 128
-    error_message = "Proxy resource must be at least: cpu=200, memory=128."
-  }
+  default     = 3100
 }
 
 variable "service_name" {
   description = "Specifies the name this service will be advertised in Consul"
   type        = string
-  default     = "minio"
-}
-
-variable "secret_key" {
-  description = "Minio secret key"
-  type        = string
-  default     = "minio"
-}
-
-variable "upstreams" {
-  type = list(object({
-    service_name = string,
-    port         = number,
-  }))
-  description = "List of upstream services"
-  default     = []
+  default     = "loki"
 }
 
 variable "use_canary" {
-  description = "Uses canary deployment for Minio"
+  description = "Uses canary deployment"
   type        = bool
-  default     = false
+  default     = true
 }
 
 variable "use_host_volume" {
@@ -174,6 +102,12 @@ variable "use_host_volume" {
   default     = false
 }
 
+variable "volume_destination" {
+  description = "Specifies where the volume should be mounted inside the task"
+  type        = string
+  default     = ""
+}
+
 variable "vault_secret" {
   type = object({
     use_vault_provider        = bool,
@@ -182,18 +116,12 @@ variable "vault_secret" {
     vault_kv_field_access_key = string,
     vault_kv_field_secret_key = string
   })
-  description = "Set of properties to be able to fetch secret from vault"
+  description = "Set of properties to be able to fetch secret from vault."
   default = {
     use_vault_provider        = false
     vault_kv_policy_name      = "kv"
-    vault_kv_path             = "secret/data/minio"
+    vault_kv_path             = "secret/data/loki"
     vault_kv_field_access_key = "access_key"
     vault_kv_field_secret_key = "secret_key"
   }
 }
-
-variable "volume_destination" {
-  description = "Specifies where the volume should be mounted inside the task"
-  type        = string
-  default     = "/data/"
-}
@@ -581,13 +581,6 @@ scrape_configs:
     - server: '{{ env "NOMAD_IP_prometheus" }}:8500'
       services: [ 'storage' ]
     metrics_path: /minio/prometheus/metrics
-
-  - job_name: 'Minio Proxy'
-    bearer_token: eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJleHAiOjQ3OTAwNjE1NDIsImlzcyI6InByb21ldGhldXMiLCJzdWIiOiJBS0lBUTJBSDdZUFBXVDZDV1hYSSJ9.CU9x9j-yO0_Uta5iep6yqNiGQPolrr2608E3lpU6Yg21rIv_eOwS5zqzXaSvrhzkJP9H5kO1Pj6kqjYhbqjN_w
-    consul_sd_configs:
-    - server: '{{ env "NOMAD_IP_prometheus" }}:8500'
-      services: [ 'minio' ]
-    metrics_path: /minio/v2/metrics/cluster
 EOH
       }
 
@@ -4,8 +4,7 @@ locals {
     [
       "https://github.com",
       "/prometheus/prometheus/releases/download/",
-      "v${var.pm_version}/",
-      "prometheus-${var.pm_version}.linux-amd64.tar.gz"
+      "v${var.pm_version}/prometheus-${var.pm_version}.linux-amd64.tar.gz"
     ]
   )
 }
diff --git a/fdio.infra.terraform/terraform-nomad-pyspark-etl/versions.tf b/fdio.infra.terraform/terraform-nomad-pyspark-etl/versions.tf
new file mode 100644 (file)
index 0000000..a01708f
--- /dev/null
@@ -0,0 +1,9 @@
+terraform {
+  required_providers {
+    nomad = {
+      source  = "hashicorp/nomad"
+      version = ">= 1.4.16"
+    }
+  }
+  required_version = ">= 1.1.4"
+}