terraform Error: Get "http://localhost/api/v1/namespaces/default/secrets/name-secret": dial tcp [::1]:80: connect: connection refused

Posted: 2020-09-17 21:56:59

Question:

I have a problem in GitLab CI: when I run terraform apply locally everything works fine (kubectl works fine both locally and inside the GitLab CI container), but running the same script in GitLab CI throws the errors shown below.

Terraform version locally: v0.12.24

Terraform version in the GitLab CI container: v0.12.25
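If the version skew between local and CI ever matters, one way to rule it out is to pin Terraform itself; a minimal sketch (the exact constraint below is only an example):

terraform {
  required_version = "= 0.12.24"
}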

main.tf

provider "google" 
  project = "profiline-russia"
  region  = "us-central1"
  zone    = "us-central1-c"


resource "google_container_cluster" "primary" 
  name     = "main-cluster"
  location = "europe-west3"

  remove_default_node_pool = true
  initial_node_count = 1


resource "google_container_node_pool" "primary_nodes" 
  name       = "node-pool"
  location   = "europe-west3"
  cluster    = google_container_cluster.primary.name
  node_count = 1

  node_config 
    machine_type = "n1-standard-1"
  



# dashboard ui
# module "kubernetes_dashboard" {
#   source  = "cookielab/dashboard/kubernetes"
#   version = "0.9.0"

#   kubernetes_namespace_create = true
#   kubernetes_dashboard_csrf   = "random-string"
# }


# deployment server
resource "kubernetes_deployment" "deployment-server" 
  metadata 
    name = var.data-deployment-server.metadata.name
    labels = 
      App = var.data-deployment-server.labels.App
    
  

  spec 
    replicas = 1

    selector 
      match_labels = 
        App = var.data-deployment-server.labels.App
      
    

    template 
      metadata 
        labels = 
          App = var.data-deployment-server.labels.App
        
      

      spec 
        container 
          image = var.data-deployment-server.image.name # for passing this i made gcr public
          name = var.data-deployment-server.container.name
          command = var.data-deployment-server.container.command
          port 
            container_port = var.data-deployment-server.container.port
          
          env 
            name  = "ENV"
            value = "production"
          
          env 
            name  = "DB_USERNAME"
            value_from 
              secret_key_ref 
                name = kubernetes_secret.secret-db.metadata.0.name
                key = "db_username"
              
            
          
          env 
            name  = "DB_PASSWORD"
            value_from 
              secret_key_ref 
                name = kubernetes_secret.secret-db.metadata.0.name
                key = "db_password"
              
            
          
          env 
            name  = "DB_NAME"
            value_from 
              secret_key_ref 
                name = kubernetes_secret.secret-db.metadata.0.name
                key = "db_name"
              
            
          
          env 
            name  = "DEFAULT_BUCKET_NAME"
            value = var.default-bucket-name
          
          env 
            name  = "DATABASE_ClOUD_SQL_NAME"
            value = var.database-cloud-sql-name
          
          env 
            name  = "PROJECT_GCP_ID"
            value = var.project-gcp-id
          
          env 
            name  = "K8S_SA_CLOUD_STORAGE"
            value_from 
              secret_key_ref 
                name = kubernetes_secret.secret-sa-cloud-storage.metadata.0.name
                key = "sa-cloud-storage.json"
              
            
          
          env 
            name = "GOOGLE_APPLICATION_CREDENTIALS"
            value = "/app/secrets/sa-cloud-storage.json"
          

          liveness_probe 
            http_get 
              path = "/swagger"
              port = var.data-deployment-server.container.port
            

            initial_delay_seconds = 10
            period_seconds = 10
          
        

        container 
          image = var.data-cloud-sql-proxy.image.name
          name  = var.data-cloud-sql-proxy.container.name
          command = var.data-cloud-sql-proxy.container.command
          volume_mount 
            name = var.data-cloud-sql-proxy.volume.name
            mount_path = "/secrets/"
            read_only = true
          
        

        volume 
          name = var.data-cloud-sql-proxy.volume.name
          secret 
            secret_name = kubernetes_secret.secret-gsa.metadata.0.name
          
        
      


    
  


resource "kubernetes_service" "service-server"  # wget http://name-service-server:8000/swagger
  metadata 
    name = var.data-deployment-server.service.name
  
  spec 
    selector = 
      App = var.data-deployment-server.labels.App
    
    port 
      port = var.data-deployment-server.container.port
    

    type = var.data-deployment-server.service.type
  



# deployment client-web
resource "kubernetes_deployment" "deployment-client-web" 
  metadata 
    name = var.data-deployment-client-web.metadata.name
    labels = 
      App = var.data-deployment-client-web.labels.App
    
  

  spec 
    replicas = 1

    selector 
      match_labels = 
        App = var.data-deployment-client-web.labels.App
      
    

    template 
      metadata 
        labels = 
          App = var.data-deployment-client-web.labels.App
        
      

      spec 
        container 
          image = var.data-deployment-client-web.image.name
          command = var.data-deployment-client-web.container.command
          name  = var.data-deployment-client-web.container.name
          port 
            container_port = var.data-deployment-client-web.container.port
          

          liveness_probe 
            http_get 
              path = "/"
              port = var.data-deployment-client-web.container.port
            

            initial_delay_seconds = 300
            period_seconds = 10
          
        
      
    
  


resource "kubernetes_service" "service-client-web"  # wget http://name-service-server:8000/swagger
  metadata 
    name = var.data-deployment-client-web.service.name
  
  spec 
    selector = 
      App = var.data-deployment-client-web.labels.App
    
    port 
      port = var.data-deployment-client-web.container.port
    

    type = var.data-deployment-client-web.service.type
  



# database
resource "google_sql_database" "database" 
  name = "database-profiline-russia"
  instance = google_sql_database_instance.db-instance.name


resource "google_sql_database_instance" "db-instance" 
  name = "db-master-instance"
  region = "europe-west3"
  database_version = "POSTGRES_11"
  settings 
    tier = "db-f1-micro"
  


resource "google_sql_user" "db-user" 
  name = "..."
  instance = google_sql_database_instance.db-instance.name
  password = "..."



resource "kubernetes_secret" "secret-db" 
  metadata 
    name = "name-secret-db"
  

  data = 
    db_username = google_sql_user.db-user.name
    db_password = google_sql_user.db-user.password
    db_name = google_sql_database.database.name
  

  type = "Opaque"


resource "kubernetes_secret" "secret-gsa" 
  metadata 
    name = "name-secret-gsa"
  

  data = 
    "service_account.json" = file(var.cred-sa-default)
  

  type = "Opaque"


resource "kubernetes_secret" "secret-sa-cloud-storage" 
  metadata 
    name = "name-secret-sa-cloud-storage"
  

  data = 
    "sa-cloud-storage.json" = file(var.cred-sa-cloud-storage)
  

  type = "Opaque"

vars.tf

variable "default-bucket-name" 
  type = string
  description = "default bucket name(bucket doesnt recreated(created previously by hands))"


variable "database-cloud-sql-name" 
  type = string
  description = "full database name"


variable "project-gcp-id" 
  type = string
  description = "gcp project id"


variable "cred-sa-default" 
  type = string
  description = "default service account credentials file"


variable "cred-sa-cloud-storage" 
  type = string
  description = "cloud storage service account credentials file"


variable "data-deployment-server" 
    type = object(
        metadata = object(
            name = string
        )
        image = object(
            name = string
        )
        labels = object(
            App = string
        )
        container = object(
            name = string
            command = list(string)
            port = number
        )
        service = object(
            name = string
            type = string
        )
    )


variable "data-cloud-sql-proxy" 
    type = object(
        image = object(
            name = string
        )
        container = object(
            name = string
            command = list(string)
        )
        volume = object(
            name = string
        )
    )


variable "data-deployment-client-web" 
    type = object(
        metadata = object(
            name = string
        )
        image = object(
            name = string
        )
        labels = object(
            App = string
        )
        container = object(
            name = string
            command = list(string)
            port = number
        )
        service = object(
            name = string
            type = string
        )
    )

terraform.tfvars holds the values of the private variables.
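For illustration only, terraform.tfvars might look roughly like the sketch below; every value is a hypothetical placeholder, not one of the real private values, and only one of the three object variables is shown:

default-bucket-name     = "example-bucket"
database-cloud-sql-name = "example-project:europe-west3:db-master-instance"
project-gcp-id          = "example-project"
cred-sa-default         = "secrets/sa-default.json"
cred-sa-cloud-storage   = "secrets/sa-cloud-storage.json"

data-deployment-server = {
  metadata = { name = "deployment-server" }
  image    = { name = "gcr.io/example-project/server:latest" }
  labels   = { App = "server" }
  container = {
    name    = "server"
    command = ["./start.sh"]
    port    = 8000
  }
  service = {
    name = "name-service-server"
    type = "ClusterIP"
  }
}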

The error in the GitLab CI container:

 $ terraform apply -auto-approve
 kubernetes_secret.secret-sa-cloud-storage: Refreshing state... [id=default/name-secret-sa-cloud-storage]
 kubernetes_secret.secret-gsa: Refreshing state... [id=default/name-secret-gsa]
 module.kubernetes_dashboard.kubernetes_secret.kubernetes_dashboard_certs: Refreshing state... [id=kubernetes-dashboard/kubernetes-dashboard-certs]
 module.kubernetes_dashboard.kubernetes_namespace.kubernetes_dashboard[0]: Refreshing state... [id=kubernetes-dashboard]
 module.kubernetes_dashboard.kubernetes_service.kubernetes_dashboard: Refreshing state... [id=kubernetes-dashboard/kubernetes-dashboard]
 module.kubernetes_dashboard.kubernetes_service_account.kubernetes_dashboard: Refreshing state... [id=kubernetes-dashboard/kubernetes-dashboard]
 module.kubernetes_dashboard.kubernetes_cluster_role.kubernetes_dashboard: Refreshing state... [id=kubernetes-dashboard]
 module.kubernetes_dashboard.kubernetes_cluster_role_binding.kubernetes_dashboard: Refreshing state... [id=kubernetes-dashboard]
 module.kubernetes_dashboard.kubernetes_role.kubernetes_dashboard: Refreshing state... [id=kubernetes-dashboard/kubernetes-dashboard]
 module.kubernetes_dashboard.kubernetes_secret.kubernetes_dashboard_csrf: Refreshing state... [id=kubernetes-dashboard/kubernetes-dashboard-csrf]
 module.kubernetes_dashboard.kubernetes_config_map.kubernetes_dashboard_settings: Refreshing state... [id=kubernetes-dashboard/kubernetes-dashboard-settings]
 google_container_cluster.primary: Refreshing state... [id=projects/profiline-russia/locations/europe-west3/clusters/main-cluster]
 module.kubernetes_dashboard.kubernetes_service.kubernetes_metrics_scraper: Refreshing state... [id=kubernetes-dashboard/dashboard-metrics-scraper]
 kubernetes_service.service-server: Refreshing state... [id=default/name-service-server]
 google_sql_database_instance.db-instance: Refreshing state... [id=db-master-instance]
 kubernetes_service.service-client-web: Refreshing state... [id=default/name-service-client-web]
 module.kubernetes_dashboard.kubernetes_role_binding.kubernetes_dashboard: Refreshing state... [id=kubernetes-dashboard/kubernetes-dashboard]
 module.kubernetes_dashboard.kubernetes_secret.kubernetes_dashboard_key_holder: Refreshing state... [id=kubernetes-dashboard/kubernetes-dashboard-key-holder]
 google_sql_user.db-user: Refreshing state... [id=username//db-master-instance]
 google_sql_database.database: Refreshing state... [id=projects/profiline-russia/instances/db-master-instance/databases/database-profiline-russia]
 module.kubernetes_dashboard.kubernetes_deployment.kubernetes_dashboard: Refreshing state... [id=kubernetes-dashboard/kubernetes-dashboard]
 module.kubernetes_dashboard.kubernetes_deployment.kubernetes_metrics_scraper: Refreshing state... [id=kubernetes-dashboard/kubernetes-metrics-scraper]
 kubernetes_deployment.deployment-client-web: Refreshing state... [id=default/deployment-client-web]
 google_container_node_pool.primary_nodes: Refreshing state... [id=projects/profiline-russia/locations/europe-west3/clusters/main-cluster/nodePools/node-pool]
 kubernetes_secret.secret-db: Refreshing state... [id=default/name-secret-db]
 Error: Get "http://localhost/api/v1/namespaces/kubernetes-dashboard/serviceaccounts/kubernetes-dashboard": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/kubernetes-dashboard/services/dashboard-metrics-scraper": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/apis/apps/v1/namespaces/kubernetes-dashboard/deployments/kubernetes-dashboard": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/apis/apps/v1/namespaces/default/deployments/deployment-client-web": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/kubernetes-dashboard/secrets/kubernetes-dashboard-key-holder": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/default/services/name-service-client-web": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/apis/apps/v1/namespaces/kubernetes-dashboard/deployments/kubernetes-metrics-scraper": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/default/secrets/name-secret-gsa": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/kubernetes-dashboard": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/apis/rbac.authorization.k8s.io/v1/clusterroles/kubernetes-dashboard": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/apis/rbac.authorization.k8s.io/v1/namespaces/kubernetes-dashboard/roles/kubernetes-dashboard": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/kubernetes-dashboard/secrets/kubernetes-dashboard-certs": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/default/services/name-service-server": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/kubernetes-dashboard": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/kubernetes-dashboard/services/kubernetes-dashboard": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/default/secrets/name-secret-sa-cloud-storage": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/kubernetes-dashboard/secrets/kubernetes-dashboard-csrf": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/apis/rbac.authorization.k8s.io/v1/namespaces/kubernetes-dashboard/rolebindings/kubernetes-dashboard": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/default/secrets/name-secret-db": dial tcp [::1]:80: connect: connection refused
 Error: Get "http://localhost/api/v1/namespaces/kubernetes-dashboard/configmaps/kubernetes-dashboard-settings": dial tcp [::1]:80: connect: connection refused
Running after_script
00:01
Uploading artifacts for failed job
00:02
 ERROR: Job failed: exit code 1

Comments:

The dashboard part of my main.tf is commented out; after applying that locally and then running the same script in the container, the dashboard errors disappear.

Answer 1:

If you are not using a local kubeconfig file, make sure load_config_file is set to false in the kubernetes provider configuration. This resolved the error for me.

load_config_file = false # when you wish not to load the local config file
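The error usually means the kubernetes provider has no explicit configuration, so in the CI container (where no kubeconfig exists) it falls back to its defaults and dials localhost:80, while locally the kubeconfig hides the problem. A minimal sketch of configuring the provider explicitly against the GKE cluster above (for kubernetes provider versions before 2.0, where load_config_file still exists) could look like this:

data "google_client_config" "default" {}

provider "kubernetes" {
  load_config_file       = false
  host                   = "https://${google_container_cluster.primary.endpoint}"
  token                  = data.google_client_config.default.access_token
  cluster_ca_certificate = base64decode(
    google_container_cluster.primary.master_auth.0.cluster_ca_certificate
  )
}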

Discussion:

load_config_file is not supported by the kubernetes provider 2.0.0 and later.

Yes Jason, then what do you do?
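For kubernetes provider 2.0.0 and later the same approach still applies, just without load_config_file; a sketch reusing the google_client_config data source from the snippet above:

provider "kubernetes" {
  host                   = "https://${google_container_cluster.primary.endpoint}"
  token                  = data.google_client_config.default.access_token
  cluster_ca_certificate = base64decode(
    google_container_cluster.primary.master_auth.0.cluster_ca_certificate
  )
}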
