Run Airflow on Nomad (translating from docker-compose)

Hello everyone. I'm using Nomad v1.6.1 and am currently facing an issue: I tried to run Airflow by translating its docker-compose file to a Nomad job.
I referenced this video, https://www.youtube.com/watch?v=Egk5L2AM-28, and a similar question, "Translating docker-compose.yml with multiple services using a common image ... to Nomad Job".
The docker-compose file came from here:
https://github.com/hnawaz007/pythondataanalysis/blob/main/AirflowSession2/docker-compose.yml
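(For context, as far as I can tell that compose file follows the official Airflow one: a shared x-airflow-common section gives every service the same environment, and the airflow-init service runs a single multi-line bash script. That is why every task below repeats the same env block.)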
Below are the job file that I wrote and the errors:

job "airflow" {
  datacenters = [
    "dc1"]
  type        = "service"

  update {
    max_parallel      = 1
    min_healthy_time  = "10s"
    healthy_deadline  = "5m"
    progress_deadline = "10m"
    auto_revert       = false
    canary            = 0
  }
group "postgres" {
    count = 1

    restart {
      attempts = 2
      interval = "10m"
      delay    = "15s"
      mode     = "fail"
    }

    affinity {
      attribute = "${node.unique.name}"
      value     = "nomad78"
      weight    = 60
    }
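    # I'm pinning everything to nomad78 because the connection strings
    # below hard-code that hostname; as I understand it, affinity is only
    # a soft preference, so a constraint would be needed to guarantee it.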

    network {
      port "postgres" {
        static = 5432
        to     = 5432
      }
    }

    service {
      name     = "postgres"
      port     = "postgres"
      provider = "nomad"
    }
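
    # With network_mode = "host" the container binds the host port directly;
    # as far as I understand, the static port above mainly reserves 5432
    # with the Nomad scheduler.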
    task "postgres" {
      driver = "docker"
        env {
POSTGRES_USER= "airflow"
POSTGRES_PASSWORD= "airflow"
POSTGRES_DB= "airflow"
        }

      config {
        ports=["postgres"]
        image = "postgres:9.6"
        network_mode="host"
        volumes=[
            "local/postgres-db-volume:/var/lib/postgresql/data"
        ]

      }
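
      # Anything under local/ is in the allocation's ephemeral directory,
      # so if I understand Nomad storage correctly this Postgres data is
      # lost whenever the allocation is replaced; a host volume would
      # persist it.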
      
      resources {
        cpu    = 100
        memory = 1024
      }


    }
  }

  group "redis" {
    count = 1

    affinity {
      attribute = "${node.unique.name}"
      value     = "nomad78"
      weight    = 60
    }

    network {
      port "redis" {
        static = 6379
        to     = 6379
      }
    }

    restart {
      attempts = 2
      interval = "10m"
      delay    = "15s"
      mode     = "fail"
    }

    service {
      name     = "redis"
      port     = "redis"
      provider = "nomad"
    }

    task "redis" {
      driver = "docker"

      config {
        image        = "redis:6"
        network_mode = "host"
        ports        = ["redis"]
      }
      resources {
        cpu    = 100
        memory = 1024
      }
    }
  }

  group "airflow-webserver" {
    count = 1


    affinity {
      attribute = "${node.unique.name}"
      value     = "nomad78"
      weight    = 60
    }

    network {
      port "afwebserver" {
        static = 8080
        to     = 8080
      }
    }

    restart {
      attempts = 2
      interval = "10m"
      delay    = "15s"
      mode     = "fail"
    }
    task "airflow-webserver" {
      driver = "docker"
      user="root"
      env = {
        AIRFLOW__CORE__EXECUTOR="CeleryExecutor"
        AIRFLOW__DATABASE__SQL_ALCHEMY_CONN= "postgresql+psycopg2://airflow:airflow@nomad78:5432/airflow"
        AIRFLOW__CORE__SQL_ALCHEMY_CONN= "postgresql+psycopg2://airflow:airflow@nomad78:5432/airflow"
        AIRFLOW__CELERY__BROKER_URL= "redis://:@nomad78:6379/0"
        AIRFLOW__CORE__LOAD_EXAMPLES="False"
        AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION= "true"
        AIRFLOW__API__AUTH_BACKEND="airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session"
    }

      template {
        destination = "local/config/airflow.cfg"
        data        = <<EOF
{{key "conf/airflow/airflow.cfg"}}
EOF
      }
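
      # As far as I know the template "key" function reads from Consul KV;
      # since the services above use provider = "nomad", the nomadVar
      # function with a Nomad Variable may be what's needed if Consul
      # isn't part of this cluster.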

      config {
        image        = "apache/airflow:2.3.3"
        network_mode = "host"
        ports        = ["afwebserver"]
        volumes = [
          "local/dags:/opt/airflow/dags",
          "local/logs:/opt/airflow/logs",
          "local/plugins:/opt/airflow/plugins",
          "local/sql:/opt/airflow/sql",
          "local/config/airflow.cfg:/opt/airflow/airflow.cfg"
        ]
        // args = ["db", "init"]
        args = ["webserver"]
      }
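
      # Unlike compose, each Nomad group gets its own empty local/ directory,
      # so as far as I can tell these dags/logs/plugins mounts are not
      # shared between the webserver, scheduler, worker, and triggerer.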
      resources {
        cpu    = 100
        memory = 512
      }


    }
  }

    group "airflow-scheduler" {
    count = 1
    

        affinity {
      attribute  = "${node.unique.name}"
      value     = "nomad78"
      weight    = 60
    }

    restart {
      attempts = 2
      interval = "10m"
      delay    = "15s"
      mode     = "fail"
    }
    task "airflow-scheduler" {
      driver = "docker"
      user="root"
      env = {
        AIRFLOW__CORE__EXECUTOR="CeleryExecutor"
        AIRFLOW__DATABASE__SQL_ALCHEMY_CONN= "postgresql+psycopg2://airflow:airflow@nomad78:5432/airflow"
        AIRFLOW__CORE__SQL_ALCHEMY_CONN= "postgresql+psycopg2://airflow:airflow@nomad78:5432/airflow"
        AIRFLOW__CELERY__BROKER_URL= "redis://:@nomad78:6379/0"
        AIRFLOW__CORE__LOAD_EXAMPLES="False"
        AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION= "true"
        AIRFLOW__API__AUTH_BACKEND="airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session"
    }

      template {
        destination = "local/config/airflow.cfg"
        data        = <<EOF
{{key "conf/airflow/airflow.cfg"}}
EOF
      }

      config {
        image        = "apache/airflow:2.3.3"
        network_mode = "host"
        volumes = [
          "local/dags:/opt/airflow/dags",
          "local/logs:/opt/airflow/logs",
          "local/plugins:/opt/airflow/plugins",
          "local/sql:/opt/airflow/sql",
          "local/config/airflow.cfg:/opt/airflow/airflow.cfg"
        ]
        args = ["scheduler"]
      }
      resources {
        cpu    = 100
        memory = 512
      }
    }
  }

      group "airflow-worker" {
        count = 1

        affinity {
      attribute  = "${node.unique.name}"
      value     = "nomad78"
      weight    = 60
    }

    restart {
      attempts = 2
      interval = "10m"
      delay    = "15s"
      mode     = "fail"
    }
    task "airflow-worker" {
      driver = "docker"
      user="root"
      env = {
        AIRFLOW__CORE__EXECUTOR="CeleryExecutor"
        AIRFLOW__DATABASE__SQL_ALCHEMY_CONN= "postgresql+psycopg2://airflow:airflow@nomad78:5432/airflow"
        AIRFLOW__CORE__SQL_ALCHEMY_CONN= "postgresql+psycopg2://airflow:airflow@nomad78:5432/airflow"
        AIRFLOW__CELERY__BROKER_URL= "redis://:@nomad78:6379/0"
        AIRFLOW__CORE__LOAD_EXAMPLES="False"
        AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION= "true"
        AIRFLOW__API__AUTH_BACKEND="airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session"
    }

      template {
        destination = "local/config/airflow.cfg"
        data        = <<EOF
{{key "conf/airflow/airflow.cfg"}}
EOF
      }

      config {
        image        = "apache/airflow:2.3.3"
        network_mode = "host"
        volumes = [
          "local/dags:/opt/airflow/dags",
          "local/logs:/opt/airflow/logs",
          "local/plugins:/opt/airflow/plugins",
          "local/sql:/opt/airflow/sql",
          "local/config/airflow.cfg:/opt/airflow/airflow.cfg"
        ]
        args = ["celery", "worker"]
      }
      resources {
        cpu    = 100
        memory = 512
      }
    }
  }

        group "airflow-triggerer" {
    count = 1


        affinity {
      attribute  = "${node.unique.name}"
      value     = "nomad78"
      weight    = 60
    }

    restart {
      attempts = 2
      interval = "10m"
      delay    = "15s"
      mode     = "fail"
    }
    task "airflow-triggerer" {
      driver = "docker"
      user="root"
      env = {
        AIRFLOW__CORE__EXECUTOR="CeleryExecutor"
        AIRFLOW__DATABASE__SQL_ALCHEMY_CONN= "postgresql+psycopg2://airflow:airflow@nomad78:5432/airflow"
        AIRFLOW__CORE__SQL_ALCHEMY_CONN= "postgresql+psycopg2://airflow:airflow@nomad78:5432/airflow"
        AIRFLOW__CELERY__BROKER_URL= "redis://:@nomad78:6379/0"
        AIRFLOW__CORE__LOAD_EXAMPLES="False"
        AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION= "true"
        AIRFLOW__API__AUTH_BACKEND="airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session"
    }

      template {
        destination = "local/config/airflow.cfg"
        data        = <<EOF
{{key "conf/airflow/airflow.cfg"}}
EOF
      }


      config {
        image        = "apache/airflow:2.3.3"
        network_mode = "host"
        volumes = [
          "local/dags:/opt/airflow/dags",
          "local/logs:/opt/airflow/logs",
          "local/plugins:/opt/airflow/plugins",
          "local/sql:/opt/airflow/sql",
          "local/config/airflow.cfg:/opt/airflow/airflow.cfg"
        ]
        args = ["triggerer"]
      }
      resources {
        cpu    = 100
        memory = 512
      }
    }
  }

    group "airflow-init" {
    count = 1
 

        affinity {
      attribute  = "${node.unique.name}"
      value     = "nomad78"
      weight    = 60
    }

    restart {
      attempts = 2
      interval = "10m"
      delay    = "15s"
      mode     = "fail"
    }
    task "airflow-init" {
      driver = "docker"
        user="0:0"
        env{
        AIRFLOW__CORE__EXECUTOR="CeleryExecutor"
        AIRFLOW__DATABASE__SQL_ALCHEMY_CONN= "postgresql+psycopg2://airflow:airflow@nomad78:5432/airflow"
        AIRFLOW__CORE__SQL_ALCHEMY_CONN= "postgresql+psycopg2://airflow:airflow@nomad78:5432/airflow"
        AIRFLOW__CELERY__BROKER_URL= "redis://:@nomad78:6379/0"
        AIRFLOW__CORE__LOAD_EXAMPLES="False"
        AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION= "true"
        AIRFLOW__API__AUTH_BACKEND="airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session"
        AIRFLOW_DB_UPGRADE= "true"
        AIRFLOW_WWW_USER_CREATE= "true"
        AIRFLOW_WWW_USER_USERNAME= "airflow"
        AIRFLOW_WWW_USER_PASSWORD= "airflow"
    }

      config {
        image        = "apache/airflow:2.3.3"
        network_mode = "host"
        volumes = [
          "local/airflowinit:/sources"
        ]
        command = "/bin/bash"
        # bash -c takes the whole script as a single string; the "|" in the
        # compose file is YAML block-scalar syntax, not part of the command.
        args = ["-c", "mkdir -p ./dags ./logs ./plugins ./sql"]
      }
      resources {
        cpu    = 100
        memory = 512
      }
    }
  } 
}

airflow-init:
Exit Code: 2, Exit Message: “Docker container exited with non-zero exit code: 2”

All of the other tasks:
Exit Code: 1, Exit Message: “Docker container exited with non-zero exit code: 1”
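
One possible lead: bash exits with status 2 on a syntax error, so the init failure is consistent with bash -c not receiving the whole script as a single string (e.g. "|" being passed as its own argument). The exit-1 failures in the Airflow tasks could then be a malformed airflow.cfg coming out of the template, or the components failing to reach Postgres/Redis on nomad78. Any pointers on what else is wrong with this translation would be appreciated.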