NodeCreationFailure: Instances failed to join the kubernetes cluster

We are trying to create an EKS cluster with the terraform-aws-modules/eks/aws module, version 18.26.3.

However, the worker nodes are not able to join the cluster, and node group creation fails with the error below:

NodeCreationFailure: Instances failed to join the kubernetes cluster
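
For reference, the same failure is also reported on the managed node group's health status, e.g. (cluster and node group names below are placeholders):

aws eks describe-nodegroup --cluster-name my-cluster --nodegroup-name green --query 'nodegroup.health.issues'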

I am using the config below:

module "eks" {
  source  = "terraform-aws-modules/eks/aws"
  version = "18.26.3"

  cluster_name    = var.cluster_name
  cluster_version = "1.22"
  subnet_ids      = ["subnet-###78b******","subnet-###ca7*********"]
  # cluster_endpoint_private_access = true # Enable private API endpoint (requests within cluster VPC use this endpoint)

  tags = merge(local.aws_tags, { Name = var.cluster_name })

  vpc_id = "vpc-0d7ff8aaa*******"
  
  cluster_addons = {
    coredns = {
      resolve_conflicts = "OVERWRITE"
    }
    kube-proxy = {}
    vpc-cni = {
      resolve_conflicts = "OVERWRITE"
    }
  }
  eks_managed_node_group_defaults = {
    # root_volume_type = "gp2"
    disk_size = 20
  }

  # eks_managed_node_groups = local.eks_managed_node_groups_map
  eks_managed_node_groups = {
    green = {
      min_size     = 1
      max_size     = 10
      desired_size = 1

      instance_types = ["t3.large"]

      labels = {
        Environment = "test"
        GithubRepo  = "terraform-aws-eks"
        GithubOrg   = "terraform-aws-modules"
      }
    }
  }
  aws_auth_roles = [
    {
      username = "eks-admin"
      rolearn  = var.role_arn
      groups   = ["system:masters"]
    },
  ]
  # aws_auth_users    = []
  cluster_ip_family = "ipv4"
  # cluster_security_group_name = ""
  cluster_service_ipv4_cidr = "172.16.0.0/12"
  iam_role_description = "eks-admin role"
  iam_role_name = "eks-admin"
  manage_aws_auth_configmap = true
}
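
Since manage_aws_auth_configmap = true requires a configured kubernetes provider, for completeness this is the kind of provider block the setup assumes (a sketch following the module's documented v18 pattern; in v18 the cluster name is exposed as module.eks.cluster_id):

provider "kubernetes" {
  host                   = module.eks.cluster_endpoint
  cluster_ca_certificate = base64decode(module.eks.cluster_certificate_authority_data)

  exec {
    api_version = "client.authentication.k8s.io/v1beta1"
    command     = "aws"
    # Requires the AWS CLI to be installed where Terraform runs
    args = ["eks", "get-token", "--cluster-name", module.eks.cluster_id]
  }
}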