Failed to upload script: scp: /home/osboxes/Desktop/terraform: No such file or directory

Hi,

I am getting the error below when I run terraform apply:

aws_instance.ec2_instance (remote-exec): Connecting to remote host via SSH...
aws_instance.ec2_instance (remote-exec):   Host: x.x.x.x
aws_instance.ec2_instance (remote-exec):   User: ubuntu
aws_instance.ec2_instance (remote-exec):   Password: false
aws_instance.ec2_instance (remote-exec):   Private key: true
aws_instance.ec2_instance (remote-exec):   Certificate: false
aws_instance.ec2_instance (remote-exec):   SSH Agent: false
aws_instance.ec2_instance (remote-exec):   Checking Host Key: false
aws_instance.ec2_instance (remote-exec):   Target Platform: unix
aws_instance.ec2_instance (remote-exec): Connected!
╷
│ Error: remote-exec provisioner error
│
│   with aws_instance.ec2_instance,
│   on main.tf line 138, in resource "aws_instance" "ec2_instance":
│  138:  provisioner "remote-exec" {
│
│ Failed to upload script: scp: /home/osboxes/Desktop/terraform: No such file or directory

Here is my main.tf

# configured aws provider with proper credentials
provider "aws" {
}


# create default vpc if one does not exist
resource "aws_default_vpc" "default_vpc" {

  tags = {
    Name = "default vpc"
  }
}


# use data source to get all availability zones in region
data "aws_availability_zones" "available_zones" {}


# create default subnet if one does not exist
resource "aws_default_subnet" "default_az1" {
  availability_zone = data.aws_availability_zones.available_zones.names[0]

  tags = {
    Name = "default subnet"
  }
}


# create security group for the ec2 instance
resource "aws_security_group" "ec2_security_group_terraform" {
  name        = "ec2 security group terraform"
  description = "allow access on ports 80 and 22"
  vpc_id      = aws_default_vpc.default_vpc.id

  ingress {
    description = "http access"
    from_port   = 80
    to_port     = 80
    protocol    = "tcp"
    cidr_blocks = ["0.0.0.0/0"]
  }

  ingress {
    description = "ssh access"
    from_port   = 22
    to_port     = 22
    protocol    = "tcp"
    cidr_blocks = ["0.0.0.0/0"]
  }

  egress {
    from_port   = 0
    to_port     = 0
    protocol    = "-1"
    cidr_blocks = ["0.0.0.0/0"]
  }

  tags = {
    Name = "ec2 security group terraform"
  }
}
# use data source to get a registered ubuntu ami
data "aws_ami" "ubuntu" {
  most_recent = true
  #  owners      = ["amazon"]

  filter {
    name   = "name"
    values = ["ubuntu/images/hvm-ssd/ubuntu-focal-20.04-amd64-server-*"]
  }

  filter {
    name   = "virtualization-type"
    values = ["hvm*"]
  }

  owners = ["09xxxxxx77"] # Canonical
}


# launch the ec2 instance and install website
resource "aws_instance" "ec2_instance" {
  ami                    = data.aws_ami.ubuntu.id
  instance_type          = "t2.xlarge"
#  instance_count         = "2"
  subnet_id              = aws_default_subnet.default_az1.id
  vpc_security_group_ids = [aws_security_group.ec2_security_group_terraform.id]
  key_name               = "terraformkey"
  user_data              = "${file("kubectl.sh")}"
#  user_data              = file("kubectl.sh")
#  user_data              = file("${path.module}/kubectl.sh")
  tags = {
    Name = "k8s install"
  }

#  connection {
#      type     = "ssh"
#      user     = "ubuntu"
# #     private_key = file("terraformkey.pem")
# #      host = aws_instance.public_ip
#       host = "${self.public_ip}"
#  }

#  provisioner "local-exec" {
#    command = "touch kubectl-install-local"
#  }

#  provisioner "remote-exec" {
#    script = "/home/osboxes/Desktop/terraform/kubectl.sh"
#  }

#  provisioner "file" {
#    source      = "terraform.tfstate.backup"
#    destination = "/tmp/"
#  }
# }

#  provisioner "file" {
#     source      = "/home/osboxes/Desktop/terraform/kubectl.sh"
#     destination = "/tmp/kubectl.sh"
#  }

  connection {
    type        = "ssh"
    user        = "ubuntu"
    private_key = file("/home/osboxes/Desktop/terraform/terraformkey.pem")
    host        = self.public_ip
  }

  provisioner "remote-exec" {
    script = "/home/osboxes/Desktop/terraform/kubectl.sh"
  }

  # provisioner "remote-exec" {
  #    inline = [
  #      "chmod +x /tmp/kubectl.sh",
  #         #  "sudo su - root 'bash test.sh' &"
  #      "/tmp/kubectl.sh args",
  #    ]
  #  }
}
# print the ec2's public ipv4 address
output "public_ipv4_address" {
  value = aws_instance.ec2_instance.public_ip
}

Please tell me how to pass user_data in Terraform and how to use the "remote-exec" provisioner. My shell script, the .pem key file, and main.tf are all stored in the same path/location.
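For reference, this is roughly the layout I am trying to get working. It is only a minimal sketch, not my actual config: the host = self.public_ip wiring and the path.module-relative paths are my assumption (since the script, the key, and main.tf sit in the same directory); the ami, key_name, subnet, and security group references are taken from the config above.

resource "aws_instance" "ec2_instance" {
  ami                    = data.aws_ami.ubuntu.id
  instance_type          = "t2.xlarge"
  subnet_id              = aws_default_subnet.default_az1.id
  vpc_security_group_ids = [aws_security_group.ec2_security_group_terraform.id]
  key_name               = "terraformkey"

  # pass the local script as user_data, resolved relative to the directory
  # that contains main.tf
  user_data = file("${path.module}/kubectl.sh")

  connection {
    type        = "ssh"
    user        = "ubuntu"
    private_key = file("${path.module}/terraformkey.pem")
    host        = self.public_ip
  }

  # upload the same local script over SSH and run it on the instance
  provisioner "remote-exec" {
    script = "${path.module}/kubectl.sh"
  }

  tags = {
    Name = "k8s install"
  }
}

My understanding is that user_data would run the script through cloud-init at first boot, while remote-exec uploads the local script over SSH and executes it; is that the right way to pass both?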