Copy an updated shell script to a running EC2 instance without recreating the instance

I am trying to copy a shell script to a running EC2 instance every time the script is updated.

The main requirement is that the running EC2 instance must not be stopped, recreated, or rebuilt; Terraform should only copy the updated shell script from my local machine into the running instance.

Below is my code, which uses a null_resource with a trigger on the script file's sha256 hash. However, every time I run terraform apply, it destroys the EC2 instance and the Elastic IP and recreates fresh ones.

Kindly help.

resource "aws_instance" "my-ec2" {
  ami           = var.ami
  instance_type = "t3.medium"
  root_block_device {
    volume_size = 20
  }
  key_name                    = aws_key_pair.server_instance.key_name
  vpc_security_group_ids      = [aws_security_group.server_sg.id]
  subnet_id                   = aws_subnet.public_subnet_a.id
  associate_public_ip_address = false
}

resource "aws_eip" "server_eip" {
  domain = "vpc"
}

resource "aws_eip_association" "eip-association" {
  instance_id   = aws_instance.my-ec2.id
  allocation_id = aws_eip.server_eip.id
}

resource "null_resource" "shell_script" {
  triggers = {
    script_sha = sha256(file("${path.module}/script.sh"))
  }
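  # Copy the local script into the instance's home directory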
  provisioner "file" {
    source      = "${path.module}/script.sh"
    destination = "/home/ubuntu/script.sh"
  }
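  # SSH to the instance through its Elastic IP using the generated key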
  connection {
    type        = "ssh"
    user        = "ubuntu"
    private_key = tls_private_key.server_instance.private_key_openssh
    host        = aws_eip.server_eip.public_ip
  }
}
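I know I could push the file manually over SSH, roughly like the line below (the key file name and the Elastic IP are placeholders), but the whole point is for Terraform to do this on apply without replacing anything:

scp -i server_instance.pem ./script.sh ubuntu@<elastic-ip>:/home/ubuntu/script.sh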