04 - Terraform
Continuation of 01 - Setting up automatic data export from Cloud SQL
Perfect, let's put everything together in a well-organized way.
I'll use generic names; you just swap in your real values.
🌲 Suggested folder structure
infra-backup-sql/
├── function/
│ └── main.go
├── main.tf
├── variables.tf
├── outputs.tf
├── deploy.sh
└── security_check.sh
1️⃣ Complete Terraform configuration
variables.tf
variable "project_id" {
description = "ID do projeto GCP"
type = string
}
variable "region" {
description = "Região principal (ex: us-central1)"
type = string
default = "us-central1"
}
variable "bucket_location" {
description = "Local do bucket (ex: US, EU)"
type = string
default = "US"
}
variable "backup_bucket_name" {
description = "Nome do bucket para os dumps"
type = string
default = "backups-mysql-producao"
}
variable "backup_retention_days" {
description = "Dias para manter os dumps no bucket"
type = number
default = 60
}
variable "cloudsql_instance_id" {
description = "ID da instância Cloud SQL (apenas o nome, não o caminho completo)"
type = string
default = "db-mysql-producao-v8"
}
variable "kms_key_ring" {
description = "Nome do key ring KMS"
type = string
default = "my-keyring"
}
variable "kms_crypto_key" {
description = "Nome da crypto key KMS"
type = string
default = "mysql-backup-key"
}
variable "service_account_name" {
description = "Nome da service account para backups"
type = string
default = "cloudsql-backup"
}
variable "scheduler_cron" {
description = "Expressão CRON (ex: 0 2 * * * = 02h todos os dias)"
type = string
default = "0 2 * * *"
}
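Only project_id has no default, so you can pass everything via -var flags or keep a terraform.tfvars. As a sketch with illustrative values (adjust to your environment):
project_id            = "my-project"
region                = "us-central1"
bucket_location       = "US"
backup_bucket_name    = "backups-mysql-producao"
backup_retention_days = 60
cloudsql_instance_id  = "db-mysql-producao-v8"
scheduler_cron        = "0 2 * * *"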
main.tf
terraform {
  required_version = ">= 1.6.0"

  required_providers {
    google = {
      source  = "hashicorp/google"
      version = "~> 5.0"
    }
  }
}

provider "google" {
  project = var.project_id
  region  = var.region
}
# -------------------------------
# KMS: KeyRing + CryptoKey (CMEK)
# -------------------------------
resource "google_kms_key_ring" "backups" {
  name = var.kms_key_ring
  # KMS location names are lowercase ("us", "europe", ...), unlike bucket
  # locations ("US", "EU"); lower() covers the "US" default, but for EU you
  # would need "europe" here.
  location = lower(var.bucket_location)
}

resource "google_kms_crypto_key" "mysql_backups" {
  name            = var.kms_crypto_key
  key_ring        = google_kms_key_ring.backups.id
  rotation_period = "7776000s" # 90 days

  lifecycle {
    prevent_destroy = true
  }
}
# -------------------------------
# Backup bucket with CMEK + lifecycle
# -------------------------------
resource "google_storage_bucket" "mysql_backups" {
  name                        = var.backup_bucket_name
  location                    = var.bucket_location
  force_destroy               = false
  uniform_bucket_level_access = true

  versioning {
    enabled = false
  }

  lifecycle_rule {
    action {
      type = "Delete"
    }
    condition {
      age = var.backup_retention_days
    }
  }

  encryption {
    default_kms_key_name = google_kms_crypto_key.mysql_backups.id
  }
}

# Bucket to store the Cloud Function source code
resource "google_storage_bucket" "cf_source" {
  name                        = "${var.project_id}-cf-source"
  location                    = var.bucket_location
  uniform_bucket_level_access = true
}

# ZIP object with the Function code
resource "google_storage_bucket_object" "function_zip" {
  name   = "sql-export.zip"
  bucket = google_storage_bucket.cf_source.name
  source = "${path.module}/function/sql-export.zip"
}
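# Optional alternative (a sketch, not part of the setup above): the
# hashicorp/archive provider can build the ZIP at plan time instead of
# deploy.sh, and hashing the archive into the object name forces the Function
# to redeploy whenever the code changes. It would replace the object above:
#
# data "archive_file" "function_zip" {
#   type        = "zip"
#   source_dir  = "${path.module}/function"
#   output_path = "${path.module}/function/sql-export.zip"
#   excludes    = ["sql-export.zip"]
# }
#
# resource "google_storage_bucket_object" "function_zip" {
#   name   = "sql-export-${data.archive_file.function_zip.output_md5}.zip"
#   bucket = google_storage_bucket.cf_source.name
#   source = data.archive_file.function_zip.output_path
# }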
# -------------------------------
# Service Account
# -------------------------------
resource "google_service_account" "cloudsql_backup" {
  account_id   = var.service_account_name
  display_name = "Cloud SQL Automated Backup Service Account"
}

# Project-level permissions: Cloud SQL Admin + Storage Admin
resource "google_project_iam_member" "sa_cloudsql_admin" {
  project = var.project_id
  role    = "roles/cloudsql.admin"
  member  = "serviceAccount:${google_service_account.cloudsql_backup.email}"
}

resource "google_project_iam_member" "sa_storage_admin" {
  project = var.project_id
  role    = "roles/storage.admin"
  member  = "serviceAccount:${google_service_account.cloudsql_backup.email}"
}

# Permission to use the KMS key (CMEK)
resource "google_kms_crypto_key_iam_member" "sa_kms_usage" {
  crypto_key_id = google_kms_crypto_key.mysql_backups.id
  role          = "roles/cloudkms.cryptoKeyEncrypterDecrypter"
  member        = "serviceAccount:${google_service_account.cloudsql_backup.email}"
}
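# Note: two additional grants are usually needed for this flow to work end to
# end (a sketch; verify against your setup before applying):
#
# 1) The Cloud Storage service agent must be able to use the CMEK key,
#    otherwise writes to a bucket with default_kms_key_name fail.
data "google_storage_project_service_account" "gcs_account" {}

resource "google_kms_crypto_key_iam_member" "gcs_kms_usage" {
  crypto_key_id = google_kms_crypto_key.mysql_backups.id
  role          = "roles/cloudkms.cryptoKeyEncrypterDecrypter"
  member        = "serviceAccount:${data.google_storage_project_service_account.gcs_account.email_address}"
}

# 2) The dump itself is written by the Cloud SQL instance's own service
#    account, not by the backup SA, so that account needs write access on the
#    backup bucket.
data "google_sql_database_instance" "target" {
  name = var.cloudsql_instance_id
}

resource "google_storage_bucket_iam_member" "cloudsql_instance_writer" {
  bucket = google_storage_bucket.mysql_backups.name
  role   = "roles/storage.objectAdmin"
  member = "serviceAccount:${data.google_sql_database_instance.target.service_account_email_address}"
}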
# -------------------------------
# Cloud Function (Gen 1 – HTTP)
# -------------------------------
resource "google_cloudfunctions_function" "sql_export" {
  name                  = "sql-export"
  description           = "Exports the Cloud SQL MySQL instance to GCS daily"
  runtime               = "go122"
  available_memory_mb   = 256
  source_archive_bucket = google_storage_bucket.cf_source.name
  source_archive_object = google_storage_bucket_object.function_zip.name
  trigger_http          = true
  entry_point           = "ExportBackup"
  service_account_email = google_service_account.cloudsql_backup.email

  environment_variables = {
    PROJECT_ID    = var.project_id
    INSTANCE_ID   = var.cloudsql_instance_id
    BACKUP_BUCKET = google_storage_bucket.mysql_backups.name
    BACKUP_PREFIX = "daily"
    KMS_KEY_NAME  = google_kms_crypto_key.mysql_backups.id
  }
}

# Allow ONLY the Service Account to invoke the Function
resource "google_cloudfunctions_function_iam_member" "invoker_sa" {
  project        = var.project_id
  region         = var.region
  cloud_function = google_cloudfunctions_function.sql_export.name
  role           = "roles/cloudfunctions.invoker"
  member         = "serviceAccount:${google_service_account.cloudsql_backup.email}"
}
# -------------------------------
# Cloud Scheduler Job
# -------------------------------
resource "google_cloud_scheduler_job" "sql_export_daily" {
  name        = "sql-export-daily"
  description = "Daily schedule for the SQL export to GCS"
  schedule    = var.scheduler_cron
  time_zone   = "America/Sao_Paulo"

  http_target {
    uri         = google_cloudfunctions_function.sql_export.https_trigger_url
    http_method = "GET"

    oidc_token {
      service_account_email = google_service_account.cloudsql_backup.email
    }
  }
}
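After terraform apply you don't have to wait for the CRON window: the Scheduler job can be triggered manually (job name from the resource above, location matching var.region):
gcloud scheduler jobs run sql-export-daily --location=us-central1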
outputs.tf
output "backup_bucket_name" {
value = google_storage_bucket.mysql_backups.name
description = "Bucket onde os dumps são salvos"
}
output "cloud_function_url" {
value = google_cloudfunctions_function.sql_export.https_trigger_url
description = "URL HTTP da Cloud Function (protegida por IAM)"
}
output "service_account_email" {
value = google_service_account.cloudsql_backup.email
description = "Service Account usada pela Function/Scheduler"
}
output "kms_key_name" {
value = google_kms_crypto_key.mysql_backups.id
description = "Nome completo da KMS CryptoKey (CMEK)"
}
2️⃣ Cloud Function code (Go)
function/main.go
package backup

import (
	"context"
	"fmt"
	"log"
	"net/http"
	"os"
	"time"

	sqladmin "google.golang.org/api/sqladmin/v1beta4"
)

// ExportBackup starts a Cloud SQL export of the instance to a GCS object.
func ExportBackup(w http.ResponseWriter, r *http.Request) {
	ctx := context.Background()

	project := os.Getenv("PROJECT_ID")
	instance := os.Getenv("INSTANCE_ID")
	bucket := os.Getenv("BACKUP_BUCKET") // bucket name only, without gs://
	prefix := os.Getenv("BACKUP_PREFIX")

	if project == "" || instance == "" || bucket == "" {
		http.Error(w, "Environment variables not configured", http.StatusInternalServerError)
		return
	}

	now := time.Now().Format("2006-01-02-150405")
	objectPath := fmt.Sprintf("%s/backup-%s.sql.gz", prefix, now)
	// The .gz suffix on the destination URI makes Cloud SQL compress the dump.
	uri := fmt.Sprintf("gs://%s/%s", bucket, objectPath)

	sqlService, err := sqladmin.NewService(ctx)
	if err != nil {
		log.Printf("Error creating Cloud SQL Admin service: %v", err)
		http.Error(w, "Internal error", http.StatusInternalServerError)
		return
	}

	req := &sqladmin.InstancesExportRequest{
		ExportContext: &sqladmin.ExportContext{
			Kind:     "sql#exportContext",
			FileType: "SQL",
			Uri:      uri,
		},
	}

	op, err := sqlService.Instances.Export(project, instance, req).Context(ctx).Do()
	if err != nil {
		log.Printf("Error starting export: %v", err)
		http.Error(w, "Error starting export", http.StatusInternalServerError)
		return
	}

	log.Printf("Export started to %s (operation: %s)", uri, op.Name)
	fmt.Fprintf(w, "Export started to %s. Operation: %s\n", uri, op.Name)
}
Note: you need to generate sql-export.zip with the function code before running terraform apply (the script below does that).
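The export is asynchronous: the function only starts the operation and returns its name. To check whether recent exports finished, you can list the instance's operations (instance name from the defaults above):
gcloud sql operations list --instance="db-mysql-producao-v8" --limit=3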
3️⃣ Automated deploy shell script
deploy.sh
#!/usr/bin/env bash
set -euo pipefail

# Basic settings
PROJECT_ID="${PROJECT_ID:-YOUR_PROJECT}"
REGION="${REGION:-us-central1}"

echo "Using project: $PROJECT_ID"
echo "Region: $REGION"

# 1. Set the default project
gcloud config set project "$PROJECT_ID"

# 2. Package the Cloud Function
echo "Building the Cloud Function ZIP..."
pushd function >/dev/null
go mod init sql-export-function 2>/dev/null || true
go mod tidy
# Recreate the ZIP inside function/ (where Terraform expects it), excluding
# any previous archive so it is not packaged into itself.
rm -f sql-export.zip
zip -r sql-export.zip . -x "sql-export.zip" >/dev/null
popd >/dev/null

# 3. Terraform
echo "Running terraform init..."
terraform init

echo "Running terraform apply..."
terraform apply -auto-approve -var "project_id=${PROJECT_ID}" -var "region=${REGION}"

echo "Deploy complete!"
You can export PROJECT_ID and REGION before running it:
export PROJECT_ID=my-project
./deploy.sh
4️⃣ Security validation + IAM audit script
security_check.sh
#!/usr/bin/env bash
set -euo pipefail
PROJECT_ID="${PROJECT_ID:-YOUR_PROJECT}"
REGION="${REGION:-us-central1}"
BUCKET="${BUCKET:-backups-mysql-producao}"
KMS_LOCATION="${KMS_LOCATION:-us}" # KMS locations are lowercase ("us"), unlike bucket locations ("US")
KMS_KEY_RING="${KMS_KEY_RING:-my-keyring}"
KMS_KEY_NAME="${KMS_KEY_NAME:-mysql-backup-key}"
SA_NAME="${SA_NAME:-cloudsql-backup}"
SA_EMAIL="${SA_NAME}@${PROJECT_ID}.iam.gserviceaccount.com"
KMS_PATH="projects/${PROJECT_ID}/locations/${KMS_LOCATION}/keyRings/${KMS_KEY_RING}/cryptoKeys/${KMS_KEY_NAME}"
echo "=== Verificando configurações de segurança ==="
echo "Projeto: ${PROJECT_ID}"
echo "Bucket: gs://${BUCKET}"
echo "SA: ${SA_EMAIL}"
echo "KMS: ${KMS_PATH}"
echo
echo "1) Políticas IAM do projeto para a Service Account:"
gcloud projects get-iam-policy "$PROJECT_ID" \
--flatten="bindings[].members" \
--format="table(bindings.role, bindings.members)" \
--filter="bindings.members:serviceAccount:${SA_EMAIL}"
echo
echo "2) IAM da CryptoKey (KMS):"
gcloud kms keys get-iam-policy "${KMS_KEY_NAME}" \
--keyring="${KMS_KEY_RING}" \
--location="US" \
--project="${PROJECT_ID}"
echo
echo "3) Detalhes do bucket de backup:"
gcloud storage buckets describe "gs://${BUCKET}" \
--format="yaml(encryption,iamConfiguration,location)"
echo
echo "4) IAM da Cloud Function:"
CF_NAME="sql-export"
gcloud functions get-iam-policy "${CF_NAME}" \
--region="${REGION}" \
--project="${PROJECT_ID}"
echo
echo "5) Testando chamada ANÔNIMA (deve falhar 403):"
CF_URL=$(gcloud functions describe "${CF_NAME}" --region="${REGION}" --format="value(httpsTrigger.url)")
echo "URL da Function: ${CF_URL}"
echo "curl (sem autenticação):"
set +e
curl -i "${CF_URL}"
set -e
echo
echo "Se a chamada anônima retornou 403, a função NÃO está pública. ✅"
5️⃣ Complete Mermaid diagram
flowchart LR
  subgraph GCP["Google Cloud"]
    direction LR
    subgraph SCHED["Cloud Scheduler"]
      JOB["Daily CRON job"]
    end
    subgraph CF["Cloud Function (sql-export)"]
      FUNC["ExportBackup (Go)"]
    end
    subgraph SQL["Cloud SQL - MySQL"]
      DB["Instance db-mysql-producao-v8"]
    end
    subgraph STORAGE["Cloud Storage"]
      BUCKET["Bucket gs://backups-mysql-producao\nCMEK + Lifecycle"]
    end
    subgraph KMS["Cloud KMS"]
      KEY["CryptoKey mysql-backup-key\nKeyRing my-keyring"]
    end
  end
  JOB -->|HTTP + OIDC SA cloudsql-backup| FUNC
  FUNC -->|Calls Cloud SQL Admin API instances.export| DB
  DB -->|dump .sql.gz| BUCKET
  BUCKET -->|Default CMEK encryption| KEY