Posté par Dino, mise à jour le 09/05/2026 à 09:09:36
Dans un monde DevOps idéal, chaque ressource cloud provient de Terraform ou de Bicep. Dans la vraie vie, nous devons composer avec le « ClickOps » : un ingénieur ajuste manuellement un Network Security Group (NSG) pour réparer une panne de production, ou un groupe de ressources hérité existe sans aucune définition de code.
import os
import subprocess
import shutil
import datetime
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobServiceClient
# Configuration
# Azure subscription to operate in; read from the environment so the
# script can run unchanged across environments. May be None if unset.
SUBSCRIPTION_ID = os.getenv("AZURE_SUBSCRIPTION_ID")
# Resource group whose resources are exported to Terraform.
TARGET_RG = "mission-critical-rg"
# Blob container that receives the zipped exports.
BACKUP_CONTAINER = "infra-backups"
# Storage account endpoint hosting BACKUP_CONTAINER.
STORAGE_ACCOUNT_URL = "https://mybackupvault.blob.core.windows.net"
def run_export(resource_group):
    """
    Run aztfexport to reverse-engineer an Azure resource group into
    Terraform configuration files.

    Args:
        resource_group: Name of the Azure resource group to export.

    Returns:
        The timestamped output directory path (str) on success, or
        None if the export failed or aztfexport is not installed.
    """
    # Timestamped directory so repeated exports of the same RG never collide.
    timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    output_dir = f"./exports/{resource_group}/{timestamp}"
    print(f" Starting export for {resource_group}...")
    # --non-interactive auto-accepts defaults. Add "--hcl-only" here if
    # you want only .tf files without the accompanying state file.
    cmd = [
        "aztfexport",
        "resource-group",
        resource_group,
        "--non-interactive",
        "--output-dir", output_dir,
    ]
    try:
        # check=True raises CalledProcessError on a non-zero exit code;
        # capture_output keeps stderr available for the error message.
        subprocess.run(cmd, check=True, capture_output=True, text=True)
    except FileNotFoundError:
        # The aztfexport binary itself is missing from PATH — report it
        # the same way as any other export failure instead of crashing.
        print(" Export failed: aztfexport not found on PATH")
        return None
    except subprocess.CalledProcessError as e:
        print(f" Export failed: {e.stderr}")
        return None
    print(f" Export successful for {resource_group}")
    return output_dir
def archive_and_upload(source_dir, resource_group):
    """
    Compress the exported Terraform files and upload the archive to
    Azure Blob Storage, then remove the local copies.

    Args:
        source_dir: Directory containing the exported Terraform files.
        resource_group: Resource group name, used to name the archive.

    Raises:
        Propagates Azure SDK / OS errors from authentication or upload;
        local files are cleaned up even in that case.
    """
    # 1. Create the zip archive in the current working directory.
    zip_name = f"{resource_group}-backup"
    shutil.make_archive(zip_name, 'zip', source_dir)
    full_zip_path = f"{zip_name}.zip"
    try:
        # 2. Upload to Blob Storage. DefaultAzureCredential resolves
        #    managed identity, environment variables, or az-login creds.
        print(f" Uploading {full_zip_path} to immutable storage...")
        credential = DefaultAzureCredential()
        blob_service_client = BlobServiceClient(account_url=STORAGE_ACCOUNT_URL, credential=credential)
        container_client = blob_service_client.get_container_client(container=BACKUP_CONTAINER)
        with open(full_zip_path, "rb") as data:
            # NOTE(review): overwrite=True on a fixed blob name replaces the
            # previous backup of this RG — add a timestamp to the blob name
            # (or container-level versioning) if true immutability is wanted.
            container_client.upload_blob(name=full_zip_path, data=data, overwrite=True)
        print(f" Backup secured: {full_zip_path}")
    finally:
        # 3. Clean up local files even when the upload fails, so retries
        #    don't accumulate stale archives and export directories.
        os.remove(full_zip_path)
        shutil.rmtree(source_dir)
if __name__ == "__main__":
    # Orchestration: export first, then archive/upload only when the
    # export actually produced an output directory.
    exported_dir = run_export(TARGET_RG)
    if exported_dir is not None:
        archive_and_upload(exported_dir, TARGET_RG)