Terraform · Infrastructure as Code · DevOps · Cloud · Automation
Terraform Infrastructure as Code 2026: Cloud Automation Mastery
Miraclin Technologies

Terraform Infrastructure as Code 2026: Cloud Automation Mastery
Terraform is the leading Infrastructure as Code (IaC) tool, enabling teams to provision and manage cloud infrastructure through declarative configuration files. This guide covers advanced Terraform concepts, best practices, and strategies for building scalable, maintainable infrastructure.
Terraform Fundamentals
Basic Configuration
# main.tf
terraform {
required_version = ">= 1.0"
required_providers {
aws = {
source = "hashicorp/aws"
version = "~> 5.0"
}
}
}
provider "aws" {
region = var.aws_region
}
# variables.tf
variable "aws_region" {
description = "AWS region"
type = string
default = "us-west-2"
}
variable "environment" {
description = "Environment name"
type = string
validation {
condition = contains(["dev", "staging", "prod"], var.environment)
error_message = "Environment must be dev, staging, or prod."
}
}
# outputs.tf
output "vpc_id" {
description = "ID of the VPC"
value = aws_vpc.main.id
}
output "public_subnet_ids" {
description = "IDs of the public subnets"
value = aws_subnet.public[*].id
}
Resource Management
# VPC Configuration
resource "aws_vpc" "main" {
cidr_block = "10.0.0.0/16"
enable_dns_hostnames = true
enable_dns_support = true
tags = {
Name = "${var.environment}-vpc"
Environment = var.environment
}
}
# Internet Gateway
resource "aws_internet_gateway" "main" {
vpc_id = aws_vpc.main.id
tags = {
Name = "${var.environment}-igw"
}
}
# Public Subnets
resource "aws_subnet" "public" {
count = length(var.availability_zones)
vpc_id = aws_vpc.main.id
cidr_block = "10.0.${count.index + 1}.0/24"
availability_zone = var.availability_zones[count.index]
map_public_ip_on_launch = true
tags = {
Name = "${var.environment}-public-${count.index + 1}"
Type = "public"
}
}
# Route Table
resource "aws_route_table" "public" {
vpc_id = aws_vpc.main.id
route {
cidr_block = "0.0.0.0/0"
gateway_id = aws_internet_gateway.main.id
}
tags = {
Name = "${var.environment}-public-rt"
}
}
# Route Table Association
resource "aws_route_table_association" "public" {
count = length(aws_subnet.public)
subnet_id = aws_subnet.public[count.index].id
route_table_id = aws_route_table.public.id
}
Advanced Terraform Features
Modules
# modules/vpc/main.tf
variable "environment" {
description = "Environment name"
type = string
}
variable "vpc_cidr" {
description = "CIDR block for VPC"
type = string
default = "10.0.0.0/16"
}
variable "availability_zones" {
description = "Availability zones"
type = list(string)
}
resource "aws_vpc" "this" {
cidr_block = var.vpc_cidr
enable_dns_hostnames = true
enable_dns_support = true
tags = {
Name = "${var.environment}-vpc"
Environment = var.environment
}
}
# modules/vpc/outputs.tf
output "vpc_id" {
description = "ID of the VPC"
value = aws_vpc.this.id
}
output "vpc_cidr_block" {
description = "CIDR block of the VPC"
value = aws_vpc.this.cidr_block
}
# Using the module
module "vpc" {
source = "./modules/vpc"
environment = var.environment
vpc_cidr = "10.0.0.0/16"
availability_zones = ["us-west-2a", "us-west-2b", "us-west-2c"]
}
Data Sources and Locals
# Data sources
data "aws_ami" "amazon_linux" {
most_recent = true
owners = ["amazon"]
filter {
name = "name"
values = ["amzn2-ami-hvm-*-x86_64-gp2"]
}
}
data "aws_availability_zones" "available" {
state = "available"
}
# Local values
locals {
common_tags = {
Environment = var.environment
Project = "myapp"
ManagedBy = "terraform"
}
availability_zones = slice(data.aws_availability_zones.available.names, 0, 3)
subnet_cidrs = [
for i in range(length(local.availability_zones)) :
cidrsubnet(var.vpc_cidr, 8, i + 1)
]
}
# Using locals
resource "aws_instance" "web" {
ami = data.aws_ami.amazon_linux.id
instance_type = "t3.micro"
tags = merge(local.common_tags, {
Name = "${var.environment}-web-server"
})
}
Dynamic Blocks
# Security Group with dynamic rules
resource "aws_security_group" "web" {
name_prefix = "${var.environment}-web-"
vpc_id = module.vpc.vpc_id
dynamic "ingress" {
for_each = var.ingress_rules
content {
from_port = ingress.value.from_port
to_port = ingress.value.to_port
protocol = ingress.value.protocol
cidr_blocks = ingress.value.cidr_blocks
}
}
dynamic "egress" {
for_each = var.egress_rules
content {
from_port = egress.value.from_port
to_port = egress.value.to_port
protocol = egress.value.protocol
cidr_blocks = egress.value.cidr_blocks
}
}
tags = local.common_tags
}
# Variable definition
variable "ingress_rules" {
description = "List of ingress rules"
type = list(object({
from_port = number
to_port = number
protocol = string
cidr_blocks = list(string)
}))
default = [
{
from_port = 80
to_port = 80
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
},
{
from_port = 443
to_port = 443
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
]
}
State Management
Remote State Configuration
# backend.tf
terraform {
backend "s3" {
bucket = "mycompany-terraform-state"
key = "environments/prod/terraform.tfstate"
region = "us-west-2"
encrypt = true
dynamodb_table = "terraform-state-lock" # NOTE: deprecated since Terraform 1.10 — prefer S3 native locking via `use_lockfile = true`
}
}
# Create S3 bucket for state
resource "aws_s3_bucket" "terraform_state" {
bucket = "mycompany-terraform-state"
lifecycle {
prevent_destroy = true
}
}
resource "aws_s3_bucket_versioning" "terraform_state" {
bucket = aws_s3_bucket.terraform_state.id
versioning_configuration {
status = "Enabled"
}
}
resource "aws_s3_bucket_server_side_encryption_configuration" "terraform_state" {
bucket = aws_s3_bucket.terraform_state.id
rule {
apply_server_side_encryption_by_default {
sse_algorithm = "AES256"
}
}
}
# DynamoDB table for state locking
resource "aws_dynamodb_table" "terraform_state_lock" {
name = "terraform-state-lock"
billing_mode = "PAY_PER_REQUEST"
hash_key = "LockID"
attribute {
name = "LockID"
type = "S"
}
}
State Import and Migration
# Import existing resources
terraform import aws_instance.web i-1234567890abcdef0
# State manipulation
terraform state list
terraform state show aws_instance.web
terraform state mv aws_instance.web aws_instance.web_server
terraform state rm aws_instance.old
# Refresh state (the standalone `terraform refresh` command is deprecated)
terraform plan -refresh-only
terraform apply -refresh-only
# State backup
terraform state pull > terraform.tfstate.backup
Multi-Environment Management
Workspace Strategy
# Create workspaces
terraform workspace new dev
terraform workspace new staging
terraform workspace new prod
# Switch workspaces
terraform workspace select dev
terraform workspace list
# Environment-specific variables
terraform apply -var-file="environments/dev.tfvars"
Directory Structure
terraform/
├── environments/
│ ├── dev/
│ │ ├── main.tf
│ │ ├── variables.tf
│ │ └── terraform.tfvars
│ ├── staging/
│ │ ├── main.tf
│ │ ├── variables.tf
│ │ └── terraform.tfvars
│ └── prod/
│ ├── main.tf
│ ├── variables.tf
│ └── terraform.tfvars
├── modules/
│ ├── vpc/
│ ├── ec2/
│ └── rds/
└── shared/
├── backend.tf
└── providers.tf
Environment Configuration
# environments/dev/terraform.tfvars
environment = "dev"
instance_type = "t3.micro"
min_size = 1
max_size = 2
desired_capacity = 1
# environments/prod/terraform.tfvars
environment = "prod"
instance_type = "t3.large"
min_size = 3
max_size = 10
desired_capacity = 3
Advanced Patterns
For Each and Count
# Using for_each with maps
variable "users" {
description = "Map of users"
type = map(object({
role = string
tags = map(string)
}))
default = {
"alice" = {
role = "admin"
tags = { department = "engineering" }
}
"bob" = {
role = "developer"
tags = { department = "engineering" }
}
}
}
resource "aws_iam_user" "users" {
for_each = var.users
name = each.key
path = "/"
tags = merge(each.value.tags, {
Role = each.value.role
})
}
# Using count with conditional logic
resource "aws_instance" "web" {
count = var.create_instances ? var.instance_count : 0
ami = data.aws_ami.amazon_linux.id
instance_type = var.instance_type
tags = {
Name = "${var.environment}-web-${count.index + 1}"
}
}
Custom Validation
variable "instance_type" {
description = "EC2 instance type"
type = string
validation {
condition = can(regex("^t[2-4]\\.", var.instance_type))
error_message = "Instance type must be a t2, t3, or t4 instance."
}
}
variable "environment" {
description = "Environment name"
type = string
validation {
condition = contains(["dev", "staging", "prod"], var.environment)
error_message = "Environment must be one of: dev, staging, prod."
}
}
Provisioners and Null Resources
# File provisioner
resource "aws_instance" "web" {
ami = data.aws_ami.amazon_linux.id
instance_type = "t3.micro"
key_name = aws_key_pair.deployer.key_name
provisioner "file" {
source = "scripts/setup.sh"
destination = "/tmp/setup.sh"
connection {
type = "ssh"
user = "ec2-user"
private_key = file("~/.ssh/id_rsa")
host = self.public_ip
}
}
provisioner "remote-exec" {
inline = [
"chmod +x /tmp/setup.sh",
"/tmp/setup.sh"
]
connection {
type = "ssh"
user = "ec2-user"
private_key = file("~/.ssh/id_rsa")
host = self.public_ip
}
}
}
# Null resource for external actions
resource "null_resource" "database_setup" {
depends_on = [aws_db_instance.main]
provisioner "local-exec" {
command = "python scripts/setup_database.py ${aws_db_instance.main.endpoint}"
}
triggers = {
db_instance_id = aws_db_instance.main.id
}
}
Testing and Validation
Terraform Validate and Plan
# Validate configuration
terraform validate
# Format code
terraform fmt -recursive
# Plan with detailed output
terraform plan -detailed-exitcode -out=tfplan
# Apply specific plan
terraform apply tfplan
# Destroy with auto-approve
terraform destroy -auto-approve
Testing with Terratest
// test/terraform_test.go
package test
import (
"testing"
"github.com/gruntwork-io/terratest/modules/terraform"
"github.com/stretchr/testify/assert"
)
func TestTerraformVPC(t *testing.T) {
terraformOptions := &terraform.Options{
TerraformDir: "../",
Vars: map[string]interface{}{
"environment": "test",
"vpc_cidr": "10.0.0.0/16",
},
}
defer terraform.Destroy(t, terraformOptions)
terraform.InitAndApply(t, terraformOptions)
vpcId := terraform.Output(t, terraformOptions, "vpc_id")
assert.NotEmpty(t, vpcId)
}
CI/CD Integration
GitHub Actions
# .github/workflows/terraform.yml
name: Terraform
on:
push:
branches: [main]
pull_request:
branches: [main]
jobs:
terraform:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup Terraform
uses: hashicorp/setup-terraform@v2
with:
terraform_version: 1.6.0
- name: Terraform Init
run: terraform init
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- name: Terraform Validate
run: terraform validate
- name: Terraform Plan
run: terraform plan -no-color
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- name: Terraform Apply
if: github.ref == 'refs/heads/main'
run: terraform apply -auto-approve
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
GitLab CI
# .gitlab-ci.yml
stages:
- validate
- plan
- apply
variables:
TF_ROOT: ${CI_PROJECT_DIR}
TF_ADDRESS: ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/terraform/state/production
cache:
key: production
paths:
- ${TF_ROOT}/.terraform
before_script:
- cd ${TF_ROOT}
- terraform --version
- terraform init
validate:
stage: validate
script:
- terraform validate
- terraform fmt -check
plan:
stage: plan
script:
- terraform plan -out="planfile"
artifacts:
paths:
- planfile
apply:
stage: apply
script:
- terraform apply -input=false "planfile"
dependencies:
- plan
only:
- main
when: manual
Security Best Practices
Sensitive Data Management
# Using AWS Secrets Manager
data "aws_secretsmanager_secret_version" "db_password" {
secret_id = "prod/db/password"
}
resource "aws_db_instance" "main" {
allocated_storage = 20
storage_type = "gp2"
engine = "mysql"
engine_version = "8.0"
instance_class = "db.t3.micro"
db_name = "myapp"
username = "admin"
password = jsondecode(data.aws_secretsmanager_secret_version.db_password.secret_string)["password"]
skip_final_snapshot = true
tags = local.common_tags
}
# Mark sensitive outputs
output "db_password" {
description = "Database password"
value = aws_db_instance.main.password
sensitive = true
}
IAM Policies
# Least privilege IAM policy
data "aws_iam_policy_document" "ec2_assume_role" {
statement {
actions = ["sts:AssumeRole"]
principals {
type = "Service"
identifiers = ["ec2.amazonaws.com"]
}
}
}
resource "aws_iam_role" "ec2_role" {
name = "${var.environment}-ec2-role"
assume_role_policy = data.aws_iam_policy_document.ec2_assume_role.json
}
data "aws_iam_policy_document" "ec2_policy" {
statement {
effect = "Allow"
actions = [
"s3:GetObject",
"s3:PutObject"
]
resources = [
"${aws_s3_bucket.app_bucket.arn}/*"
]
}
}
resource "aws_iam_policy" "ec2_policy" {
name = "${var.environment}-ec2-policy"
policy = data.aws_iam_policy_document.ec2_policy.json
}
Multi-Cloud Deployment
AWS and Azure
# providers.tf
terraform {
required_providers {
aws = {
source = "hashicorp/aws"
version = "~> 5.0"
}
azurerm = {
source = "hashicorp/azurerm"
version = "~> 3.0"
}
}
}
provider "aws" {
region = var.aws_region
}
provider "azurerm" {
features {}
}
# AWS resources
resource "aws_vpc" "main" {
cidr_block = "10.0.0.0/16"
tags = {
Name = "${var.environment}-aws-vpc"
}
}
# Azure resources
resource "azurerm_resource_group" "main" {
name = "${var.environment}-rg"
location = var.azure_location
}
resource "azurerm_virtual_network" "main" {
name = "${var.environment}-vnet"
address_space = ["10.1.0.0/16"]
location = azurerm_resource_group.main.location
resource_group_name = azurerm_resource_group.main.name
}
Career Opportunities
High-Demand Roles
- DevOps Engineer: $100,000 - $140,000
- Infrastructure Engineer: $95,000 - $135,000
- Cloud Architect: $130,000 - $170,000
- Site Reliability Engineer: $120,000 - $160,000
Essential Skills
- Infrastructure as Code principles
- Multi-cloud platform knowledge
- State management and workflows
- Module development and reuse
- Security and compliance
- CI/CD integration
- Testing and validation
- Troubleshooting and optimization
Conclusion
Terraform is essential for modern infrastructure management, enabling teams to provision, manage, and scale cloud resources efficiently. Master these advanced concepts, patterns, and best practices to build robust, maintainable infrastructure as code. The growing adoption of cloud technologies creates excellent opportunities for Terraform specialists in DevOps and cloud engineering roles.