Compare commits

..

9 Commits

Author SHA1 Message Date
Gal Szkolnik 00280e2516 git-ignore + documentation 2023-08-01 06:51:51 +00:00
Gal Szkolnik 1e066646e4 Added traffic manager profiel - incomplete 2023-08-01 06:51:22 +00:00
Gal Szkolnik d3d0da157c Added network/lb module 2023-08-01 06:50:50 +00:00
Gal Szkolnik a1440c1be2 Initial plan - only VMs 2023-08-01 06:49:10 +00:00
Gal Szkolnik fcab4a6b95 Added hosts as modules 2023-08-01 06:47:20 +00:00
Gal Szkolnik 111b454307 Add my personal TF utility code 2023-08-01 06:46:43 +00:00
Gal Szkolnik f76a3162ce Moved TF code into _tf 2023-08-01 06:46:21 +00:00
Gal (Azure CloudShell) 148b9c3166 Added aztexport of template project 2023-07-31 05:43:41 +00:00
Gal Szkolnik e3f5954739 Initialize terraform plan with auth details 2023-07-30 13:53:38 +00:00
46 changed files with 2688 additions and 1 deletions

View File

@ -1,2 +1,5 @@
local.init.tf local.init.tf
.terraform .terraform*
*.tfstate
_logs
_.tmp.*

View File

@ -0,0 +1,20 @@
# 3rd Assignment
## Main Challenges
My main challenge is the lack of experience with Azure environments.
However, the requirements were pretty straightforward, and so I
started by reviewing the tools available for working with Terraform and
Azure.
The tool that seems to be a good fit is the [aztfexport](https://github.com/Azure/aztfexport)
utility, which exports an existing Azure state into a collection of
Terraform plan files.
This helped me understand the particulars of how Azure
objects/hierarchies/relationships work.
<https://github.com/Azure/azure-cli/issues/11749#issuecomment-570975762>
## Additions of my own

View File

@ -0,0 +1,24 @@
# Provider requirements for the hand-written plan.
# NOTE(review): azurerm is constrained to ~>2.0 here, while the
# aztfexport snapshot elsewhere in this repo locks azurerm 3.65.0 —
# confirm which major version this plan is actually developed against.
terraform {
required_providers {
azurerm = {
source = "hashicorp/azurerm"
version = "~>2.0"
}
random = {
# presumably used for random suffixes/values — TODO confirm usage
source = "hashicorp/random"
version = "~>3.0"
}
}
}
# Service-principal authentication for the azurerm provider.
# NOTE(review): the subscription/tenant/client IDs are committed in
# plain text; only the client secret is kept out of the repo —
# local.secret is defined in the git-ignored local.init.tf (see
# local.init-template.tf for the template).
provider "azurerm" {
features {}
subscription_id = "ced61991-09cf-4e19-a773-fa454609e1ed"
tenant_id = "6b4b1b0d-23f1-4063-bbbd-b65e2984b893"
# Client ID of SzTerraform service-principal:
# Azure Active Directory -> App registration -> SzTerraform
client_id = "0de043f8-9006-4791-baa1-f48a60809c1c"
client_secret = local.secret
}

View File

@ -0,0 +1,8 @@
# Copy this file into local.init.tf (which is in .gitignore) and fill
# in the secret's value.
locals {
# Secret for the SzTerraform service-principal:
# Azure Active Directory -> App registration -> SzTerraform -> Certificates & secrets
secret = "<Secret>"
}

View File

@ -0,0 +1,41 @@
# Input variables for the deployed_host module — one Linux VM plus its
# per-host network plumbing (vnet, subnet, public IP, NIC).

variable "username" {
  type        = string
  description = "Admin username for the VM"
}

variable "ssh_public_key" {
  # Fixed description: this is the admin's SSH public *key*, not a
  # "signature's".
  type        = string
  description = "SSH public key of the admin user"
}

variable "machine_type" {
  type        = string
  description = "Machine type/size"
  default     = "Standard_B1s" # Defaulting to the 750-free-hours tier machine.
}

variable "host_name" {
  type        = string
  description = "Name of the VM instance"
}

variable "resource_group_name" {
  type        = string
  description = "Azure resource group name"
}

variable "location" {
  type        = string
  description = "Azure location of the resources"
  default     = "eastus"
}

variable "local_network_cidr" {
  type        = string
  description = "CIDR range of the local virtual network"
}

variable "local_subnet_cidr" {
  type        = string
  description = "CIDR range of the local subnet, usually contained within local_network_cidr"
}

View File

@ -0,0 +1,10 @@
# Expose every resource this module creates so the root plan can
# cross-reference them (e.g. NIC ids feed the load-balancer module).
output "resources" {
value = {
host = azurerm_linux_virtual_machine.deployed_host
nic = azurerm_network_interface.host_network_interface
net = azurerm_virtual_network.local_network
subnet = azurerm_subnet.local_subnet
pub_ip = azurerm_public_ip.host_public_ip
}
# Whole resource objects are exported; none are marked sensitive.
sensitive = false
}

View File

@ -0,0 +1,26 @@
# A single Ubuntu 22.04 LTS (gen2) VM with secure boot and vTPM
# enabled, accessible only via the admin's SSH key.
resource "azurerm_linux_virtual_machine" "deployed_host" {
admin_username = var.username
location = var.location
name = var.host_name
resource_group_name = var.resource_group_name
secure_boot_enabled = true
size = var.machine_type
vtpm_enabled = true
# Key-only login; no admin password is configured.
admin_ssh_key {
public_key = var.ssh_public_key
username = var.username
}
# Empty block — boot diagnostics enabled with provider defaults.
boot_diagnostics {
}
os_disk {
caching = "ReadWrite"
# NOTE(review): Premium_LRS may fall outside the free tier the
# machine_type default aims for — confirm intended disk SKU.
storage_account_type = "Premium_LRS"
}
source_image_reference {
offer = "0001-com-ubuntu-server-jammy"
publisher = "canonical"
sku = "22_04-lts-gen2"
version = "latest"
}
network_interface_ids = [ azurerm_network_interface.host_network_interface.id ]
}

View File

@ -0,0 +1,34 @@
# Per-host networking: a dedicated vnet + subnet, a static public IP,
# and the NIC that ties them to the VM.
resource "azurerm_virtual_network" "local_network" {
address_space = [ var.local_network_cidr ]
location = var.location
name = "${var.host_name}-vnet"
resource_group_name = var.resource_group_name
}
resource "azurerm_subnet" "local_subnet" {
address_prefixes = [ var.local_subnet_cidr ]
name = "default"
resource_group_name = var.resource_group_name
virtual_network_name = azurerm_virtual_network.local_network.name
}
resource "azurerm_public_ip" "host_public_ip" {
name = "${var.host_name}-ip"
# Static Standard-SKU IP with the host name as its DNS label.
allocation_method = "Static"
domain_name_label = var.host_name
location = var.location
resource_group_name = var.resource_group_name
sku = "Standard"
}
resource "azurerm_network_interface" "host_network_interface" {
name = "${var.host_name}-nic"
location = var.location
resource_group_name = var.resource_group_name
ip_configuration {
# "ipconfig1" is referenced by name in the lb module's backend-pool
# association — keep the two in sync.
name = "ipconfig1"
private_ip_address_allocation = "Dynamic"
public_ip_address_id = azurerm_public_ip.host_public_ip.id
subnet_id = azurerm_subnet.local_subnet.id
}
}

View File

@ -0,0 +1,25 @@
# Input variables for the deployed_net module — a per-location load
# balancer, backend pool and shared NSG.

variable "resource_group_name" {
  type        = string
  description = "Azure resource group name"
}

variable "location" {
  type        = string
  description = "Resource location (eastus / northeurope)"
}

variable "shortname" {
  type        = string
  description = "Short name of the resource's location (use / eun)"
}

variable "zones" {
  # Fixed description: it was a copy-paste of shortname's description.
  type        = list(string)
  description = "Availability zones to deploy into"
  default     = ["1", "2", "3"]
}

variable "network_interfaces" {
  type        = list(string)
  description = "list of network interfaces to associate with the deployed network"
}

View File

@ -0,0 +1,10 @@
# Expose every resource this module creates so the root plan can
# inspect or cross-reference them.
output "all" {
value = {
lb = azurerm_lb.lb
be_pool = azurerm_lb_backend_address_pool.be_pool
nsg = azurerm_network_security_group.vm-nsg
xref = azurerm_network_interface_backend_address_pool_association.be-pool-xref
vm_nsg_assoc = azurerm_network_interface_security_group_association.vm_nsg_assoc
ssh = azurerm_network_security_rule.nsrule-allow-ssh
}
}

View File

@ -0,0 +1,51 @@
# Standard-SKU load balancer for this location, plus its backend
# address pool. Names follow the "<shortname>-*" convention.
resource "azurerm_lb" "lb" {
location = var.location
name = "${var.shortname}-lb"
resource_group_name = var.resource_group_name
sku = "Standard"
frontend_ip_configuration {
name = "${var.shortname}-fe-ip-conf"
}
}
resource "azurerm_lb_backend_address_pool" "be_pool" {
loadbalancer_id = azurerm_lb.lb.id
name = "${var.shortname}-be-pool"
}
# One NSG shared by all VMs behind this location's load balancer.
resource "azurerm_network_security_group" "vm-nsg" {
  location            = var.location
  # Use the location shortname, consistent with the "<shortname>-*"
  # naming of the lb and backend pool (was "${var.location}-nsg",
  # which broke the convention).
  name                = "${var.shortname}-nsg"
  resource_group_name = var.resource_group_name
}
# Attach every NIC passed in to the backend pool and to the shared
# NSG. The list is projected into a map so each association gets a
# stable string key ("0", "1", ...).
resource "azurerm_network_interface_backend_address_pool_association" "be-pool-xref" {
for_each = { for k, v in var.network_interfaces: k => v }
network_interface_id = each.value
backend_address_pool_id = azurerm_lb_backend_address_pool.be_pool.id
# Must match the ip_configuration name created by the deployed_host
# module's NIC.
ip_configuration_name = "ipconfig1" # each.value.host.
}
resource "azurerm_network_interface_security_group_association" "vm_nsg_assoc" {
for_each = { for k, v in var.network_interfaces: k => v }
network_interface_id = each.value
network_security_group_id = azurerm_network_security_group.vm-nsg.id
}
# Allow inbound SSH (22/tcp) from anywhere — one rule per associated
# NIC.
resource "azurerm_network_security_rule" "nsrule-allow-ssh" {
  for_each                    = { for k, v in azurerm_network_interface_security_group_association.vm_nsg_assoc: k => v }
  access                      = "Allow"
  destination_address_prefix  = "*"
  destination_port_range      = "22"
  direction                   = "Inbound"
  name                        = "SSH-${each.key}"
  # BUG FIX: this attribute takes the NSG's *name*; each.key is just
  # the for_each map key ("0", "1", ...), not a security group.
  network_security_group_name = azurerm_network_security_group.vm-nsg.name
  # Priorities must be unique per NSG/direction — offset by the map
  # key so the rules don't all collide at 300.
  priority                    = 300 + tonumber(each.key)
  protocol                    = "Tcp"
  resource_group_name         = var.resource_group_name
  source_address_prefix       = "*"
  source_port_range           = "*"
}

View File

@ -0,0 +1,93 @@
# Single resource group holding every resource in this assignment.
resource "azurerm_resource_group" "res-group" {
location = "northeurope"
name = "varonis-assignment-03"
}
# Root-plan constants: admin credentials, per-location metadata, and
# the VM fleet definition that drives the deployed_host module.
locals {
resource_group_name = azurerm_resource_group.res-group.name
username = "sz"
# Public half of the admin's SSH key — safe to commit.
ssh_public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDYnCJkpvIoEzjoYBmGKekFXGEOOlfcxD3RTtoYYy+b8PTVeyhY609UTn713hUxC/WKtY2QZgxs02GMmPfkqDTnt8JiD+PRMANBwYZPEe3BkuLoLFznFAb81ATpNbhqX26yauYLDSfoqUZ2EoRoKZvgKu0woNUrEHcQ1/Be28lF3vHdHga+Xo3xsH7cxIl5yHlbgfgtSPlqPckuiuu+V0I8rsPSW+fiP4NqZWjirm85QQPh0+CriFm5N+EKRhanLN+w5O//Ev0ZgOMR8CX+S62BqqG+DiW11irL7//1Z0oeRuBaeiuw1H5g38Gk6PFX1+GjaBm5bAg/ymej5f+F3HBpMvpSFKcUhe1hoqDP2cy6kSTGjl5HxOmL9uclq9NApyw+einkvL/t69ET1OzN4LMTjQjeWLzmrouG5suarhVlp8Lrup3/L6AaPyN2I81+lFlOTh2PJMlPlxtzcD1lT8IFhb7OFuk1Y7fC/gzDVgmH6E1Gqsw4+eg3k0IsdNZxa5M= szkolnik@Sygin"
# Per-location naming shortcuts and availability zones.
locations = tomap({
"eastus" = {
shortname = "use"
zones = ["1", "2", "3"]
}
"northeurope" = {
shortname = "eun"
zones = ["1", "2", "3"]
}
})
# One entry per VM; the key doubles as the host name. Each region
# gets a /16 network and each VM its own /24 subnet within it.
vm_list = tomap({
"use-vm1" = {
location = "eastus"
network_cidr = "10.1.0.0/16"
subnet_cidr = "10.1.0.0/24"
}
"use-vm2" = {
location = "eastus"
network_cidr = "10.1.0.0/16"
subnet_cidr = "10.1.1.0/24"
}
"eun-vm1" = {
location = "northeurope"
network_cidr = "10.2.0.0/16"
subnet_cidr = "10.2.0.0/24"
}
"eun-vm2" = {
location = "northeurope"
network_cidr = "10.2.0.0/16"
subnet_cidr = "10.2.1.0/24"
}
})
}
# One VM (plus its per-host network) per vm_list entry.
module "deployed_host" {
source = "./modules/deployed_host"
for_each = local.vm_list
host_name = each.key
location = each.value.location
resource_group_name = local.resource_group_name
username = local.username
ssh_public_key = local.ssh_public_key
local_network_cidr = each.value.network_cidr
local_subnet_cidr = each.value.subnet_cidr
}
# One load balancer per location, wired to the NICs of the hosts
# deployed in that location.
module "deployed_network" {
source = "./modules/deployed_net"
for_each = local.locations
resource_group_name = local.resource_group_name
location = each.key
shortname = each.value.shortname
network_interfaces = [for h in module.deployed_host :
h.resources.nic.id if h.resources.host.location == each.key
]
}
# Traffic Manager profile routing users geographically.
# NOTE(review): no endpoints are attached yet (the commit log marks
# this "incomplete") — endpoint resources are still needed before the
# profile routes anything.
resource "azurerm_traffic_manager_profile" "traffic_manager" {
  name                   = "traffic-mgr"
  resource_group_name    = local.resource_group_name
  traffic_routing_method = "Geographic"
  dns_config {
    relative_name = "traffic-mgr"
    ttl           = 60
  }
  monitor_config {
    path     = "/"
    port     = 443
    # BUG FIX: probing port 443 with protocol "HTTP" is a mismatch —
    # health probes on 443 must use HTTPS (or switch the port to 80).
    protocol = "HTTPS"
  }
}
# output "debug" {
#   value = [ for o in module.deployed_host : o.resources.host.name ]
# }

View File

@ -0,0 +1,22 @@
# This file is maintained automatically by "terraform init".
# Manual edits may be lost in future updates.
provider "registry.terraform.io/hashicorp/azurerm" {
version = "3.65.0"
constraints = "3.65.0"
hashes = [
"h1:Wpq9+x8PynJqzfxaI1hnxhFgHSXkCz07UqroUVJCseU=",
"zh:0077d19c1cbd8916a6d96bad17e72f88535ac207fb7f88b714c6fc6da736f80d",
"zh:084f9de2f0f84e6508f81b6578ff195afeed79e5d18a0c8d2348abd7d22611c9",
"zh:0ea05826c0f9d2e4a5a9887e6d182ba1a5db6eba52b22eb45f0b8576d2d5ddb5",
"zh:5142f9cf59f8152bdb9debcdc39c04cb4ca8b26bd50e44f4605b2bcdc4fc514e",
"zh:67af71aa233dbe5e2ce59f8b8aa02a7ce71f55b4389dc6bdd1c85e463f810f37",
"zh:785b2c4845a0e99fc1a00d1c293cee49cf150a4f1a83d86632dd3fcd9e953d9c",
"zh:aae6352ff80d760bebd2148cd86a544cd6df8e1e5abd6d472143e40875983428",
"zh:aff6914ad258d27781ba66a915ef714a3f0d31136eeb06b12ed2220cc6530b4b",
"zh:b21ca9e271db7a57e5f08bf2b47bd8db291faf699fabf14bb38d4a73a9a05c21",
"zh:c8ff94c42249a9fdab87b6c974d6eb59af4c01c955cd76279b7a4f66eacd9754",
"zh:f4053b76a6efd46f79b45098c3e3df06b8e6340532970c91d1a9ead63dcf72b6",
"zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
]
}

View File

@ -0,0 +1,132 @@
{
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/VARONIS-ASSIGNMENT-03/providers/Microsoft.Compute/virtualMachines/TMPL-USE-VM1": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/VARONIS-ASSIGNMENT-03/providers/Microsoft.Compute/virtualMachines/TMPL-USE-VM1",
"resource_type": "azurerm_linux_virtual_machine",
"resource_name": "res-0"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03",
"resource_type": "azurerm_resource_group",
"resource_name": "res-1"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Compute/virtualMachines/tmpl-eun-vm01": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Compute/virtualMachines/tmpl-eun-vm01",
"resource_type": "azurerm_linux_virtual_machine",
"resource_name": "res-2"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-eun-lb": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-eun-lb",
"resource_type": "azurerm_lb",
"resource_name": "res-3"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-eun-lb/backendAddressPools/tmpl-eun-be-pool": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-eun-lb/backendAddressPools/tmpl-eun-be-pool",
"resource_type": "azurerm_lb_backend_address_pool",
"resource_name": "res-4"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-use-lb": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-use-lb",
"resource_type": "azurerm_lb",
"resource_name": "res-5"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-use-lb/backendAddressPools/tmpl-use-be-pool": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-use-lb/backendAddressPools/tmpl-use-be-pool",
"resource_type": "azurerm_lb_backend_address_pool",
"resource_name": "res-6"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-eun-vm01457": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-eun-vm01457",
"resource_type": "azurerm_network_interface",
"resource_name": "res-7"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-eun-vm01457/ipConfigurations/ipconfig1/loadBalancerBackendAddressPools/L3N1YnNjcmlwdGlvbnMvY2VkNjE5OTEtMDljZi00ZTE5LWE3NzMtZmE0NTQ2MDllMWVkL3Jlc291cmNlR3JvdXBzL3Zhcm9uaXMtYXNzaWdubWVudC0wMy9wcm92aWRlcnMvTWljcm9zb2Z0Lk5ldHdvcmsvbG9hZEJhbGFuY2Vycy90bXBsLWV1bi1sYi9iYWNrZW5kQWRkcmVzc1Bvb2xzL3RtcGwtZXVuLWJlLXBvb2w=": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-eun-vm01457/ipConfigurations/ipconfig1|/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-eun-lb/backendAddressPools/tmpl-eun-be-pool",
"resource_type": "azurerm_network_interface_backend_address_pool_association",
"resource_name": "res-8"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-eun-vm01457/networkSecurityGroups/L3N1YnNjcmlwdGlvbnMvY2VkNjE5OTEtMDljZi00ZTE5LWE3NzMtZmE0NTQ2MDllMWVkL3Jlc291cmNlR3JvdXBzL3Zhcm9uaXMtYXNzaWdubWVudC0wMy9wcm92aWRlcnMvTWljcm9zb2Z0Lk5ldHdvcmsvbmV0d29ya1NlY3VyaXR5R3JvdXBzL3RtcGwtZXVuLXZtMDEtbnNn": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-eun-vm01457|/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-eun-vm01-nsg",
"resource_type": "azurerm_network_interface_security_group_association",
"resource_name": "res-9"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-use-vm1329": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-use-vm1329",
"resource_type": "azurerm_network_interface",
"resource_name": "res-10"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-use-vm1329/ipConfigurations/ipconfig1/loadBalancerBackendAddressPools/L3N1YnNjcmlwdGlvbnMvY2VkNjE5OTEtMDljZi00ZTE5LWE3NzMtZmE0NTQ2MDllMWVkL3Jlc291cmNlR3JvdXBzL3Zhcm9uaXMtYXNzaWdubWVudC0wMy9wcm92aWRlcnMvTWljcm9zb2Z0Lk5ldHdvcmsvbG9hZEJhbGFuY2Vycy90bXBsLXVzZS1sYi9iYWNrZW5kQWRkcmVzc1Bvb2xzL3RtcGwtdXNlLWJlLXBvb2w=": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-use-vm1329/ipConfigurations/ipconfig1|/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-use-lb/backendAddressPools/tmpl-use-be-pool",
"resource_type": "azurerm_network_interface_backend_address_pool_association",
"resource_name": "res-11"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-use-vm1329/networkSecurityGroups/L3N1YnNjcmlwdGlvbnMvY2VkNjE5OTEtMDljZi00ZTE5LWE3NzMtZmE0NTQ2MDllMWVkL3Jlc291cmNlR3JvdXBzL3Zhcm9uaXMtYXNzaWdubWVudC0wMy9wcm92aWRlcnMvTWljcm9zb2Z0Lk5ldHdvcmsvbmV0d29ya1NlY3VyaXR5R3JvdXBzL3RtcGwtdXNlLXZtMS1uc2c=": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-use-vm1329|/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-use-vm1-nsg",
"resource_type": "azurerm_network_interface_security_group_association",
"resource_name": "res-12"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-eun-vm01-nsg": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-eun-vm01-nsg",
"resource_type": "azurerm_network_security_group",
"resource_name": "res-13"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-eun-vm01-nsg/securityRules/SSH": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-eun-vm01-nsg/securityRules/SSH",
"resource_type": "azurerm_network_security_rule",
"resource_name": "res-14"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-use-vm1-nsg": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-use-vm1-nsg",
"resource_type": "azurerm_network_security_group",
"resource_name": "res-15"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-use-vm1-nsg/securityRules/SSH": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-use-vm1-nsg/securityRules/SSH",
"resource_type": "azurerm_network_security_rule",
"resource_name": "res-16"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/publicIPAddresses/tmpl-eun-public-ip": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/publicIPAddresses/tmpl-eun-public-ip",
"resource_type": "azurerm_public_ip",
"resource_name": "res-17"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/publicIPAddresses/tmpl-eun-vm01-ip": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/publicIPAddresses/tmpl-eun-vm01-ip",
"resource_type": "azurerm_public_ip",
"resource_name": "res-18"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/publicIPAddresses/tmpl-use-public-ip": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/publicIPAddresses/tmpl-use-public-ip",
"resource_type": "azurerm_public_ip",
"resource_name": "res-19"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/publicIPAddresses/tmpl-use-vm1-ip": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/publicIPAddresses/tmpl-use-vm1-ip",
"resource_type": "azurerm_public_ip",
"resource_name": "res-20"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/trafficmanagerprofiles/tmpl-traffic-mgr": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/trafficManagerProfiles/tmpl-traffic-mgr",
"resource_type": "azurerm_traffic_manager_profile",
"resource_name": "res-21"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/virtualNetworks/tmpl-eun-vm01-vnet": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/virtualNetworks/tmpl-eun-vm01-vnet",
"resource_type": "azurerm_virtual_network",
"resource_name": "res-22"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/virtualNetworks/tmpl-eun-vm01-vnet/subnets/default": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/virtualNetworks/tmpl-eun-vm01-vnet/subnets/default",
"resource_type": "azurerm_subnet",
"resource_name": "res-23"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/virtualNetworks/tmpl-use-vm1-vnet": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/virtualNetworks/tmpl-use-vm1-vnet",
"resource_type": "azurerm_virtual_network",
"resource_name": "res-24"
},
"/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/virtualNetworks/tmpl-use-vm1-vnet/subnets/default": {
"resource_id": "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/virtualNetworks/tmpl-use-vm1-vnet/subnets/default",
"resource_type": "azurerm_subnet",
"resource_name": "res-25"
}
}

View File

@ -0,0 +1,311 @@
resource "azurerm_linux_virtual_machine" "res-0" {
admin_username = "sz"
location = "eastus"
name = "TMPL-USE-VM1"
network_interface_ids = ["/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-use-vm1329"]
resource_group_name = "VARONIS-ASSIGNMENT-03"
secure_boot_enabled = true
size = "Standard_B1s"
vtpm_enabled = true
admin_ssh_key {
public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDYnCJkpvIoEzjoYBmGKekFXGEOOlfcxD3RTtoYYy+b8PTVeyhY609UTn713hUxC/WKtY2QZgxs02GMmPfkqDTnt8JiD+PRMANBwYZPEe3BkuLoLFznFAb81ATpNbhqX26yauYLDSfoqUZ2EoRoKZvgKu0woNUrEHcQ1/Be28lF3vHdHga+Xo3xsH7cxIl5yHlbgfgtSPlqPckuiuu+V0I8rsPSW+fiP4NqZWjirm85QQPh0+CriFm5N+EKRhanLN+w5O//Ev0ZgOMR8CX+S62BqqG+DiW11irL7//1Z0oeRuBaeiuw1H5g38Gk6PFX1+GjaBm5bAg/ymej5f+F3HBpMvpSFKcUhe1hoqDP2cy6kSTGjl5HxOmL9uclq9NApyw+einkvL/t69ET1OzN4LMTjQjeWLzmrouG5suarhVlp8Lrup3/L6AaPyN2I81+lFlOTh2PJMlPlxtzcD1lT8IFhb7OFuk1Y7fC/gzDVgmH6E1Gqsw4+eg3k0IsdNZxa5M= szkolnik@Sygin"
username = "sz"
}
boot_diagnostics {
}
os_disk {
caching = "ReadWrite"
storage_account_type = "Premium_LRS"
}
source_image_reference {
offer = "0001-com-ubuntu-server-jammy"
publisher = "canonical"
sku = "22_04-lts-gen2"
version = "latest"
}
depends_on = [
azurerm_network_interface.res-10,
]
}
resource "azurerm_resource_group" "res-1" {
location = "northeurope"
name = "varonis-assignment-03"
}
resource "azurerm_linux_virtual_machine" "res-2" {
admin_username = "sz"
location = "northeurope"
name = "tmpl-eun-vm01"
network_interface_ids = ["/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-eun-vm01457"]
resource_group_name = "varonis-assignment-03"
secure_boot_enabled = true
size = "Standard_B1s"
vtpm_enabled = true
admin_ssh_key {
public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDYnCJkpvIoEzjoYBmGKekFXGEOOlfcxD3RTtoYYy+b8PTVeyhY609UTn713hUxC/WKtY2QZgxs02GMmPfkqDTnt8JiD+PRMANBwYZPEe3BkuLoLFznFAb81ATpNbhqX26yauYLDSfoqUZ2EoRoKZvgKu0woNUrEHcQ1/Be28lF3vHdHga+Xo3xsH7cxIl5yHlbgfgtSPlqPckuiuu+V0I8rsPSW+fiP4NqZWjirm85QQPh0+CriFm5N+EKRhanLN+w5O//Ev0ZgOMR8CX+S62BqqG+DiW11irL7//1Z0oeRuBaeiuw1H5g38Gk6PFX1+GjaBm5bAg/ymej5f+F3HBpMvpSFKcUhe1hoqDP2cy6kSTGjl5HxOmL9uclq9NApyw+einkvL/t69ET1OzN4LMTjQjeWLzmrouG5suarhVlp8Lrup3/L6AaPyN2I81+lFlOTh2PJMlPlxtzcD1lT8IFhb7OFuk1Y7fC/gzDVgmH6E1Gqsw4+eg3k0IsdNZxa5M= szkolnik@Sygin"
username = "sz"
}
boot_diagnostics {
}
os_disk {
caching = "ReadWrite"
storage_account_type = "Premium_LRS"
}
source_image_reference {
offer = "0001-com-ubuntu-server-jammy"
publisher = "canonical"
sku = "22_04-lts-gen2"
version = "latest"
}
depends_on = [
azurerm_network_interface.res-7,
]
}
resource "azurerm_lb" "res-3" {
location = "northeurope"
name = "tmpl-eun-lb"
resource_group_name = "varonis-assignment-03"
sku = "Standard"
frontend_ip_configuration {
name = "tmpl-eun-fe-ip-conf"
}
depends_on = [
azurerm_resource_group.res-1,
]
}
resource "azurerm_lb_backend_address_pool" "res-4" {
loadbalancer_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-eun-lb"
name = "tmpl-eun-be-pool"
depends_on = [
azurerm_lb.res-3,
]
}
resource "azurerm_lb" "res-5" {
location = "eastus"
name = "tmpl-use-lb"
resource_group_name = "varonis-assignment-03"
sku = "Standard"
frontend_ip_configuration {
name = "tmpl-use-fe-ip-conf"
}
depends_on = [
azurerm_resource_group.res-1,
]
}
resource "azurerm_lb_backend_address_pool" "res-6" {
loadbalancer_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-use-lb"
name = "tmpl-use-be-pool"
depends_on = [
azurerm_lb.res-5,
]
}
resource "azurerm_network_interface" "res-7" {
location = "northeurope"
name = "tmpl-eun-vm01457"
resource_group_name = "varonis-assignment-03"
ip_configuration {
name = "ipconfig1"
private_ip_address_allocation = "Dynamic"
public_ip_address_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/publicIPAddresses/tmpl-eun-vm01-ip"
subnet_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/virtualNetworks/tmpl-eun-vm01-vnet/subnets/default"
}
depends_on = [
azurerm_public_ip.res-18,
azurerm_subnet.res-23,
]
}
resource "azurerm_network_interface_backend_address_pool_association" "res-8" {
backend_address_pool_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-eun-lb/backendAddressPools/tmpl-eun-be-pool"
ip_configuration_name = "ipconfig1"
network_interface_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-eun-vm01457"
depends_on = [
azurerm_lb_backend_address_pool.res-4,
azurerm_network_interface.res-7,
]
}
resource "azurerm_network_interface_security_group_association" "res-9" {
network_interface_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-eun-vm01457"
network_security_group_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-eun-vm01-nsg"
depends_on = [
azurerm_network_interface.res-7,
azurerm_network_security_group.res-13,
]
}
resource "azurerm_network_interface" "res-10" {
location = "eastus"
name = "tmpl-use-vm1329"
resource_group_name = "varonis-assignment-03"
ip_configuration {
name = "ipconfig1"
private_ip_address_allocation = "Dynamic"
public_ip_address_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/publicIPAddresses/tmpl-use-vm1-ip"
subnet_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/virtualNetworks/tmpl-use-vm1-vnet/subnets/default"
}
depends_on = [
azurerm_public_ip.res-20,
azurerm_subnet.res-25,
]
}
resource "azurerm_network_interface_backend_address_pool_association" "res-11" {
backend_address_pool_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/loadBalancers/tmpl-use-lb/backendAddressPools/tmpl-use-be-pool"
ip_configuration_name = "ipconfig1"
network_interface_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-use-vm1329"
depends_on = [
azurerm_lb_backend_address_pool.res-6,
azurerm_network_interface.res-10,
]
}
resource "azurerm_network_interface_security_group_association" "res-12" {
network_interface_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkInterfaces/tmpl-use-vm1329"
network_security_group_id = "/subscriptions/ced61991-09cf-4e19-a773-fa454609e1ed/resourceGroups/varonis-assignment-03/providers/Microsoft.Network/networkSecurityGroups/tmpl-use-vm1-nsg"
depends_on = [
azurerm_network_interface.res-10,
azurerm_network_security_group.res-15,
]
}
resource "azurerm_network_security_group" "res-13" {
location = "northeurope"
name = "tmpl-eun-vm01-nsg"
resource_group_name = "varonis-assignment-03"
depends_on = [
azurerm_resource_group.res-1,
]
}
resource "azurerm_network_security_rule" "res-14" {
access = "Allow"
destination_address_prefix = "*"
destination_port_range = "22"
direction = "Inbound"
name = "SSH"
network_security_group_name = "tmpl-eun-vm01-nsg"
priority = 300
protocol = "Tcp"
resource_group_name = "varonis-assignment-03"
source_address_prefix = "*"
source_port_range = "*"
depends_on = [
azurerm_network_security_group.res-13,
]
}
resource "azurerm_network_security_group" "res-15" {
location = "eastus"
name = "tmpl-use-vm1-nsg"
resource_group_name = "varonis-assignment-03"
depends_on = [
azurerm_resource_group.res-1,
]
}
resource "azurerm_network_security_rule" "res-16" {
access = "Allow"
destination_address_prefix = "*"
destination_port_range = "22"
direction = "Inbound"
name = "SSH"
network_security_group_name = "tmpl-use-vm1-nsg"
priority = 300
protocol = "Tcp"
resource_group_name = "varonis-assignment-03"
source_address_prefix = "*"
source_port_range = "*"
depends_on = [
azurerm_network_security_group.res-15,
]
}
resource "azurerm_public_ip" "res-17" {
allocation_method = "Static"
domain_name_label = "tmpl-eun-lb-addr"
location = "northeurope"
name = "tmpl-eun-public-ip"
resource_group_name = "varonis-assignment-03"
sku = "Standard"
zones = ["1", "2", "3"]
depends_on = [
azurerm_resource_group.res-1,
]
}
resource "azurerm_public_ip" "res-18" {
allocation_method = "Static"
domain_name_label = "tmpl-eun-vm1"
location = "northeurope"
name = "tmpl-eun-vm01-ip"
resource_group_name = "varonis-assignment-03"
sku = "Standard"
depends_on = [
azurerm_resource_group.res-1,
]
}
resource "azurerm_public_ip" "res-19" {
allocation_method = "Static"
domain_name_label = "tmpl-use-lb-addr"
location = "eastus"
name = "tmpl-use-public-ip"
resource_group_name = "varonis-assignment-03"
sku = "Standard"
zones = ["1", "2", "3"]
depends_on = [
azurerm_resource_group.res-1,
]
}
resource "azurerm_public_ip" "res-20" {
allocation_method = "Static"
domain_name_label = "tmpl-use-vm1"
location = "eastus"
name = "tmpl-use-vm1-ip"
resource_group_name = "varonis-assignment-03"
sku = "Standard"
depends_on = [
azurerm_resource_group.res-1,
]
}
# Exported (aztfexport) Traffic Manager profile from the template
# project.
# NOTE(review): the live resource probes port 443 with protocol
# "HTTP" — the same port/protocol mismatch present in the hand-written
# plan; confirm the intended probe scheme before re-applying.
resource "azurerm_traffic_manager_profile" "res-21" {
name = "tmpl-traffic-mgr"
resource_group_name = "varonis-assignment-03"
traffic_routing_method = "Geographic"
dns_config {
relative_name = "tmpl-traffic-mgr"
ttl = 60
}
monitor_config {
path = "/"
port = 443
protocol = "HTTP"
}
depends_on = [
azurerm_resource_group.res-1,
]
}
resource "azurerm_virtual_network" "res-22" {
address_space = ["10.0.0.0/16"]
location = "northeurope"
name = "tmpl-eun-vm01-vnet"
resource_group_name = "varonis-assignment-03"
depends_on = [
azurerm_resource_group.res-1,
]
}
resource "azurerm_subnet" "res-23" {
address_prefixes = ["10.0.0.0/24"]
name = "default"
resource_group_name = "varonis-assignment-03"
virtual_network_name = "tmpl-eun-vm01-vnet"
depends_on = [
azurerm_virtual_network.res-22,
]
}
resource "azurerm_virtual_network" "res-24" {
address_space = ["10.1.0.0/16"]
location = "eastus"
name = "tmpl-use-vm1-vnet"
resource_group_name = "varonis-assignment-03"
depends_on = [
azurerm_resource_group.res-1,
]
}
resource "azurerm_subnet" "res-25" {
address_prefixes = ["10.1.0.0/24"]
name = "default"
resource_group_name = "varonis-assignment-03"
virtual_network_name = "tmpl-use-vm1-vnet"
depends_on = [
azurerm_virtual_network.res-24,
]
}

View File

@ -0,0 +1,4 @@
# Minimal azurerm provider configuration; the required `features`
# block is intentionally empty (all provider defaults).
provider "azurerm" {
features {
}
}

View File

@ -0,0 +1,9 @@
# Terraform settings: state kept in a local backend, azurerm provider
# pinned to an exact version for reproducible plans.
terraform {
backend "local" {}
required_providers {
azurerm = {
source = "hashicorp/azurerm"
version = "3.65.0"
}
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,133 @@
# Terraform Utility scripts
The scripts in this directory were made to augment SZ's development and
deployment of its terraform plans.
They all rely on scaffolding provided by `direnv` and the hierarchy of
`.envrc` files in the repo, which set context-relevant environment
variables that the scripts here interact with to create a cohesive and
streamlined workflow.
## Typical workflow
Just like working with 'vanilla' terraform, the process follows:
```plaintext
+----------------------+-------------------------+
V | [mostly non-production] |
init -> plan -> apply -> | plan-destroy -> apply |
^ | +-------------------------+
+-------+
```
With some subtle differences and defaults which are sensible to SZ's
process, along with persistent logging.
## SZ's workflow
1. `init` - the first step:
* Initializes logging timestamp (aka `tf0`)
* Preprocess template files (all _sz.*.sz_tmpl files)
* Run `terraform init`
* Log output into `_logs` sub-directory.
2. `plan` - The 'brain':
* Run `terraform plan` with support for the `TF_TARGET`
and `TF_DESTROY_TARGET` environment variables.
* Generate a `tfplan` file for `apply` action.
* Log output into `_logs` sub-directory.
3. `apply` - The heavy lifting:
* Always reads the `tfplan` file.
* Used for either _building_ or _destroying_.
* Log output into `_logs` sub-directory.
4. `plan-destroy` - Sensible destroy planning:
* Initializes logging timestamp (aka `tf0`)
* Always layout the plan
* Always logged
* Supports `TF_DESTROY_TARGET`
## Concepts
* `direnv`'s `.envrc` sets up the following environment variables:
* SZ_TF_NAME
* SZ_GCP_PROJECT
* SZ_GCP_PROJECT_ID
* TF_LOG, TF_LOG_PATH
* TF_VAR_PLAN_PATH="_.tmp.${SZ_TF_NAME}.tfplan"
* TF_VAR_OUT="-out ${TF_VAR_PLAN_PATH}"
* TF_VAR_FILE_CLI="-var-file="
* TF_VAR_FILE_CLI=""
* `tf-init` pre-processes template files `_sz.*.sz_tmpl`.
It is required first and foremost by `_sz.init.tf.sz_tmpl`, which
generates the platform initialization code based on the environment
set by `direnv` - making sure you are always working with the correct
gcloud environment.
All processed files will have the name pattern `_.gen.*`. Please note
that any file beginning with `_.` (that's an underscore and a dot) is
ignored via `.gitignore`
* All planning is written to a persistent plan
(`_.tmp.<tf-plan-name>.tfplan`), whether it's deploying changes, new
components or destroying, the tfplan must be generated, along with the
log files.
* Logging - Each tf call is logged in 3 parallel locations:
* `_logs/0_0_lastrun.log` - the last `tf` run.
* `_logs/0_<action>` - the latest run of a terraform action (plan,
init, etc...). The rationale here is that all actions of a complete
workflow will be easily accessible regardless of timestamps.
* `_logs/yyyymmddHHmmss_<action>` - same as above, only timestamped.
This allows grouping operations that ran together in sequence,
breaking out to a separate sequences (by `tf0`) that will not
overwrite previous runs.
* `less-tf` - less with sensible defaults to review latest `tf` run
results. If no log file is specified, `_logs/0_0_lastrun.log` will
be opened.
## Reference of Scripts
* `tf`:
executes `terraform` while preserving logs, including ANSI coloring.
* `tf0`:
same as `tf`, however it will reset the logging timestamp.
* `tf-init`:
perform `init` step.
* `tf-plan` or `tfp` or `tf0-plan`
perform `plan` step. `tf0-plan` resets timestamp.
* `tf-plan-destroy` or `tfpd`
Perform `plan` step for destruction.
* `tf-apply` or `tfa`:
Apply after `plan` step. Unlike a plain `terraform apply`, this will
not stop to ask for approval; it is based on the preserved plan file
supplied by `tf-plan` (above).
* `tf-extract`:
Extracts current state as a json of pairs of names and ids. For use
with `tf-import`.
* `tf-import`:
Given a json files of name/id pairs, generate an import script for
the existing state. This can be used as a non-binary state file.
An example to create such a script from the latest successful `tf-apply`:
> ```bash
> tf-import _logs/0_9_last_state_ids.json > import-state.sh
> ```
## General utility scripts
Documentation still pending for the following:
* `switch-dbg-tf-on`
* `less-tf`
* `clear-tf-env`
* `clear-tf-env-targets`
* `clear-tf-end-vars`
* `get-tf-env`
* `get-tf-env-plan`

View File

@ -0,0 +1,57 @@
#!/usr/bin/env -S bash -c 'echo "Not a user script. source(aka .) only"'
# the correct way to load this file is to source it like this:
# eval "$(. _tf_aux_functions)"
#
# Shared helpers for the tf* wrapper scripts: log timestamping, the
# ===FULLSTOP=== stream terminator, and exit-code persistence across
# separately sourced scripts (via /tmp/TF_EXITCODE).

# Reset the shared timestamp used to group one run's log files.
function _tfSetLogTS() {
TF_LOG_TS=$(date -d "today" +"%Y%m%d%H%M%S")
export TF_LOG_TS
}
# Pass stdin through until the ===FULLSTOP=== marker line, then stop
# (ends a `tail -f` on the last-run log); the marker itself is dropped.
function _tf_sedFullStop() {
sed --unbuffered '/^===FULLSTOP===$/q' | sed --unbuffered '/^===FULLSTOP===$/d'
}
# Persist an exit code so a later, separately sourced script can read it.
function _tf_save_exitCode() {
echo TF_EXITCODE="$1" > /tmp/TF_EXITCODE
}
# Load the persisted code into TF_EXITCODE (default 0); echo it unless
# called with '-'; return it as this function's own status.
# shellcheck disable=SC2120
function _tf_get_exit_code() {
unset TF_EXITCODE
if [[ -r /tmp/TF_EXITCODE ]]; then
source /tmp/TF_EXITCODE
fi
if [[ -z "$TF_EXITCODE" ]]; then
TF_EXITCODE=0
fi
if [[ "$1" != '-' ]]; then
echo "TF_EXITCODE=${TF_EXITCODE}"
fi
if [[ "$TF_EXITCODE" -ne 0 ]]; then
return "$TF_EXITCODE"
fi
}
# On a non-zero saved code, emit a snippet the caller evals to propagate
# it (works whether the caller was sourced or executed).
function _tf_exit_code() {
# shellcheck disable=SC2016
_tf_get_exit_code || echo "return $TF_EXITCODE 2>/dev/null || exit $TF_EXITCODE"
}
# Start each load with a clean exit-code slate.
[[ -e /tmp/TF_EXITCODE ]] && rm /tmp/TF_EXITCODE
unset TF_EXITCODE
# Sanity check: complain (with a cd hint) when the current directory
# holds no *.tf or template files.
function safe_load() {
if [[ -z "$(find -mindepth 1 -maxdepth 1 -type f -name "*.tf" -or -name "*.tf.sz_tmpl")" ]]; then
local TF_EXIST=''
[[ -d '_tf' ]] && TF_EXIST=" Did you forget to cd into _tf?"
>&2 printf "ERROR: No Terraform files found.%s\n" "$TF_EXIST"
return 2
fi
}
# First pass (inside eval): print the lines that make the caller source
# this file for real; second pass (sourced): run the sanity check.
[[ 1 -ne "$_TF_AUX_FUNCTIONS_LOADED" ]] \
&& printf "%s\n" \
"_TF_AUX_FUNCTIONS_LOADED=1" \
"source $(command -v _tf_aux_functions) || { X=$?; return $X 2>/dev/null || exit $X; }" \
|| safe_load

View File

@ -0,0 +1,14 @@
#! /usr/bin/env bash
# Unset every TF_-prefixed variable whose name matches the supplied
# regex fragment (as resolved by get-tf-env), reporting what was done.
function _clear-tf-env() {
  local pattern="${1}"
  local matched
  matched=$(get-tf-env "${pattern}")
  if [[ -n "$matched" ]]; then
    # shellcheck disable=SC2086  # word splitting of the names is intended
    unset $matched
    echo "Cleared the following vars: $(echo "$matched" | xargs echo)"
  else
    echo "Could not find environment variables matching: $(echo "^TF_${pattern}")"
  fi
}
_clear-tf-env "${@}"
unset _clear-tf-env

View File

@ -0,0 +1,7 @@
#! /usr/bin/env bash
# Convenience wrapper: clear all TF_* variables whose names end in
# TARGET (e.g. TF_TARGET, TF_DESTROY_TARGET).
_clear-tf-env-targets() {
  clear-tf-env '[A-Z_]*TARGET$'
}
_clear-tf-env-targets "${@}"
unset _clear-tf-env-targets

View File

@ -0,0 +1,15 @@
#! /usr/bin/env bash
# NOTE(review): this file's body is byte-identical to `clear-tf-env`;
# the README lists a `clear-tf-end-vars` helper, so this copy was
# presumably meant to narrow the pattern to TF_VAR_* - confirm intent.
# Unset every TF_* variable whose name matches the given pattern.
function _clear-tf-env() {
local PATTERN="${1}"
local TOCLEAR=$(get-tf-env "${PATTERN}")
[[ -z "$TOCLEAR" ]] && {
echo "Could not find environment variables matching: $(echo "^TF_${PATTERN}")"
} || {
unset $TOCLEAR
echo "Cleared the following vars: $(echo "$TOCLEAR" | xargs echo)"
}
}
_clear-tf-env "${@}"
unset _clear-tf-env

View File

@ -0,0 +1,12 @@
#! /usr/bin/env bash
# List TF_-prefixed variables (optionally narrowed by an extra regex
# fragment appended to ^TF_) together with their current values.
function _get-tf-env() {
  local pattern="${1-}"
  local var_name
  while read -r var_name; do
    set | grep "^${var_name}=" | grep --color=auto '\b='
  done < <(compgen -v | grep "^TF_${pattern}")
}
_get-tf-env "${@}"
unset _get-tf-env

View File

@ -0,0 +1,17 @@
#! /usr/bin/env bash
# List the TF_* variables that influence a terraform plan run:
# the TF_VAR_* inputs plus the *TARGET selection variables.
function _get-tf-env-plan() {
#set | grep '^TF\(_\(VAR_[a-z]\)\|\([A-Z_]*TARGET=\)\)' | grep '\b='
local PATTERN="${1:-.*}"
# BUGFIX: compgen -v emits bare variable NAMES (no '=' suffix), so the
# original alternative '[A-Z_]*TARGET=' could never match and the
# TARGET variables were silently omitted from the listing.
compgen -v | grep "^TF\(_VAR_[a-z]\|[A-Z_]*TARGET\)" \
| while read a; do
set \
| grep "^$a=" \
| grep "${PATTERN}" \
| grep '\b=' --color
done
}
_get-tf-env-plan "${@}"
unset _get-tf-env-plan

View File

@ -0,0 +1,35 @@
#! /usr/bin/env bash
# Review the latest tf run log with sensible pager defaults.
# Usage: less-tf [logfile|-] [extra less args...]
#   '-'  : follow mode - tail -f the last-run log until the
#          ===FULLSTOP=== marker written by the tf wrapper.
#   else : open the given log (default _logs/0_0_lastrun.log) in less;
#          skipped entirely when LESS_NO_WAIT is set.
eval "$(. _tf_aux_functions)"
function _less-tf() {
local _TF_LOG_FILE=${1-_logs/0_0_lastrun.log}
[[ "$1" == "-" ]] && _TF_LOG_FILE='_logs/0_0_lastrun.log'
[[ -z "$SZ_DEBUG" ]] || echo "lessts: ${@} | LESS_NO_WAIT = '${LESS_NO_WAIT}'"
[[ "$1" == "-" ]] && {
[[ -z "$SZ_DEBUG" ]] || echo "tailing..."
# Neutralize Ctrl-C so it only stops the tail, not the caller.
trap : INT;
tail -f $_TF_LOG_FILE | _tf_sedFullStop;
} \
|| { [[ -z "${LESS_NO_WAIT}" ]] \
&& {
[[ -z "$SZ_DEBUG" ]] || echo "Invoking less..."
less \
--no-init \
--raw-control-chars \
--line-numbers \
--quiet \
--hilite-unread \
--incsearch \
--ignore-case \
--force \
"${@:2}" -- \
"$_TF_LOG_FILE"
#\
#'+G?([^\w\W][\[0-9m+])Plan:' \
#'+3k'
}
}
}
_less-tf "${@}"
unset _less-tf

View File

@ -0,0 +1,99 @@
#!/usr/bin/env -S bash -c 'echo "Not a user script. source(aka .) only"'
set -e
#
# Loads the common environment for direnv-enabled repos
# True when the named command, function, or builtin is resolvable.
has() {
  type "$1" >/dev/null 2>&1
}
# Deduplicate a PATH-style string, keeping first-occurrence order.
# Backticks are doubled and colons rewritten to a '`:`' record
# separator (escaped '\:' colons are restored) so awk can split on
# entries unambiguously; the final sed undoes the encoding.
get_clean_path() {
# shellcheck disable=SC2016
sed 's/ *:\?$//g;s/`/``/g;s/:/`:`/g;s/\\`:`/\\:/g;' <<< "$1" | \
awk -v RS='`:`' -v ORS='`:`' '!arr[$0]++' | \
sed 's/`:`/:/g;s/:$//g'
}
# Prepend each argument to PATH, then deduplicate the result via
# get_clean_path.
PATH_add() {
  for new_path in "$@"; do
    PATH="${new_path}:$PATH"
  done
  PATH=$(get_clean_path "$PATH")
}
# Pager defaults shared by all less invocations in this workflow.
export LESS='--quit-if-one-screen --ignore-case --line-numbers --quiet --raw-control-chars --hilite-unread --no-init --quit-at-eof '
# Put this script's own directory (the tf utility scripts) on PATH.
ENV_SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
PATH_add "$ENV_SCRIPT_DIR"
export PATH
export TF_LOG='info' # Options are: off, error, warn, info, debug, trace
export TF_LOG_PATH='_logs/terraform'
# Terraform deployment plan name
SZ_TF_DIRNAME="$(basename "$PWD")"
LOG_DIR="$SZ_TF_DIRNAME/_tf/_logs" # save for manipulation
# When sourced from inside _tf, the plan name is the parent directory.
if [[ "$SZ_TF_DIRNAME" == _tf ]]; then
SZ_TF_DIRNAME="$(basename "${PWD%/*}")"
fi
# Determine correct log location
LOG_DIR="$(echo "$LOG_DIR" | sed -Ee 's|(^_tf[^/]*/)_tf/|\1|; s|^[^/]*/||')"
# Best-effort creation; never abort the direnv load (set -e is active).
[[ -d "${LOG_DIR}" ]] || mkdir "${LOG_DIR}" 2>/dev/null || true
export SZ_TF_DIRNAME SZ_TF_NAME="$SZ_TF_DIRNAME"
# RESET tells set_tf_vars (below) to discard inherited TF_* values.
TFVARS_RESET_OR_BLANK=RESET
# Populate and export the TF_* variables that drive the tf* wrappers.
# With "RESET", inherited values are discarded first; otherwise existing
# values are kept (the :- defaults below only fill in the gaps).
function set_tf_vars() {
if [[ "$1" == "RESET" ]]; then
unset TF_VAR_PLAN_PATH TF_VAR_OUT TF_VAR_FILE_CLI TF_CLI_ARGS
unset TF_CLI_ARGS_init TF_CLI_ARGS_validate TF_CLI_ARGS_apply
unset TF_CLI_ARGS_plan TF_CLI_ARGS_refresh TF_CLI_ARGS_destroy
else
:
fi
# Where `terraform plan` writes its plan file, and the -out flag for it.
TF_VAR_PLAN_PATH="${TF_VAR_PLAN_PATH:-_.tmp.${SZ_TF_NAME}.tfplan}"
TF_VAR_OUT="${TF_VAR_OUT:--out ${TF_VAR_PLAN_PATH}}"
# TF_VAR_FILE_CLI="-var-file="
# TF_VAR_FILE_CLI="${TF_VAR_FILE_CLI}"
# TF_CLI_ARGS=''
# TF_CLI_ARGS_init=''
# TF_CLI_ARGS_validate=''
# Default per-command terraform args: apply consumes the plan file that
# plan writes, so `tf apply` never prompts.
TF_CLI_ARGS_apply="${TF_CLI_ARGS_apply:-${TF_VAR_PLAN_PATH}}"
TF_CLI_ARGS_plan="${TF_CLI_ARGS_plan:-${TF_VAR_OUT} ${TF_VAR_FILE_CLI}}"
TF_CLI_ARGS_refresh="${TF_CLI_ARGS_refresh:-${TF_VAR_FILE_CLI}}"
# TF_CLI_ARGS_destroy=''
export TF_IN_AUTOMATION="${TF_IN_AUTOMATION:-1}"
# console
# fmt
# force-unlock
# get
# graph
# import
# login
# logout
# output
# providers
# refresh
# show
# state
# taint
# test
# untaint
# version
# workspace
export TF_VAR_PLAN_PATH TF_VAR_OUT TF_VAR_FILE_CLI TF_CLI_ARGS
export TF_CLI_ARGS_init TF_CLI_ARGS_validate TF_CLI_ARGS_apply
export TF_CLI_ARGS_plan TF_CLI_ARGS_refresh TF_CLI_ARGS_destroy
# NOTE(review): SZ_TF_NETWORK_NAME is exported but never assigned in
# this file - presumably set by an outer .envrc; confirm.
export SZ_TF_NETWORK_NAME
}
set_tf_vars "${TFVARS_RESET_OR_BLANK}"
set +e

View File

@ -0,0 +1,86 @@
#! /usr/bin/env bash
# Scaffold a <gcp-project>/_tf directory (or the repo root's _tf when
# ROOT_TF=1) from an optional named template: link shared template
# files and the direnv config into place.
usage() {
cat <<USAGE
Usage:
[RESET=1] [ROOT_TF=1] prep-proj-tf [<template-name>] [<gcp-project>]
Description:
Crates gcp-project/_tf directory based on teamplate-name
When template-name is blank, only links to template-root.
When gcp-project isn't provided, the current directory will
be used if validated as a gcp-project.
ROOT_TF=1 indicates setting up the root's _tf directory, and not any
specific project.
List of templates:
USAGE
list-templates-for-usage
}
set -e
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
# shellcheck disable=SC1091
source "$SCRIPT_DIR/../prep-repo.sh.inc"
RUN_DIR="$PWD"
NAME="$(basename "$RUN_DIR")"
# Allow invocation from inside an existing _tf directory.
[[ "$NAME" == '_tf' ]] && RUN_DIR="$( dirname "$RUN_DIR" )"
if [[ "$1" == "--help" ]]; then
usage
exit 2
fi
# Make sure we're running on a gcp-project/project level
TST_RUN_DIR="$RUN_DIR"
[[ "$ROOT_TF" != 1 ]] || TST_RUN_DIR="${GIT_ROOT}"
[[ -z "$2" ]] || TST_RUN_DIR="${GIT_ROOT}/$2"
PROJ_NAME="$(basename "${TST_RUN_DIR}")"
if [[ "$TST_RUN_DIR" == "$GIT_ROOT" ]]; then
if [[ "$ROOT_TF" == 1 ]]; then
printf "Setting up git root _tf\n"
else
printf '%s\n' \
'Preparing git root failed.' \
'Are you in the correct location?' \
'Did you forget to pass ROOT_TF=1?'
exit 1
fi
else
if [[ "$TST_RUN_DIR" != "${GIT_ROOT}/${PROJ_NAME}" ]]; then
printf "%s is not a valid project path\n" "$TST_RUN_DIR"
exit 1
fi
RUN_DIR="${GIT_ROOT}/${PROJ_NAME}"
# Prepare the enclosing project; abort when prep-repo skipped it.
TMP=$("$SCRIPT_DIR/../prep-repo" "$PROJ_NAME")
echo "$TMP"
echo "$TMP" | grep -q "Skipped" && exit 1
fi
echo "Preparing ${PROJ_NAME}"
mkdir -p "${RUN_DIR}/_tf/_logs"
RUN_DIR="${RUN_DIR}/_tf"
remove_links_from_run_dir_on_RESET
# The repo root keeps its own direnv config; projects link the shared one.
[[ "$ROOT_TF" == "1" ]] \
|| safe_link "direnv/envrc.project-tf" "$RUN_DIR/.envrc"
TMPL_NAME="${1}"
# Try both template roots; link_templates sets LINKED_TMPL on success.
SAFE_ROOT="$SZ_COMMON_PATH/tf/templates" link_templates
SAFE_ROOT="$SZ_COMMON_PATH/../tf/templates" link_templates
if [[ -n "${TMPL_NAME}" && $LINKED_TMPL = 0 ]]; then
printf "Failed to initialize %s template, %s\n" \
"${TMPL_NAME}" \
"as the path does not exist"
fi
# Seed the docs expected in every plan directory.
[[ -r "$RUN_DIR/README.md" ]] || touch "$RUN_DIR/README.md"
[[ -r "$RUN_DIR/DEPLOYMENT-STEPS.md" ]] || touch "$RUN_DIR/DEPLOYMENT-STEPS.md"

View File

@ -0,0 +1,86 @@
#! /usr/bin/env bash
# Scaffold a <gcp-project>/<tf-name> directory from an optional named
# template: validates the two-level path, runs prep-repo on the
# project, then links the direnv config and template files into
# <path>/_tf.
usage() {
cat <<USAGE
Usage:
[RESET=1] prep-tf-host <template-name> <gcp-project/tf-name>
Description:
Crates gcp-project/tf-name directory based on teamplate-name
When template-name is blank, only links to template-root.
When gcp-project/tf-name will grab current directory name and if
it is a valid location, it will use the location to feed the
script's logic.
List of templates:
USAGE
list-templates-for-usage
}
set -e
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
# shellcheck disable=SC1091
source "$SCRIPT_DIR/../prep-repo.sh.inc"
RUN_DIR="$PWD"
NAME="$(basename "$RUN_DIR")"
# Allow invocation from inside an existing _tf directory.
[[ "$NAME" == '_tf' ]] && RUN_DIR="$( dirname "$RUN_DIR" )"
if [[ "$1" == "--help" ]]; then
usage
exit 2
fi
# Make sure we're running on a gcp-project/project level
TST_RUN_DIR="$RUN_DIR"
[[ -z "$2" ]] || TST_RUN_DIR="${GIT_ROOT}/$2"
# Last two path components: <gcp-project>/<tf-name>.
PATH_ARG="$(basename "$(dirname "${TST_RUN_DIR}")")/$(basename "${TST_RUN_DIR}")"
if [[ "$TST_RUN_DIR" != "${GIT_ROOT}/${PATH_ARG}" ]]; then
printf "%s is not a valid path\n" "$TST_RUN_DIR"
exit 1
fi
PROJ_NAME="$(basename "$(dirname "${PATH_ARG}")")"
TF_NAME="$(basename "${PATH_ARG}")"
if [[ "$PROJ_NAME/$TF_NAME" != "${PATH_ARG}" ]]; then
printf "ERROR %s does not match argument %s, %s\n" \
"$PROJ_NAME/$TF_NAME" "${PATH_ARG}" \
"this does not seem to be a valid project path."
exit 1
fi
RUN_DIR="${GIT_ROOT}/${PATH_ARG}"
# Prepare the enclosing project; abort when prep-repo skipped it.
TMP=$("$SCRIPT_DIR/../prep-repo" "$PROJ_NAME")
echo "$TMP"
echo "$TMP" | grep -q "Skipped" && exit 1
echo "Preparing ${PATH_ARG}"
mkdir -p "${RUN_DIR}/_tf/_logs"
remove_links_from_run_dir_on_RESET
safe_link "direnv/envrc.project-tf" "$RUN_DIR/.envrc"
TMPL_NAME="${1}"
# Try both template roots; link_templates sets LINKED_TMPL on success.
RUN_DIR="$RUN_DIR/_tf" SAFE_ROOT="$SZ_COMMON_PATH/tf/templates" link_templates
RUN_DIR="$RUN_DIR/_tf" SAFE_ROOT="$SZ_COMMON_PATH/../tf/templates" link_templates
if [[ -n "${TMPL_NAME}" && $LINKED_TMPL = 0 ]]; then
printf "Failed to initialize %s template, %s\n" \
"${TMPL_NAME}" \
"as the path does not exist"
fi
# Seed the docs expected in every plan directory.
[[ -r "$RUN_DIR/README.md" ]] || touch "$RUN_DIR/README.md"
[[ -r "$RUN_DIR/DEPLOYMENT-STEPS.md" ]] || touch "$RUN_DIR/DEPLOYMENT-STEPS.md"

View File

@ -0,0 +1,33 @@
#! /usr/bin/env bash
# Generate a self-signed TLS certificate for this host (when one is not
# already present) and launch a traefik/whoami container serving HTTPS
# on host port 8443.
set -e
HOST=$(hostname)
CERT_PREFIX="certs/whoami-$HOST"
CERT_KEY="${CERT_PREFIX}.key"
CERT_CRT="${CERT_PREFIX}.crt"
mkdir -p certs
if [[ ! -r "$CERT_CRT" || ! -r "$CERT_KEY" ]]; then
  # Private key, protected with a throwaway passphrase.
  openssl genrsa -des3 -out "$CERT_KEY" \
    -passout "pass:keypassphrase" \
    4096
  # BUGFIX: the original subject used $HOSTWhoAmI - an undefined
  # variable (bash parsed the whole run as one name) - so the C= field
  # came out empty. Brace the expansion to get "<hostname>WhoAmI".
  openssl req -x509 \
    -key "$CERT_KEY" -passin "pass:keypassphrase" \
    -sha256 -days 365 -subj "/C=${HOST}WhoAmI/ST=NC/L=Morrisville/O=acme/OU=devops/CN=whoami.devops" \
    -out "$CERT_CRT" \
    -passout "pass:pempassphrase"
  chmod 664 "$CERT_CRT"
fi
#podman pull 'ghcr.io/traefik/whoami:v1.10.1'
podman pull 'ghcr.io/traefik/whoami:latest'
# BUGFIX: the image was pulled with podman but then run with docker
# under a different name (docker.io/traefik/whoami), so the pulled
# image was never used. Run exactly what was pulled, with podman.
podman run --detach --restart unless-stopped \
  --name whoami-08443 \
  -p 8443:443 \
  -v "$PWD/certs:/certs" \
  'ghcr.io/traefik/whoami:latest' -cert "/$CERT_CRT" -key "/$CERT_KEY"

View File

@ -0,0 +1,13 @@
#! /usr/bin/env bash
# Enable debug terraform files: for every dbg.*.tf.off in the current
# directory, create an active symlink named _.dbg.*.tf (the _. prefix
# keeps the generated link out of git, per .gitignore).
function _switch-dbg-tf-on() {
local OFF_TF ON_TF
for OFF_TF in dbg.*.tf.off; do
# BUGFIX: without nullglob an empty match leaves the literal pattern
# 'dbg.*.tf.off' in OFF_TF, and the original created a dangling
# '_.dbg.*.tf' symlink; skip non-existent matches instead.
[[ -e "$OFF_TF" ]] || continue
ON_TF="_.${OFF_TF%\.off}"
[[ ! -r "$ON_TF" ]] && ln -s "$OFF_TF" "$ON_TF"
echo "$ON_TF switched on."
done
}
_switch-dbg-tf-on "${@}"
unset _switch-dbg-tf-on

View File

@ -0,0 +1,32 @@
#! /usr/bin/env bash
# shellcheck disable=SC1091
eval "$(. _tf_aux_functions)"
# Wrapper around `terraform` that tees output into three logs:
#   _logs/0_0_lastrun.log          - always the last run
#   _logs/0_<action>.log           - latest run of this action
#   _logs/<timestamp>_<action>.log - timestamped history
# The terraform run happens in the background; `less-tf -` follows it
# live until the ===FULLSTOP=== marker is appended. The awk stage
# collapses multi-line <<EOT heredoc values out of the condensed logs
# (the full output stays in the timestamped log).
function _tf() {
[[ -z "$TF_LOG_TS" ]] && _tfSetLogTS
local NAME=$1
# Destroy runs get their own log name (e.g. plan-destroy).
[[ "${*}" =~ "-destroy" ]] && NAME="$1-destroy"
echo "===_logs/0_$NAME.log===" > _logs/0_0_lastrun.log
echo "===_logs/${TF_LOG_TS}_$NAME.log===" \
| tee --append _logs/0_0_lastrun.log \
> "_logs/0_$NAME.log"
[[ -z "$SZ_DEBUG" ]] || echo "Executing: terraform ${*}"
{
{ \
terraform "${@}" 2>&1 || _tf_save_exitCode $?
} | tee "_logs/${TF_LOG_TS}_$NAME.log" \
| awk 'BEGIN {p=1}; /<<\W*EOT/ {print; p=0}; /^\W*EOT/ {p=1}; p; fflush();' \
| tee --append _logs/0_0_lastrun.log \
>> "_logs/0_$NAME.log"
echo "===FULLSTOP===" >> _logs/0_0_lastrun.log
} &
less-tf -
}
_tf "${@}"
unset _tf
eval "$( _tf_exit_code )"

View File

@ -0,0 +1,26 @@
#! /usr/bin/env bash
# Pre-plan template expansion: delete previously generated _.gen.*
# files (unless --no-delete is passed) and render every *.sz_tmpl in
# the current directory through envsubst into
# _.gen.<plan-name>.<basename>, prefixed with a do-not-edit banner.
function _tf-pre-plan() {
[ -z "$SZ_TF_NAME" ] \
&& echo "ERROR: SZ_TF_NAME isn't declared!" \
&& return 1
[[ "${*}" =~ .*--no-delete.* ]] || find . -name "_.gen.*" -delete
# Subshell with nullglob so a missing match yields an empty list.
for TF_TMPL_FILE in $(bash -c 'shopt -s nullglob; echo *.sz_tmpl'); do
local OUT_FILE="${TF_TMPL_FILE%\.sz_tmpl}"
OUT_FILE="_.gen.${SZ_TF_NAME}.${OUT_FILE#_sz\.}"
echo "Generating ${OUT_FILE} file..."
printf "%s\n" \
"# DO NOT EDIT THIS FILE!!!! " \
"# This file was autogenerated by \`_tf-pre-plan\` from ${TF_TMPL_FILE}" \
"" \
"$( envsubst < "${TF_TMPL_FILE}" )" \
> "${OUT_FILE}"
done
printf "\n\n" >&2
}
_tf-pre-plan "${@}"
unset _tf-pre-plan

View File

@ -0,0 +1,12 @@
#! /usr/bin/env bash
# shellcheck disable=SC1091
eval "$(. _tf_aux_functions)"
. tf apply "${@}" && _tfSetLogTS
eval "$( _tf_exit_code )"
. tf-extract \
| tee "_logs/${TF_LOG_TS}_state_ids.json" \
> _logs/0_9_last_state_ids.json \
&& echo "_logs/${TF_LOG_TS}_state_ids.json written" >&2
eval "$( _tf_exit_code )"

View File

@ -0,0 +1,38 @@
#! /usr/bin/env bash
# shellcheck disable=SC1091
eval "$(. _tf_aux_functions)"
# Dump the current terraform state as a JSON list of {name, id} pairs
# for every managed (non-data) resource - root module and child
# modules alike - saving timestamped and "latest" copies under _logs/.
# With --clip, print an aligned two-column table truncated to the
# terminal width instead of raw JSON.
final_render() {
if [ "$1" = "--clip" ]; then
jq -r '"- \(.name):|\(.id)"' \
| column -ts'|' | cut -c -${2:-${COLUMNS:-$(tput cols)}}
else
jq
fi
}
# google_storage_bucket ids get a project/ prefix to stay unambiguous.
terraform show -json | jq \
| tee "_logs/${TF_LOG_TS}_state.json" \
| tee "_logs/0_state.json" \
| jq '
[[.values.root_module.resources,
(.values.root_module.child_modules // [] | .[].resources // [])
] | map(.[] ) | .[]
| select( .mode != "data" )
| {
name: .address,
id: (
if( .type == "google_storage_bucket" ) then
"\(.values.project)/\(.values.id)"
else
.values.id
end
)
}]
' \
| tee "_logs/${TF_LOG_TS}_state_ids.json" \
| tee "_logs/0_9_last_state_ids.json" \
| final_render "$@"
eval "$( _tf_exit_code )"

View File

@ -0,0 +1,13 @@
#! /usr/bin/env bash
# shellcheck disable=SC1091
eval "$(. _tf_aux_functions)"
# Turn a name/id JSON list (as produced by tf-extract) into an
# executable re-import script: for each resource, a `tf state rm`
# followed by a `tf import` line. Reads stdin when no file is given.
IDs_JSON="${1:--}"
# The '%' acts as a column separator so `column -ts'%'` aligns the
# import ids; awk prepends the shebang before the first line only.
jq -r ' .[] |
"tf state rm \"\(.name)\";\n tf import \"\(.name)\"% \"\(.id)\";"
' "$IDs_JSON" \
| awk '{printf "%s%s\n", (NR==1 ? "#! /usr/bin/env bash\n\n" : ""), $0;}' \
| column -ts'%'
eval "$( _tf_exit_code )"

View File

@ -0,0 +1,13 @@
#! /usr/bin/env bash
# init step: regenerate templated *.tf files, then run terraform init
# with a fresh logging timestamp (via tf0).
eval "$(. _tf_aux_functions)"
_tf-init() {
  tf-_pre-plan "$@"
  tf0 init "$@"
}
_tf-init "$@"
unset _tf-init
eval "$( _tf_exit_code )"

View File

@ -0,0 +1,14 @@
#! /usr/bin/env bash
resources=$()
# Loop over resources and output name and ID pairs
terraform state list | grep -Ev '^data\.' | while read -r r; do
printf 'tf import %s %s\n' \
"$r" \
"$( terraform state show "$r" \
| sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g" \
| grep -E "^\W*(id|self-link)\W*=" \
| head -1 \
| awk '{print $3}' \
)"
done

View File

@ -0,0 +1,31 @@
#! /usr/bin/env bash
# shellcheck disable=SC1091
eval "$(. _tf_aux_functions)"
eval "$( _tf_exit_code )"
# Run `terraform plan`, translating the space-separated TF_TARGET env
# var (or TF_DESTROY_TARGET for -destroy runs) into --target=... args,
# after regenerating templated files. Afterwards, grep the plan log
# for the change-summary lines ("forces"/"must be"/"will be") and
# append them to the last-run log.
_tf-plan() {
[[ -z "$SZ_DEBUG" ]] || echo "DEBUG: tf-plan ${*}"
local _TF_TARGET=""
# shellcheck disable=SC2153,SC2086 # TF_TARGET references an external env
[[ ${#TF_TARGET} -gt 0 ]] && _TF_TARGET="$(printf -- '--target=%s ' ${TF_TARGET})"
# shellcheck disable=SC2086 # word splitting is desired here
[[ "${*}" =~ "-destroy" ]] && [[ ${#TF_DESTROY_TARGET} -gt 0 ]] && _TF_TARGET="$(printf -- '--target=%s ' ${TF_DESTROY_TARGET})"
tf-_pre-plan "${@}"
# shellcheck disable=SC2086 # word splitting is desired here
tf plan ${_TF_TARGET} "${@}"
}
[[ -z "$TF_LOG_TS" ]] && _tfSetLogTS
_tf-plan "${@}"
unset _tf-plan
LOG_NAME="_logs/${TF_LOG_TS}_plan"
[[ "${*}" =~ "-destroy" ]] && LOG_NAME="${LOG_NAME}-destroy"
LOG_NAME="${LOG_NAME}.log"
# Only summarize when the run recorded no failure exit code.
[[ ! -r /tmp/TF_EXITCODE ]] \
&& grep -E '^(.\[1m)? # .* (forces|(must|will) be)' "${LOG_NAME}" \
| tee --append _logs/0_0_lastrun.log
eval "$( _tf_exit_code )"

View File

@ -0,0 +1,6 @@
#! /usr/bin/env bash
# plan-destroy step: a fresh-timestamp destroy plan. Sourced (not
# executed) so the wrapper environment and exit code propagate.
eval "$(. _tf_aux_functions)"
. tf0-plan --destroy "${@}"
eval "$( _tf_exit_code )"

View File

@ -0,0 +1,13 @@
#! /usr/bin/env bash
# Same as `tf`, but first resets the shared logging timestamp so a new
# sequence of log files is started. With no arguments it only resets.
eval "$(. _tf_aux_functions)"
_tf0() {
  _tfSetLogTS
  if [ $# -gt 0 ]; then
    tf "$@"
  fi
}
_tf0 "$@"
unset _tf0
eval "$( _tf_exit_code )"

View File

@ -0,0 +1,7 @@
#! /usr/bin/env bash
eval "$(. _tf_aux_functions)"
_tfSetLogTS
. tf-plan "${@}"
eval "$( _tf_exit_code )"

View File

@ -0,0 +1,2 @@
#! /usr/bin/env bash
. tf-apply "${@}"

View File

@ -0,0 +1,14 @@
#! /usr/bin/env bash
# Prepare the shell context for running terraform in the current
# directory: require *.tf files, ensure _logs exists, and default the
# -var-file CLI fragment from <dirname>.tfvars when that file exists.
# Prints the directory basename as the context name.
function _tfcontext() {
# BUGFIX: `return -1` is not portable (bash rejects negative codes in
# some versions); use a plain non-zero status.
[[ -n $(find "$PWD" -name '*.tf') ]] || return 1
[[ -d _logs ]] || mkdir _logs
# BUGFIX: the original read ${TF_VAR_FILE_CLI-:...} - the wrong
# variable AND the wrong operator (`-:` yields a literal ":..." when
# unset) - and single-quoted '$TF_VAR_FILE_NAME', blocking expansion.
# Default the file name, then build the -var-file argument from it.
TF_VAR_FILE_NAME=${TF_VAR_FILE_NAME:-$(basename "$PWD").tfvars}
[[ -r $TF_VAR_FILE_NAME ]] || unset TF_VAR_FILE_NAME
TF_VAR_FILE_CLI=${TF_VAR_FILE_CLI:-${TF_VAR_FILE_NAME:+-var-file=$TF_VAR_FILE_NAME}}
basename "$PWD"
}
_tfcontext "${@}"
unset _tfcontext

View File

@ -0,0 +1,2 @@
#! /usr/bin/env bash
. tf-plan "${@}"

View File

@ -0,0 +1,2 @@
#! /usr/bin/env bash
. tf-plan-destroy "${@}"