Ansible is a great automation tool. At work I use it primarily for network policy enforcement and automation, such as managing ACLs on Cisco routers, but personally I use it more as a server deployment and automation tool. For a while now I’ve been wanting to migrate a few of my general-use virtual servers to Vultr [LINK], but hit a roadblock with free time. Last week in my lab I upgraded to the development version of Ansible (2.6) and noticed that Vultr support has been added to the cloud modules, so I started experimenting with them. After a day or two of experimentation it sprouted into a full-blown provisioning playbook. The following is what I created…
What I want is a single playbook that can build a server from scratch, and a companion playbook that can do the same in reverse. For cost-saving and security reasons, I want to be able to run the deprovisioning playbook from crontab to automatically destroy any LAB VMs I spin up at the end of the day.
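Something like the crontab entry below is what I have in mind for that. The schedule, paths, and LAB.yaml variable file are purely illustrative, but because the playbooks carry an ansible-playbook shebang they can be called directly like any other script:

# Destroy whatever is listed in LAB.yaml at 19:00 on weekdays (example schedule and paths only)
0 19 * * 1-5 /etc/ansible/vr_deprovision.yaml -e VARS=/etc/ansible/LAB.yaml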
The following is the provisioning playbook… I have a host_vars file that contains my API key and other settings common to all my instances. The instances to be provisioned are passed in via a variable called “VARS” when the playbook is run. Since my vars are modular, if I had a real ticketing system I could have it output a properly formatted instance.yaml file for the playbook to use. Concerning the Ansible modules, there is one quirky thing about them: they require an environment variable called VULTR_API_KEY, and they also require a vultr.ini file with the same VULTR_API_KEY set in it. To make things easier I deal with that transparently within the playbook, so it will create that INI file if it does not already exist.
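For example, provisioning the PROD-WEB environment shown later in this post is just ./vr_provision.yaml -e VARS=PROD-WEB.yaml. Given the API key from host_vars, the ~/.vultr.ini that the playbook maintains ends up containing nothing more than the following (placeholder key shown):

[default]
VULTR_API_KEY=YOURAPIKEY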
vr_provision.yaml: a /usr/local/bin/ansible-playbook script: Vultr server provisioning
#!/usr/local/bin/ansible-playbook
## Provision virtual instances on VULTR.
## 2018 (v.01) - Playbook from www.davideaves.com
---
- name: "VULTR provision instance"
  hosts: localhost
  connection: local
  gather_facts: false

  environment:
    VULTR_API_KEY: "{{ vultr_common.apikey }}"

  tasks:

    - name: "GET user running the deploy"
      local_action: command whoami
      changed_when: False
      register: WHOAMI

    ### Prerequisite validation ###

    - block:
        - name: "Playbook external variables include"
          include_vars: "{{ VARS }}"
          when: (VARS is defined)
      always:
        - name: "Playbook external variables example"
          local_action: debug msg="playbook.yaml -e VARS=tag.yaml"
          when: (VARS is not defined)

    - name: "Playbook requirement check"
      fail:
        msg: |
          Required variable is undefined!
          > vultr_common.apikey
          > vultr_common.inventory_file
          > vultr.servers: ...
      when: (vultr_common is undefined) or
            (vultr_common.apikey is undefined) or
            (vultr_common.inventory_file is undefined) or
            (vultr is undefined) or
            (vultr.servers is undefined)

    - name: "Inventory file status"
      stat: path="{{ vultr_common.inventory_file }}"
      register: INVENTORY_FILE

    - name: "Inventory file writeable"
      fail:
        msg: "{{ vultr_common.inventory_file }} not writeable"
      when: not(INVENTORY_FILE.stat.writeable)

    - name: "~/.vultr.ini handling block"
      block:
        - name: "Validate ini file exists"
          file:
            path: "~/.vultr.ini"
            mode: 0600
            state: touch
          changed_when: False
        - name: "VULTR_API_KEY is present"
          ini_file:
            path: "~/.vultr.ini"
            section: default
            option: "VULTR_API_KEY"
            value: "{{ vultr_common.apikey }}"
            no_extra_spaces: yes
            state: present
      rescue:
        - fail:
            msg: "Unable to handle ~/.vultr.ini"

    ### Collect account balance and Fail if too low ###

    - name: "VULTR account facts"
      local_action:
        module: vr_account_facts

    - name: "Account balance requirement check"
      fail:
        msg: "Account balance low: {{ ansible_facts.vultr_account_facts.balance|int }}"
      when: (ansible_facts.vultr_account_facts.balance|int > vultr_common.min_balance)

    ### Configure SSH Keys ###

    - name: "VULTR user authorized_key"
      local_action:
        module: vr_ssh_key
        name: "{{ WHOAMI.stdout }}"
        ssh_key: "{{ lookup('file', '~/.ssh/authorized_keys') }}"

    ### Configure Firewall ###
    # These are additive, nothing is removed unless specified or done manually.

    - name: "VULTR firewall groups"
      local_action:
        module: vr_firewall_group
        name: "{{ firewall_group }}"
      when: (firewall_group is defined) and
            (vultr_common.firewall_group is defined) and
            (vultr_common.firewall_group[firewall_group] is defined)

    - name: "Get public IP of ansible host"
      local_action:
        module: ipify_facts
      when: not(ansible_facts.ipify_public_ip is defined)

    - name: "VULTR firewall rule: {{ firewall_group }}/management"
      local_action:
        module: vr_firewall_rule
        group: "{{ firewall_group }}"
        protocol: tcp
        port: 22
        ip_version: v4
        cidr: "{{ ansible_facts.ipify_public_ip | ipv4 }}/32"
      when: (firewall_group is defined) and
            (ansible_facts.ipify_public_ip is defined) and
            (vultr_common.firewall_group[firewall_group] is defined)

    - name: "VULTR firewall rule: {{ firewall_group }}"
      local_action:
        module: vr_firewall_rule
        group: "{{ firewall_group }}"
        protocol: "{{ item.protocol | default('tcp') }}"
        port: "{{ item.port | default('0') }}"
        ip_version: "{{ item.ip_version | default('v4') }}"
        state: "{{ item.state | default('present') }}"
      with_items: "{{ (vultr_common.firewall_group[firewall_group]) }}"
      when: (firewall_group is defined) and
            (vultr_common.firewall_group[firewall_group] is defined)

    ### Deploy Instances ###

    - name: "VULTR provision instances"
      local_action:
        module: vr_server
        name: "{{ item.name }}"
        hostname: "{{ item.name }}"
        ipv6_enabled: yes
        os: "{{ item.os }}"
        plan: "{{ item.plan }}"
        private_network_enabled: yes
        region: "{{ item.region }}"
        ssh_key: "{{ WHOAMI.stdout }}"
        state: present
        firewall_group: "{{ firewall_group | default('') }}"
        force: False
        tag: "{{ tag | default('none') }}"
      with_items: "{{ vultr.servers }}"
      register: BUILD

    ### Update Ansible inventory ###

    - name: "Initialize SERVER list"
      set_fact: SERVER=[]
      when: (BUILD is defined)
      no_log: True

    - name: "Populate SERVER list"
      set_fact:
        SERVER: "{{ SERVER }} + [ '{{ item.vultr_server.name }},{{ item.vultr_server.v4_main_ip }},{{ item.vultr_server.v6_main_ip }}' ]"
      with_items: "{{ BUILD.results }}"
      when: (SERVER is defined)
      no_log: True

    - name: "Update inventory file with SERVER list"
      ini_file:
        path: "{{ vultr_common.inventory_file }}"
        section: vultr
        option: "{{ item.split(',')[0] }} ansible_host"
        value: "{{ item.split(',')[1] }}"
        no_extra_spaces: yes
        mode: 0666
        state: present
        backup: yes
      with_items: "{{ SERVER }}"
      when: (SERVER is defined)

    ### Update DNS ###

    - name: "VULTR DNS domain"
      local_action:
        module: vr_dns_domain
        name: "{{ item.split(',')[0] | regex_replace('^\\w+.') }}"
        server_ip: 127.0.0.1
        state: present
      with_items: "{{ SERVER }}"
      when: (SERVER is defined)

    - name: "VULTR DNS A record"
      vr_dns_record:
        record_type: A
        name: "{{ item.split(',')[0] | regex_replace('[.]\\w*') }}"
        domain: "{{ item.split(',')[0] | regex_replace('^\\w+.') }}"
        data: "{{ item.split(',')[1] }}"
        ttl: 300
      with_items: "{{ SERVER }}"
      when: (SERVER is defined)

    - name: "VULTR DNS AAAA record"
      vr_dns_record:
        record_type: AAAA
        name: "{{ item.split(',')[0] | regex_replace('[.]\\w*') }}"
        domain: "{{ item.split(',')[0] | regex_replace('^\\w+.') }}"
        data: "{{ item.split(',')[2] }}"
        ttl: 300
      with_items: "{{ SERVER }}"
      when: (SERVER is defined)
Below is the host_vars file that I am using for this playbook. The vultr_common.firewall_group entries contain the firewall rules to create, and the group name must match what’s specified in the server list file (PROD-WEB.yaml below).
host_vars/localhost.yaml: vultr_common variables
---
vultr_common:
  apikey: !!! YOURAPIKEY !!!
  inventory_file: /etc/ansible/hosts
  min_balance: -50
  firewall_group:
    shellserver:
      - protocol: icmp
        ip_version: v4
      - protocol: icmp
        ip_version: v6
    webserver:
      - protocol: tcp
        port: 80
        ip_version: v4
      - protocol: tcp
        port: 443
        ip_version: v4
      - protocol: tcp
        port: 80
        ip_version: v6
      - protocol: tcp
        port: 443
        ip_version: v6
      - protocol: icmp
        ip_version: v4
      - protocol: icmp
        ip_version: v6
Below is the list of servers to be created or destroyed by the playbooks.
PROD-WEB.yaml: vultr server list
---
tag: PROD-WEB
firewall_group: webserver
modified: Mon, 5 Feb 2018 21:53:52 -0500

vultr:
  servers:
    - name: curly.example.com
      os: Debian 9 x64 (stretch)
      plan: 1024 MB RAM,25 GB SSD,1.00 TB BW
      region: Chicago
    - name: larry.example.com
      os: Debian 9 x64 (stretch)
      plan: 1024 MB RAM,25 GB SSD,1.00 TB BW
      region: Chicago
    - name: moe.example.com
      os: Debian 9 x64 (stretch)
      plan: 1024 MB RAM,25 GB SSD,1.00 TB BW
      region: Chicago
...
Once the servers are created, I simply run follow-up playbooks against the “vultr” section of my inventory file to install, configure, and secure the software on the individual servers.
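A follow-up play only needs to target that inventory group. A minimal sketch, where the role names are placeholders for whatever configuration actually gets applied:

---
- name: "Configure freshly provisioned VULTR servers"
  hosts: vultr
  become: true
  roles:
    - hardening    # placeholder role
    - webserver    # placeholder role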
To clean things up, the following is my deprovisioning playbook. It will destroy the servers, the related DNS records, and the host entries in the Ansible inventory. Firewall groups and DNS zones are left intact for future deployments or for other servers that may still be running.
vr_deprovision.yaml: a /usr/local/bin/ansible-playbook script: Vultr server deprovisioning
#!/usr/local/bin/ansible-playbook
## Deprovision virtual instances on VULTR.
## 2018 (v.01) - Playbook from www.davideaves.com
---
- name: "VULTR deprovision instance"
  hosts: localhost
  connection: local
  gather_facts: false

  environment:
    VULTR_API_KEY: "{{ vultr_common.apikey }}"

  tasks:

    ### Prerequisite validation ###

    - block:
        - name: "Playbook external variables include"
          include_vars: "{{ VARS }}"
          when: (VARS is defined)
      always:
        - name: "Playbook external variables example"
          local_action: debug msg="playbook.yaml -e VARS=tag.yaml"
          when: (VARS is not defined)

    - name: "Playbook requirement check"
      fail:
        msg: |
          Required variable is undefined!
          > vultr_common.apikey
          > vultr_common.inventory_file
          > vultr.servers: ...
      when: (vultr_common is undefined) or
            (vultr_common.apikey is undefined) or
            (vultr_common.inventory_file is undefined) or
            (vultr is undefined) or
            (vultr.servers is undefined)

    - name: "Inventory file status"
      stat: path="{{ vultr_common.inventory_file }}"
      register: INVENTORY_FILE

    - name: "Inventory file writeable"
      fail:
        msg: "{{ vultr_common.inventory_file }} not writeable"
      when: not(INVENTORY_FILE.stat.writeable)

    - name: "~/.vultr.ini handling block"
      block:
        - name: "Validate ini file exists"
          file:
            path: "~/.vultr.ini"
            mode: 0600
            state: touch
          changed_when: False
        - name: "VULTR_API_KEY is present"
          ini_file:
            path: "~/.vultr.ini"
            section: default
            option: "VULTR_API_KEY"
            value: "{{ vultr_common.apikey }}"
            no_extra_spaces: yes
            state: present
      rescue:
        - fail:
            msg: "Unable to handle ~/.vultr.ini"

    ### Destroy Instances ###

    - name: "VULTR deprovision instances"
      local_action:
        module: vr_server
        name: "{{ item.name }}"
        state: absent
      with_items: "{{ vultr.servers }}"
      register: BUILD

    ### Update Ansible inventory ###

    - name: "Initialize empty list (SERVER)"
      set_fact: SERVER=[]
      when: (BUILD is defined) and (BUILD.changed)
      no_log: True

    - name: "Populate empty list (SERVER)"
      set_fact:
        SERVER: "{{ SERVER }} + [ '{{ item.vultr_server.name }},{{ item.vultr_server.v4_main_ip }},{{ item.vultr_server.v6_main_ip }}' ]"
      with_items: "{{ BUILD.results }}"
      when: (SERVER is defined)
      no_log: True

    - name: "Remove servers from inventory file."
      ini_file:
        path: "{{ vultr_common.inventory_file }}"
        section: vultr
        option: "{{ item.split(',')[0] }} ansible_host"
        value: "{{ item.split(',')[1] }}"
        no_extra_spaces: yes
        mode: 0666
        state: absent
        backup: yes
      with_items: "{{ SERVER }}"
      when: (SERVER is defined)

    ### Update DNS ###

    - name: "VULTR DNS A record"
      vr_dns_record:
        record_type: A
        name: "{{ item.split(',')[0] | regex_replace('[.]\\w*') }}"
        domain: "{{ item.split(',')[0] | regex_replace('^\\w+.') }}"
        state: absent
      with_items: "{{ SERVER }}"
      when: (SERVER is defined)

    - name: "VULTR DNS AAAA record"
      vr_dns_record:
        record_type: AAAA
        name: "{{ item.split(',')[0] | regex_replace('[.]\\w*') }}"
        domain: "{{ item.split(',')[0] | regex_replace('^\\w+.') }}"
        state: absent
      with_items: "{{ SERVER }}"
      when: (SERVER is defined)
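Run by hand, tearing down the PROD-WEB environment is simply the mirror image of the provisioning run:

./vr_deprovision.yaml -e VARS=PROD-WEB.yaml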