-
Notifications
You must be signed in to change notification settings - Fork 5
Expand file tree
/
Copy pathmake-vms-prompt.yml
More file actions
109 lines (99 loc) · 3.8 KB
/
make-vms-prompt.yml
File metadata and controls
109 lines (99 loc) · 3.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# file: make-vms-prompt.yml
#
# prerequisites:
#   NEW: sudo dnf install redhat-rpm-config python2-devel libxml2-devel libcurl-devel ; sudo easy_install ovirt-engine-sdk-python
#   RPM: yum --enablerepo=rhel-7-server-rhv-4.2-manager-rpms install python-ovirt-engine-sdk4
#   OLD: sudo yum --enablerepo=rhel-7-server-rhv-4.1-rpms install python-ovirt-engine-sdk4
#
# execution:
#   $ ansible-playbook -i inventory make-vms-prompt.yml --ask-vault-pass
#   $ ansible-playbook make-vms-prompt.yml -e @./someVarFile.yml
---
# Play 1: list the "*-vars.yml" files under the directory named after this
# host's DNS domain, so the operator can pick one at the prompt in play 2.
- name: Query vars files for prompt dialog
  hosts: localhost
  gather_facts: true  # facts are required for ansible_domain below
  vars:
    # Explicit empty string. A bare "choices:" is YAML null, and Jinja
    # renders null as the literal text "None" when concatenated below.
    choices: ""
  tasks:
    - name: Find candidate vars files for this domain
      find:
        paths: "{{ playbook_dir }}/{{ ansible_domain }}"
        patterns: "*-vars.yml"
      register: result

    - name: Accumulate file basenames into one prompt string
      set_fact:
        choices: "{{ choices }} {{ item.path | basename }}"
      loop: "{{ result.files }}"
      no_log: true  # per-file find results are noisy; suppress them

    - name: Show the available choices
      debug:
        msg: "Please choose one of these:{{ choices }}"
# Play 2: prompt for one of the vars files listed above, then create the
# VMs (and their extra disks) in RHV and clean up the auth token.
- hosts: localhost
  #- hosts: rhvm
  gather_facts: true  # ansible_domain is used for vars_files and cloud-init
  vars_prompt:
    - prompt: "Please choose one of the above"
      name: input
      private: false  # echo the typed filename back to the operator
  vars_files:
    - "{{ ansible_domain }}/{{ input }}"
    - "{{ ansible_domain }}/rhvm-vault.yml"
  tasks:
    - name: Login to RHV
      ovirt_auth:
        url: "{{ rhvurl }}"
        insecure: true  # NOTE(review): skips TLS verification; fine for a lab, confirm for prod
        username: "{{ rhvuser }}"
        password: "{{ rhvpass }}"

    - name: Create VMs
      ovirt_vm:
        auth: "{{ ovirt_auth }}"
        cluster: "{{ rhvCluster }}"
        template: "{{ templateName }}"
        name: "{{ item.key }}"
        state: running
        # clone: true
        instance_type: "{{ instanceType }}"
        nics:
          - name: nic1
            profile_name: ovirtmgmt
        cloud_init:
          host_name: "{{ item.key }}.{{ ansible_domain }}"
          user_name: root
          root_password: "{{ vm_root_password }}"
          authorized_ssh_keys: "{{ ssh_key }}"
          dns_servers: "{{ dnsServer }}"
          dns_search: "{{ ansible_domain }}"
          nic_name: eth0
          nic_on_boot: true
          # BUGFIX: nic_boot_protocol must be one of none/dhcp/static; the old
          # expression ternary(ipaddress, 'dhcp') injected the IP address itself.
          # default('') also keeps this from erroring on hosts with no static IP
          # (the default(omit) lines below show ipaddress may be undefined).
          nic_boot_protocol: "{{ item.value.nic1.ipaddress | default('') | ipaddr | ternary('static', 'dhcp') }}"
          nic_ip_address: "{{ item.value.nic1.ipaddress | default(omit) }}"
          nic_netmask: "{{ item.value.nic1.netmask | default(omit) }}"
          nic_gateway: "{{ item.value.nic1.gateway | default(omit) }}"
          custom_script: |
            runcmd:
              - hostnamectl set-hostname {{ item.key }}.{{ ansible_domain }}
              - nmcli con mod "System eth0" connection.id "eth0"
              - sed -i -e '/192.168.122.1/d' -e '/^[;#]/d' /etc/resolv.conf
              - yum -y remove cloud-init
        wait: true
      with_dict: "{{ vms }}"  # See {{ ansible_domain }}/{{ input }} (e.g. home.lab/ocp-cns-vars.yml)

    - name: Create and attach disks
      ovirt_disk:
        auth: "{{ ovirt_auth }}"
        vm_name: "{{ item.0.name }}"
        storage_domain: "{{ storageDomain }}"
        name: "{{ item.0.name }}-disk-{{ item.1 }}"
        size: "{{ disks_size }}"
        format: cow
        interface: virtio_scsi
        wait: true
      with_subelements:
        - "{{ disks }}"
        - id

    - name: Cleanup RHV auth token
      ovirt_auth:
        ovirt_auth: "{{ ovirt_auth }}"
        state: absent
### RUBBISH ###
#- nmcli con add type ethernet connection.id eth0 connection.interface-name eth0 ipv4.auto
#- nmcli con add type ethernet connection.id eth0 connection.interface-name eth0 ipv4.method static ipv4.addresses {{ item.value.nic1.ipaddress }}/24 ipv4.gateway {{ item.value.nic1.gateway }} ipv4.dns {{ dnsServer }} ipv4.dns-search {{ ansible_domain }}
#- nmcli con del "System eth0"