This repository has been archived by the owner on May 27, 2024. It is now read-only.

ansible-examples should adhere to the standards of the ansible-linter #323

Open · wants to merge 11 commits into master
46 changes: 23 additions & 23 deletions jboss-standalone/demo-aws-launch.yml
@@ -2,32 +2,32 @@
- name: Provision instances
Member:
This is also a YAML sequence, for example, and it's not indented like the rest of the file, which is inconsistent. Ideally, there shouldn't be unnecessary indentation without a good reason.

Author:
I don't get your point here. This YAML file is indented like all the other YAML files.
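For illustration, a minimal sketch of the two sequence-indentation styles being debated here (whether ansible-lint flags one of them depends on the yamllint configuration in effect, so treat this purely as an example of the inconsistency, not as the project's actual rule):

# sequence items flush with the parent key (common elsewhere in these playbooks)
vars_files:
- group_vars/all

# sequence items indented under the parent key (the style this PR appears to move toward)
vars_files:
  - group_vars/all

Both forms are valid YAML; the linting concern is only about mixing the two styles within one repository.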

hosts: localhost
connection: local
gather_facts: False
gather_facts: false

# load AWS variables from this group vars file
vars_files:
- group_vars/all
- group_vars/all

tasks:
- name: Launch instances
ec2:
access_key: "{{ ec2_access_key }}"
secret_key: "{{ ec2_secret_key }}"
keypair: "{{ ec2_keypair }}"
group: "{{ ec2_security_group }}"
type: "{{ ec2_instance_type }}"
image: "{{ ec2_image }}"
region: "{{ ec2_region }}"
instance_tags: "{'ansible_group':'jboss', 'type':'{{ ec2_instance_type }}', 'group':'{{ ec2_security_group }}', 'Name':'demo_''{{ tower_user_name }}'}"
count: "{{ ec2_instance_count }}"
wait: true
register: ec2
- name: Launch instances
amazon.aws.ec2_instance:
access_key: "{{ ec2_access_key }}"
secret_key: "{{ ec2_secret_key }}"
keypair: "{{ ec2_keypair }}"
group: "{{ ec2_security_group }}"
type: "{{ ec2_instance_type }}"
image: "{{ ec2_image }}"
region: "{{ ec2_region }}"
instance_tags: "{'ansible_group':'jboss', 'type':'{{ ec2_instance_type }}', 'group':'{{ ec2_security_group }}', 'Name':'demo_''{{ tower_user_name }}'}"
count: "{{ ec2_instance_count }}"
wait: true
register: ec2

- name: Wait for SSH to come up
wait_for:
host: "{{ item.public_dns_name }}"
port: 22
delay: 60
timeout: 320
state: started
with_items: "{{ ec2.instances }}"
- name: Wait for SSH to come up
ansible.builtin.wait_for:
host: "{{ item.public_dns_name }}"
port: 22
delay: 60
timeout: 320
state: started
with_items: "{{ ec2.instances }}"
4 changes: 2 additions & 2 deletions jboss-standalone/deploy-application.yml
@@ -4,6 +4,6 @@
- hosts: all

roles:
# Optionally, (re)deploy JBoss here.
# - jboss-standalone
# Optionally, (re)deploy JBoss here.
# - jboss-standalone
- java-app
8 changes: 4 additions & 4 deletions jboss-standalone/roles/java-app/tasks/main.yml
@@ -1,23 +1,23 @@
---
- name: Copy application WAR file to host
copy:
ansible.builtin.copy:
src: jboss-helloworld.war
dest: /tmp

- name: Deploy HelloWorld to JBoss
jboss:
community.general.web_infrastructure.jboss:
deploy_path: /usr/share/jboss-as/standalone/deployments/
src: /tmp/jboss-helloworld.war
deployment: helloworld.war
state: present

- name: Copy application WAR file to host
copy:
ansible.builtin.copy:
src: ticket-monster.war
dest: /tmp

- name: Deploy Ticket Monster to JBoss
jboss:
community.general.web_infrastructure.jboss:
deploy_path: /usr/share/jboss-as/standalone/deployments/
src: /tmp/ticket-monster.war
deployment: ticket-monster.war
10 changes: 6 additions & 4 deletions jboss-standalone/roles/jboss-standalone/handlers/main.yml
@@ -1,10 +1,12 @@
---
- name: restart jboss
service:
- name: Restart jboss
ansible.builtin.service:
name: jboss
state: restarted
listen: restart_jboss

- name: restart iptables
service:
- name: Restart iptables
ansible.builtin.service:
name: iptables
state: restarted
listen: restart_iptables
Member:
Why use snake_case for handlers? They aren't Python identifiers. It'd be confusing.
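For reference, a minimal sketch of how the renamed handlers stay reachable, using names from this diff: a task's notify matches either the handler's name or its listen topic, so the human-readable name and the snake_case topic can coexist.

# handlers/main.yml
- name: Restart jboss                # human-readable handler name
  ansible.builtin.service:
    name: jboss
    state: restarted
  listen: restart_jboss              # snake_case topic that tasks can notify

# tasks/main.yml
- name: Copying standalone.xml configuration file
  ansible.builtin.template:
    src: standalone.xml
    dest: /usr/share/jboss-as/standalone/configuration/
  notify: restart_jboss              # matches the listen topic rather than the handler name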

72 changes: 37 additions & 35 deletions jboss-standalone/roles/jboss-standalone/tasks/main.yml
@@ -1,100 +1,102 @@
---
- name: Install Java 1.7 and some basic dependencies
yum:
ansible.builtin.yum:
name: "{{ item }}"
state: present
with_items:
- unzip
- java-1.7.0-openjdk
- libselinux-python
- libsemanage-python
- unzip
- java-1.7.0-openjdk
- libselinux-python
- libsemanage-python

- name: Download JBoss from jboss.org
get_url:
ansible.builtin.get_url:
url: http://download.jboss.org/jbossas/7.1/jboss-as-7.1.1.Final/jboss-as-7.1.1.Final.zip
dest: /opt/jboss-as-7.1.1.Final.zip

- name: Extract archive
unarchive:
ansible.builtin.unarchive:
dest: /usr/share
src: /opt/jboss-as-7.1.1.Final.zip
creates: /usr/share/jboss-as
copy: no
copy: false

# Rename the dir to avoid encoding the version in the init script
# Rename the dir to avoid encoding the version in the init script
- name: Rename install directory
command: /bin/mv jboss-as-7.1.1.Final jboss-as
ansible.builtin.command: /bin/mv jboss-as-7.1.1.Final jboss-as
args:
chdir: /usr/share
chdir: /usr/share
creates: /usr/share/jboss-as

- name: Copying standalone.xml configuration file
template:
ansible.builtin.template:
src: standalone.xml
dest: /usr/share/jboss-as/standalone/configuration/
notify: restart jboss
notify: restart_jboss

- name: Add group "jboss"
group:
ansible.builtin.group:
name: jboss

- name: Add user "jboss"
user:
ansible.builtin.user:
name: jboss
group: jboss
home: /usr/share/jboss-as

- name: Change ownership of JBoss installation
file:
ansible.builtin.file:
path: /usr/share/jboss-as/
owner: jboss
group: jboss
state: directory
recurse: yes
recurse: true

- name: Copy the init script
copy:
ansible.builtin.copy:
src: jboss-as-standalone.sh
dest: /etc/init.d/jboss
mode: 0755
mode: "0755"

- name: Workaround for systemd bug
shell: service jboss start && chkconfig jboss on
ignore_errors: yes
ansible.builtin.shell: service jboss start && chkconfig jboss on
ignore_errors: true

- name: Enable JBoss to be started at boot
service:
ansible.builtin.service:
name: jboss
enabled: yes
enabled: true
state: started

- name: deploy iptables rules
template:
- name: Deploy iptables rules
ansible.builtin.template:
src: iptables-save
dest: /etc/sysconfig/iptables
owner: root
group: root
mode: u=rw,g=,o=
when: ansible_distribution_major_version != "7"
notify: restart iptables
notify: restart_iptables

- name: Ensure that firewalld is installed
yum:
ansible.builtin.yum:
name: firewalld
state: present
when: ansible_distribution_major_version == "7"

- name: Ensure that firewalld is started
service:
ansible.builtin.service:
name: firewalld
state: started
when: ansible_distribution_major_version == "7"

- name: deploy firewalld rules
firewalld:
immediate: yes
- name: Deploy firewalld rules
ansible.posix.firewalld:
immediate: true
port: "{{ item }}"
state: enabled
permanent: yes
permanent: true
when: ansible_distribution_major_version == "7"
with_items:
- "{{ http_port }}/tcp"
- "{{ https_port }}/tcp"

- "{{ http_port }}/tcp"
- "{{ https_port }}/tcp"
124 changes: 62 additions & 62 deletions lamp_haproxy/aws/demo-aws-launch.yml
@@ -3,74 +3,74 @@
- name: Provision instances in AWS
hosts: localhost
connection: local
gather_facts: False
gather_facts: false

# load AWS variables from this group vars file
vars_files:
- group_vars/all
- group_vars/all

tasks:
- name: Launch webserver instances
ec2:
access_key: "{{ ec2_access_key }}"
secret_key: "{{ ec2_secret_key }}"
keypair: "{{ ec2_keypair }}"
group: "{{ ec2_security_group }}"
type: "{{ ec2_instance_type }}"
image: "{{ ec2_image }}"
region: "{{ ec2_region }}"
instance_tags: "{'ansible_group':'webservers', 'type':'{{ ec2_instance_type }}', 'group':'{{ ec2_security_group }}', 'Name':'demo_''{{ tower_user_name }}'}"
count: "{{ ec2_instance_count }}"
wait: true
register: ec2
- name: Launch webserver instances
ec2:
access_key: "{{ ec2_access_key }}"
secret_key: "{{ ec2_secret_key }}"
keypair: "{{ ec2_keypair }}"
group: "{{ ec2_security_group }}"
type: "{{ ec2_instance_type }}"
image: "{{ ec2_image }}"
region: "{{ ec2_region }}"
instance_tags: "{'ansible_group':'webservers', 'type':'{{ ec2_instance_type }}', 'group':'{{ ec2_security_group }}', 'Name':'demo_''{{ tower_user_name }}'}"
count: "{{ ec2_instance_count }}"
wait: true
register: ec2

- name: Launch database instance
ec2:
access_key: "{{ ec2_access_key }}"
secret_key: "{{ ec2_secret_key }}"
keypair: "{{ ec2_keypair }}"
group: "{{ ec2_security_group }}"
type: "{{ ec2_instance_type }}"
image: "{{ ec2_image }}"
region: "{{ ec2_region }}"
instance_tags: "{'ansible_group':'dbservers', 'type':'{{ ec2_instance_type }}', 'group':'{{ ec2_security_group }}', 'Name':'demo_''{{ tower_user_name }}'}"
count: "1"
wait: true
register: ec2
- name: Launch database instance
ec2:
access_key: "{{ ec2_access_key }}"
secret_key: "{{ ec2_secret_key }}"
keypair: "{{ ec2_keypair }}"
group: "{{ ec2_security_group }}"
type: "{{ ec2_instance_type }}"
image: "{{ ec2_image }}"
region: "{{ ec2_region }}"
instance_tags: "{'ansible_group':'dbservers', 'type':'{{ ec2_instance_type }}', 'group':'{{ ec2_security_group }}', 'Name':'demo_''{{ tower_user_name }}'}"
count: "1"
wait: true
register: ec2

- name: Launch load balancing instance
ec2:
access_key: "{{ ec2_access_key }}"
secret_key: "{{ ec2_secret_key }}"
keypair: "{{ ec2_keypair }}"
group: "{{ ec2_security_group }}"
type: "{{ ec2_instance_type }}"
image: "{{ ec2_image }}"
region: "{{ ec2_region }}"
instance_tags: "{'ansible_group':'lbservers', 'type':'{{ ec2_instance_type }}', 'group':'{{ ec2_security_group }}', 'Name':'demo_''{{ tower_user_name }}'}"
count: "1"
wait: true
register: ec2
- name: Launch load balancing instance
ec2:
access_key: "{{ ec2_access_key }}"
secret_key: "{{ ec2_secret_key }}"
keypair: "{{ ec2_keypair }}"
group: "{{ ec2_security_group }}"
type: "{{ ec2_instance_type }}"
image: "{{ ec2_image }}"
region: "{{ ec2_region }}"
instance_tags: "{'ansible_group':'lbservers', 'type':'{{ ec2_instance_type }}', 'group':'{{ ec2_security_group }}', 'Name':'demo_''{{ tower_user_name }}'}"
count: "1"
wait: true
register: ec2

- name: Launch monitoring instance
ec2:
access_key: "{{ ec2_access_key }}"
secret_key: "{{ ec2_secret_key }}"
keypair: "{{ ec2_keypair }}"
group: "{{ ec2_security_group }}"
type: "{{ ec2_instance_type }}"
image: "{{ ec2_image }}"
region: "{{ ec2_region }}"
instance_tags: "{'ansible_group':'monitoring', 'type':'{{ ec2_instance_type }}', 'group':'{{ ec2_security_group }}', 'Name':'demo_''{{ tower_user_name }}'}"
count: "1"
wait: true
register: ec2
- name: Launch monitoring instance
ec2:
access_key: "{{ ec2_access_key }}"
secret_key: "{{ ec2_secret_key }}"
keypair: "{{ ec2_keypair }}"
group: "{{ ec2_security_group }}"
type: "{{ ec2_instance_type }}"
image: "{{ ec2_image }}"
region: "{{ ec2_region }}"
instance_tags: "{'ansible_group':'monitoring', 'type':'{{ ec2_instance_type }}', 'group':'{{ ec2_security_group }}', 'Name':'demo_''{{ tower_user_name }}'}"
count: "1"
wait: true
register: ec2

- name: Wait for SSH to come up
wait_for:
host: "{{ item.public_dns_name }}"
port: 22
delay: 60
timeout: 320
state: started
with_items: "{{ ec2.instances }}"
- name: Wait for SSH to come up
wait_for:
host: "{{ item.public_dns_name }}"
port: 22
delay: 60
timeout: 320
state: started
with_items: "{{ ec2.instances }}"