diff --git a/test/integration/cleanup_ec2.py b/test/integration/cleanup_ec2.py index cb724a7777..9d6357883f 100644 --- a/test/integration/cleanup_ec2.py +++ b/test/integration/cleanup_ec2.py @@ -11,6 +11,7 @@ import boto import optparse import yaml import os.path +import boto.ec2.elb def delete_aws_resources(get_func, attr, opts): for item in get_func(): @@ -113,6 +114,9 @@ if __name__ == '__main__': aws = boto.connect_ec2(aws_access_key_id=opts.ec2_access_key, aws_secret_access_key=opts.ec2_secret_key) + elb = boto.connect_elb(aws_access_key_id=opts.ec2_access_key, + aws_secret_access_key=opts.ec2_secret_key) + try: # Delete matching keys delete_aws_resources(aws.get_all_key_pairs, 'name', opts) @@ -120,6 +124,9 @@ if __name__ == '__main__': # Delete matching groups delete_aws_resources(aws.get_all_security_groups, 'name', opts) + # Delete ELBs + delete_aws_resources(elb.get_all_load_balancers, 'name', opts) + # Delete recorded EIPs delete_aws_eips(aws.get_all_addresses, 'public_ip', opts) diff --git a/test/integration/roles/test_ec2_elb_lb/defaults/main.yml b/test/integration/roles/test_ec2_elb_lb/defaults/main.yml index 234720c640..76164523d4 100644 --- a/test/integration/roles/test_ec2_elb_lb/defaults/main.yml +++ b/test/integration/roles/test_ec2_elb_lb/defaults/main.yml @@ -1,2 +1,3 @@ --- -# defaults file for test_ec2_elb_lb +# defaults file for test_ec2_elb_lb +tag_prefix: '{{resource_prefix}}' diff --git a/test/integration/roles/test_ec2_elb_lb/tasks/main.yml b/test/integration/roles/test_ec2_elb_lb/tasks/main.yml index 684d8f35ec..efc2affb26 100644 --- a/test/integration/roles/test_ec2_elb_lb/tasks/main.yml +++ b/test/integration/roles/test_ec2_elb_lb/tasks/main.yml @@ -1,2 +1,421 @@ --- -# tasks file for test_ec2_elb_lb +# __Test Info__ +# Create a self signed cert and upload it to AWS +# http://www.akadia.com/services/ssh_test_certificate.html +# http://docs.aws.amazon.com/ElasticLoadBalancing/latest/DeveloperGuide/ssl-server-cert.html + +# __Test 
Outline__ +# +# __ec2_elb_lb__ +# create test elb with listeners and certificate +# change AZ's +# change listeners +# remove listeners +# remove elb + +# __ec2-common__ +# test environment variable EC2_REGION +# test with no parameters +# test with only instance_id +# test invalid region parameter +# test valid region parameter +# test invalid ec2_url parameter +# test valid ec2_url parameter +# test credentials from environment +# test credential parameters + +# ============================================================ +# create test elb with listeners, certificate, and health check + +- name: Create ELB + ec2_elb_lb: + name: "{{ tag_prefix }}" + region: "{{ ec2_region }}" + ec2_access_key: "{{ ec2_access_key }}" + ec2_secret_key: "{{ ec2_secret_key }}" + state: present + zones: + - us-east-1c + - us-east-1d + listeners: + - protocol: http + load_balancer_port: 80 + instance_port: 80 + - protocol: https + load_balancer_port: 443 + instance_protocol: http + instance_port: 80 + ssl_certificate_id: "{{ elb_lb_cert }}" + health_check: + ping_protocol: http + ping_port: 80 + ping_path: "/index.html" + response_timeout: 5 + interval: 30 + unhealthy_threshold: 2 + healthy_threshold: 10 + register: info + +- assert: + that: + - 'info.changed' + - '"failed" not in info' + - 'info.elb.status == "created"' + - 'info.elb.zones[0] == "us-east-1c"' + - 'info.elb.zones[1] == "us-east-1d"' + - 'info.elb.health_check.healthy_threshold == 10' + - 'info.elb.health_check.interval == 30' + - 'info.elb.health_check.target == "HTTP:80/index.html"' + - 'info.elb.health_check.timeout == 5' + - 'info.elb.health_check.unhealthy_threshold == 2' + - 'info.elb.listeners[0][0] == 80' + - 'info.elb.listeners[0][1] == 80' + - 'info.elb.listeners[0][2] == "HTTP"' + - 'info.elb.listeners[0][3] == "HTTP"' + - 'info.elb.listeners[1][0] == 443' + - 'info.elb.listeners[1][1] == 80' + - 'info.elb.listeners[1][2] == "HTTPS"' + - 'info.elb.listeners[1][3] == "HTTP"' + - 'info.elb.listeners[1][4] == "{{ 
elb_lb_cert }}"' +# ============================================================ + +# check ports, would be cool, but we are at the mercy of AWS +# to start things in a timely manner + +#- name: check to make sure 80 is listening +# wait_for: host={{ info.elb.dns_name }} port=80 timeout=600 +# register: result + +#- name: assert can connect to port# +# assert: 'result.state == "started"' + +#- name: check to make sure 443 is listening +# wait_for: host={{ info.elb.dns_name }} port=443 timeout=600 +# register: result + +#- name: assert can connect to port# +# assert: 'result.state == "started"' + +# ============================================================ + +# Change AZ's + +- name: Change AZ's + ec2_elb_lb: + name: "{{ tag_prefix }}" + region: "{{ ec2_region }}" + ec2_access_key: "{{ ec2_access_key }}" + ec2_secret_key: "{{ ec2_secret_key }}" + state: present + zones: + - us-east-1b + listeners: + - protocol: http + load_balancer_port: 80 + instance_port: 80 + - protocol: https + load_balancer_port: 443 + instance_protocol: http + instance_port: 80 + ssl_certificate_id: "{{ elb_lb_cert }}" + purge_zones: yes + health_check: + ping_protocol: http + ping_port: 80 + ping_path: "/index.html" + response_timeout: 5 + interval: 30 + unhealthy_threshold: 2 + healthy_threshold: 10 + register: info + + + +- assert: + that: + - '"failed" not in info' + - 'info.elb.status == "ok"' + - 'info.changed' + - 'info.elb.zones[0] == "us-east-1b"' + +# ============================================================ + +# Update AZ's + +- name: Update AZ's + ec2_elb_lb: + name: "{{ tag_prefix }}" + region: "{{ ec2_region }}" + ec2_access_key: "{{ ec2_access_key }}" + ec2_secret_key: "{{ ec2_secret_key }}" + state: present + zones: + - us-east-1b + - us-east-1c + - us-east-1d + listeners: + - protocol: http + load_balancer_port: 80 + instance_port: 80 + - protocol: https + load_balancer_port: 443 + instance_protocol: http + instance_port: 80 + ssl_certificate_id: "{{ elb_lb_cert }}" + 
purge_zones: yes + register: info + +- assert: + that: + - '"failed" not in info' + - 'info.changed' + - 'info.elb.status == "ok"' + - 'info.elb.zones[0] == "us-east-1b"' + - 'info.elb.zones[1] == "us-east-1c"' + - 'info.elb.zones[2] == "us-east-1d"' + + +# ============================================================ + +# Purge Listeners + +- name: Purge Listeners + ec2_elb_lb: + name: "{{ tag_prefix }}" + region: "{{ ec2_region }}" + ec2_access_key: "{{ ec2_access_key }}" + ec2_secret_key: "{{ ec2_secret_key }}" + state: present + zones: + - us-east-1b + - us-east-1c + - us-east-1d + listeners: + - protocol: http + load_balancer_port: 80 + instance_port: 81 + purge_listeners: yes + register: info + +- assert: + that: + - '"failed" not in info' + - 'info.elb.status == "ok"' + - 'info.changed' + - 'info.elb.listeners[0][0] == 80' + - 'info.elb.listeners[0][1] == 81' + - 'info.elb.listeners|length == 1' + + + +# ============================================================ + +# add Listeners + +- name: Add Listeners + ec2_elb_lb: + name: "{{ tag_prefix }}" + region: "{{ ec2_region }}" + ec2_access_key: "{{ ec2_access_key }}" + ec2_secret_key: "{{ ec2_secret_key }}" + state: present + zones: + - us-east-1b + - us-east-1c + - us-east-1d + listeners: + - protocol: http + load_balancer_port: 80 + instance_port: 81 + - protocol: https + load_balancer_port: 443 + instance_protocol: http + instance_port: 80 + ssl_certificate_id: "{{ elb_lb_cert }}" + purge_listeners: yes + register: info + +- assert: + that: + - '"failed" not in info' + - 'info.elb.status == "ok"' + - 'info.changed' + - 'info.elb.listeners[0][0] == 80' + - 'info.elb.listeners[0][1] == 80' + - 'info.elb.listeners[0][2] == "HTTP"' + - 'info.elb.listeners[0][3] == "HTTP"' + - 'info.elb.listeners[1][0] == 443' + - 'info.elb.listeners[1][1] == 80' + - 'info.elb.listeners[1][2] == "HTTPS"' + - 'info.elb.listeners[1][3] == "HTTP"' + - 'info.elb.listeners[1][4] == "{{ elb_lb_cert }}"' + - 'info.elb.listeners|length == 
2' + + +# ============================================================ + +- name: test with no parameters + ec2_elb_lb: + register: result + ignore_errors: true + +- name: assert failure when called with no parameters + assert: + that: + - 'result.failed' + - 'result.msg == "missing required arguments: name,state"' + + + +# ============================================================ +- name: test with only name + ec2_elb_lb: + name="{{ tag_prefix }}" + register: result + ignore_errors: true + +- name: assert failure when called with only name + assert: + that: + - 'result.failed' + - 'result.msg == "missing required arguments: state"' + + +# ============================================================ +- name: test invalid region parameter + ec2_elb_lb: + name="{{ tag_prefix }}" + region='asdf querty 1234' + state=present + register: result + ignore_errors: true + +- name: assert invalid region parameter + assert: + that: + - 'result.failed' + - 'result.msg.startswith("value of region must be one of:")' + + +# ============================================================ +- name: test valid region parameter + ec2_elb_lb: + name: "{{ tag_prefix }}" + region: "{{ ec2_region }}" + state: present + zones: + - us-east-1a + - us-east-1d + listeners: + - protocol: http + load_balancer_port: 80 + instance_port: 80 + + register: result + ignore_errors: true + +- name: assert valid region parameter + assert: + that: + - 'result.failed' + - 'result.msg.startswith("No handler was ready to authenticate.")' + + +# ============================================================ + +- name: test invalid ec2_url parameter + ec2_elb_lb: + name: "{{ tag_prefix }}" + region: "{{ ec2_region }}" + state: present + zones: + - us-east-1a + - us-east-1d + listeners: + - protocol: http + load_balancer_port: 80 + instance_port: 80 + environment: + EC2_URL: bogus.example.com + register: result + ignore_errors: true + +- name: assert invalid ec2_url parameter + assert: + that: + - 'result.failed' 
+ - 'result.msg.startswith("No handler was ready to authenticate.")' + + +# ============================================================ +- name: test valid ec2_url parameter + ec2_elb_lb: + name: "{{ tag_prefix }}" + region: "{{ ec2_region }}" + state: present + zones: + - us-east-1a + - us-east-1d + listeners: + - protocol: http + load_balancer_port: 80 + instance_port: 80 + environment: + EC2_URL: '{{ec2_url}}' + register: result + ignore_errors: true + +- name: assert valid ec2_url parameter + assert: + that: + - 'result.failed' + - 'result.msg.startswith("No handler was ready to authenticate.")' + + +# ============================================================ +- name: test credentials from environment + ec2_elb_lb: + name: "{{ tag_prefix }}" + region: "{{ ec2_region }}" + state: present + zones: + - us-east-1a + - us-east-1d + listeners: + - protocol: http + load_balancer_port: 80 + instance_port: 80 + environment: + EC2_ACCESS_KEY: bogus_access_key + EC2_SECRET_KEY: bogus_secret_key + register: result + ignore_errors: true + +- name: assert credentials from environment + assert: + that: + - 'result.failed' + - '"InvalidClientTokenId" in result.msg' + + +# ============================================================ +- name: test credential parameters + ec2_elb_lb: + name: "{{ tag_prefix }}" + region: "{{ ec2_region }}" + state: present + zones: + - us-east-1a + - us-east-1d + listeners: + - protocol: http + load_balancer_port: 80 + instance_port: 80 + register: result + ignore_errors: true + +- name: assert credential parameters + assert: + that: + - 'result.failed' + - '"No handler was ready to authenticate. 1 handlers were checked." in result.msg'