This playbook appears to be SSHing into my local machine rather than the remote one; I'm inferring this from the output I've included at the bottom.

I've adapted the example from here: http://docs.ansible.com/ansible/guide_aws.html#provisioning

The playbook is split into two plays:

  1. creation of the EC2 instance and
  2. configuration of the EC2 instance

Note: to run this you'll need to create a key pair with the same name as the project (more information here: https://us-west-2.console.aws.amazon.com/ec2/v2/home?region=us-west-2#KeyPairs:sort=keyName)
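
If you'd rather create that key pair from Ansible instead of the console, something along these lines should do it (an untested sketch, not part of the original playbook: it assumes us-west-2 and saves the private key to ~/.ssh/<project>.pem, which the EC2 API only returns the first time the key pair is created):

- hosts: localhost
  connection: local
  gather_facts: false
  vars:
    project_name: my-test
  tasks:
    - name: Create an EC2 key pair named after the project
      ec2_key:
        name: "{{ project_name }}"
        region: us-west-2
      register: keypair

    - name: Save the private key locally (only returned when the key pair is newly created)
      copy:
        content: "{{ keypair.key.private_key }}"
        dest: "~/.ssh/{{ project_name }}.pem"
        mode: "0600"
      when: keypair.changed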

The playbook is listed below:

# Create instance
- hosts: 127.0.0.1
  connection: local
  gather_facts: false
  vars:
    project_name: my-test
  tasks:
    - name: Get the current username
      local_action: command whoami
      register: username_on_the_host

    - name: Capture current instances
      ec2_remote_facts:
        region: "us-west-2"
      register: ec2_instances

    - name: Create instance
      ec2:
        region: "us-west-2"
        zone: "us-west-2c"
        keypair: "{{ project_name }}"
        group:
          - "SSH only"
        instance_type: "t2.nano"
        image: "ami-59799439"  # debian:jessie amd64 hvm on us-west 2
        count_tag: "{{ project_name }}-{{ username_on_the_host.stdout }}-test"
        exact_count: 1
        wait: yes
        instance_tags:
          Name: "{{ project_name }}-{{ username_on_the_host.stdout }}-test"
          "{{ project_name }}-{{ username_on_the_host.stdout }}-test": simple_ec2
          Creator: "{{ username_on_the_host.stdout }}"
      register: ec2_info

    - name: Wait for instances to listen on port 22
      wait_for:
        state: started
        host: "{{ item.public_dns_name }}"
        port: 22
      with_items: "{{ ec2_info.instances }}"
      when: ec2_info|changed

    - name: Add new instance to launched group
      add_host:
        hostname: "{{ item.public_dns_name }}"
        groupname: launched
      with_items: "{{ ec2_info.instances }}"
      when: ec2_info|changed

    - name: Get ec2_info information
      debug:
        msg: "{{ ec2_info }}"

# Configure and install all we need
- hosts: launched
  remote_user: admin
  gather_facts: true
  tasks:
    - name: Display all variables/facts known for a host
      debug:
        var: hostvars[inventory_hostname]

    - name: List hosts
      debug: msg="groups={{groups}}"

    - name: Get current user
      command: whoami

    - name: Prepare system
      become: yes
      become_method: sudo
      apt: "name={{item}} state=latest"
      with_items:
        - software-properties-common
        - python-software-properties
        - devscripts
        - build-essential
        - libffi-dev
        - libssl-dev
        - vim

The output I have is:

TASK [Get current user] ********************************************************
changed: [ec2-35-167-142-43.us-west-2.compute.amazonaws.com] => {"changed": true, "cmd": ["whoami"], "delta": "0:00:00.006532", "end": "2017-01-09 14:53:55.806000", "rc": 0, "start": "2017-01-09 14:53:55.799468", "stderr": "", "stdout": "brianbruggeman", "stdout_lines": ["brianbruggeman"], "warnings": []}

TASK [Prepare system] **********************************************************
failed: [ec2-35-167-142-43.us-west-2.compute.amazonaws.com] (item=['software-properties-common', 'python-software-properties', 'devscripts', 'build-essential', 'libffi-dev', 'libssl-dev', 'vim']) => {"failed": true, "item": ["software-properties-common", "python-software-properties", "devscripts", "build-essential", "libffi-dev", "libssl-dev", "vim"], "module_stderr": "sudo: a password is required\n", "module_stdout": "", "msg": "MODULE FAILURE"}

1 Answer

This should work.

- name: Create Ec2 Instances
  hosts: localhost
  connection: local
  gather_facts: False

  vars:
    project_name: device-graph
    ami_id: ami-59799439  # debian jessie 64-bit hvm
    region: us-west-2
    zone: "us-west-2c"
    instance_size: "t2.nano"

  tasks:
    - name: Get the local username running the deploy
      local_action: command whoami
      register: username

    - name: Provision a set of instances
      ec2:
        region: "{{ region }}"
        zone: "{{ zone }}"
        key_name: my_key
        group: ["SSH only"]
        instance_type: "{{ instance_size }}"
        image: "{{ ami_id }}"
        wait: true
        exact_count: 1
        count_tag:
          Name: "{{ project_name }}-{{ username.stdout }}-test"
          Creator: "{{ username.stdout }}"
          Project: "{{ project_name }}"
        instance_tags:
          Name: "{{ project_name }}-{{ username.stdout }}-test"
          Creator: "{{ username.stdout }}"
          Project: "{{ project_name }}"
      register: ec2

    - name: Add all instance public IPs to host group
      add_host:
        hostname: "{{ item.public_ip }}"
        groups: launched_ec2_hosts
      with_items: "{{ ec2.tagged_instances }}"


- name: configuration play
  hosts: launched_ec2_hosts
  user: admin
  gather_facts: true

  vars:
    ansible_ssh_private_key_file: "~/.ssh/project-name.pem"

  tasks:
    - name: Check which user the configuration play connects as
      shell: whoami
      register: username
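
For completeness, running it end to end would look roughly like this (assuming the playbook is saved as provision.yml, which is just an example name, and that AWS credentials are available to boto, e.g. via environment variables or ~/.aws/credentials):

ansible-playbook provision.yml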